Support robots.txt

branch main
Bob Mottram 2024-08-19 18:49:38 +01:00
parent 515b9243cc
commit 343b177622
2 changed files with 12 additions and 2 deletions


@@ -259,6 +259,16 @@ def daemon_http_get(self) -> None:
         http_402(self)
         return
 
+    # handle robots.txt
+    if self.path == '/robots.txt':
+        msg = "User-agent: *\nAllow: /"
+        msg = msg.encode('utf-8')
+        msglen = len(msg)
+        set_headers(self, 'text/plain', msglen,
+                    '', calling_domain, False)
+        write2(self, msg)
+        return
+
     # headers used by LLM scrapers
     # oai-host-hash requests come from Microsoft Corporation,
     # which has a long term partnership with OpenAI
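
To sanity-check the new route against a running instance, a minimal
sketch (the localhost URL and port 8080 are assumptions, not part of
this commit):

# Fetch /robots.txt and confirm the permissive crawl policy added above.
# Host and port are assumptions for illustration only.
import urllib.request

with urllib.request.urlopen('http://localhost:8080/robots.txt') as resp:
    # set_headers() above sends the body as text/plain
    assert resp.headers.get_content_type() == 'text/plain'
    body = resp.read().decode('utf-8')

print(body)  # expected: "User-agent: *\nAllow: /"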


@@ -506,7 +506,7 @@ def _webfinger_add_blog_link(wf_json: {}, actor_json: {}) -> bool:
     return True
 
 
-def _webfinger_updateFromProfile(wf_json: {}, actor_json: {}) -> bool:
+def _webfinger_update_from_profile(wf_json: {}, actor_json: {}) -> bool:
     """Updates webfinger Email/blog/xmpp links from profile
     Returns true if one or more tags has been changed
     """
@@ -634,6 +634,6 @@ def webfinger_update(base_dir: str, nickname: str, domain: str,
     if not actor_json:
         return
 
-    if _webfinger_updateFromProfile(wf_json, actor_json):
+    if _webfinger_update_from_profile(wf_json, actor_json):
        if save_json(wf_json, filename):
            store_webfinger_in_cache(handle, wf_json, cached_webfingers)
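
The renamed helper keeps its old contract: per its docstring it mirrors
email/blog/xmpp links from the actor profile into the webfinger document
and returns True only when something changed, so the caller above saves
and re-caches only on a real change. A hypothetical sketch of that
change-detection pattern (field names and layout are illustrative, not
Epicyon's actual schema):

def _webfinger_update_from_profile(wf_json: dict, actor_json: dict) -> bool:
    """Hypothetical sketch: copy selected profile links into the
    webfinger document and report whether anything changed."""
    changed = False
    aliases = wf_json.setdefault('aliases', [])
    xmpp = actor_json.get('xmppAddress')  # illustrative field name
    if xmpp and 'xmpp:' + xmpp not in aliases:
        aliases.append('xmpp:' + xmpp)
        changed = True
    return changed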