Support robots.txt

main
Bob Mottram 2024-08-19 18:49:38 +01:00
parent 515b9243cc
commit 343b177622
2 changed files with 12 additions and 2 deletions

View File

@@ -259,6 +259,16 @@ def daemon_http_get(self) -> None:
         http_402(self)
         return
 
+    # handle robots.txt
+    if self.path == '/robots.txt':
+        msg = "User-agent: *\nAllow: /"
+        msg = msg.encode('utf-8')
+        msglen = len(msg)
+        set_headers(self, 'text/plain', msglen,
+                    '', calling_domain, False)
+        write2(self, msg)
+        return
+
     # headers used by LLM scrapers
     # oai-host-hash requests come from Microsoft Corporation,
     # which has a long term partnership with OpenAI
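
The body served by the new branch, "User-agent: *\nAllow: /", tells every well-behaved crawler that it may fetch any path. A minimal, self-contained sketch (not part of the commit) of how a standards-following crawler would read that policy, using only Python's standard library parser; the user agent and path in the checks are illustrative:

    # Sketch: parse the exact policy string served by the new /robots.txt
    # branch and query it the way a compliant crawler would.
    from urllib import robotparser

    policy = "User-agent: *\nAllow: /"

    parser = robotparser.RobotFileParser()
    parser.parse(policy.splitlines())

    # "Allow: /" under "User-agent: *" permits any agent on any path
    print(parser.can_fetch('GPTBot', '/users/somenick'))  # True (example agent/path)
    print(parser.can_fetch('*', '/'))                     # True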

View File

@@ -506,7 +506,7 @@ def _webfinger_add_blog_link(wf_json: {}, actor_json: {}) -> bool:
     return True
 
 
-def _webfinger_updateFromProfile(wf_json: {}, actor_json: {}) -> bool:
+def _webfinger_update_from_profile(wf_json: {}, actor_json: {}) -> bool:
     """Updates webfinger Email/blog/xmpp links from profile
     Returns true if one or more tags has been changed
     """
@@ -634,6 +634,6 @@ def webfinger_update(base_dir: str, nickname: str, domain: str,
     if not actor_json:
         return
-    if _webfinger_updateFromProfile(wf_json, actor_json):
+    if _webfinger_update_from_profile(wf_json, actor_json):
         if save_json(wf_json, filename):
             store_webfinger_in_cache(handle, wf_json, cached_webfingers)