main
Bob Mottram 2024-03-20 13:25:57 +00:00
parent 8bdc68fadb
commit 3f2bf43883
1 changed file with 26 additions and 31 deletions


@@ -271,7 +271,7 @@ def daemon_http_get(self) -> None:
     ua_str = get_user_agent(self)
-    if not _permitted_crawler_path(self, self.path):
+    if not _permitted_crawler_path(self.path):
         block, self.server.blocked_cache_last_updated = \
             blocked_user_agent(calling_domain, ua_str,
                                self.server.news_instance,
@@ -467,7 +467,6 @@ def daemon_http_get(self) -> None:
                              None, self.server.debug,
                              self.server.enable_shared_inbox):
             return
-        else:
-            http_404(self, 111)
-            return
+        http_404(self, 111)
+        return
@@ -649,7 +648,6 @@ def daemon_http_get(self) -> None:
                                    None, calling_domain, True)
                 write2(self, msg)
                 return
-            else:
-                print('DEBUG: shareditems 6 ' + share_id)
+            print('DEBUG: shareditems 6 ' + share_id)
         else:
             # get json for the shared item in ValueFlows format
@@ -673,7 +671,6 @@ def daemon_http_get(self) -> None:
                                    None, calling_domain, True)
                 write2(self, msg)
                 return
-            else:
-                print('DEBUG: shareditems 7 ' + share_id)
+            print('DEBUG: shareditems 7 ' + share_id)
         http_404(self, 117)
         return
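
Most of the hunks in this commit apply the same early-return cleanup: once the if branch ends in return, the trailing else: adds nothing, so it is removed and its body de-indented one level. A minimal sketch of the pattern, using hypothetical names (handle_share_old, handle_share_new, share_id):

def handle_share_old(share_id):
    if share_id:
        print('DEBUG: handling share ' + share_id)
        return
    else:
        print('DEBUG: no share id')
        return

def handle_share_new(share_id):
    if share_id:
        print('DEBUG: handling share ' + share_id)
        return
    print('DEBUG: no share id')
    return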
@@ -1052,7 +1049,7 @@ def daemon_http_get(self) -> None:
                                        None, calling_domain, False)
             write2(self, msg)
             return
-        elif catalog_type == 'csv':
+        if catalog_type == 'csv':
             # catalog as a CSV file for import into a spreadsheet
             msg = \
                 shares_catalog_csv_endpoint(self.server.base_dir,
@@ -1161,7 +1158,7 @@ def daemon_http_get(self) -> None:
                                        None, calling_domain, False)
             write2(self, msg)
             return
-        elif catalog_type == 'csv':
+        if catalog_type == 'csv':
             # catalog as a CSV file for import into a spreadsheet
             msg = \
                 shares_catalog_csv_endpoint(self.server.base_dir,
@@ -2274,8 +2271,7 @@ def daemon_http_get(self) -> None:
     # manifest images used to show example screenshots
     # for use by app stores
-    if self.path == '/screenshot1.jpg' or \
-            self.path == '/screenshot2.jpg':
+    if self.path in ('/screenshot1.jpg', '/screenshot2.jpg'):
         screen_filename = \
             self.server.base_dir + '/img' + self.path
         if os.path.isfile(screen_filename):
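
The screenshot hunk also folds a two-clause or comparison into a single tuple membership test. The two forms are equivalent, as this small sketch with a hypothetical path value shows:

path = '/screenshot2.jpg'
old_check = (path == '/screenshot1.jpg' or path == '/screenshot2.jpg')
new_check = path in ('/screenshot1.jpg', '/screenshot2.jpg')
assert old_check == new_check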
@@ -2752,7 +2748,6 @@ def daemon_http_get(self) -> None:
             write2(self, msg)
             self.server.getreq_busy = False
             return
-        else:
-            hashtag = urllib.parse.unquote(hashtag_url.split('/')[-1])
-            tags_filename = \
-                self.server.base_dir + '/tags/' + hashtag + '.txt'
+        hashtag = urllib.parse.unquote(hashtag_url.split('/')[-1])
+        tags_filename = \
+            self.server.base_dir + '/tags/' + hashtag + '.txt'
@@ -4317,7 +4312,7 @@ def daemon_http_get(self) -> None:
                          self.server.debug)


-def _permitted_crawler_path(self, path: str) -> bool:
+def _permitted_crawler_path(path: str) -> bool:
     """Is the given path permitted to be crawled by a search engine?
     this should only allow through basic information, such as nodeinfo
     """