mirror of https://gitlab.com/bashrc2/epicyon
Can use a custom robots.txt file
parent 7ba5ecb675
commit 18cdf1d5f3

daemon.py (22 changed lines)
@@ -473,6 +473,14 @@ class EpicyonServer(ThreadingHTTPServer):
     qrcode_scale = 6
     instance_description = ''
     instance_description_short = 'Epicyon'
+    robots_txt = None
+    last_llm_time = None
+    watermark_width_percent = 0
+    watermark_position = 0
+    watermark_opacity = 0
+    headers_catalog = {}
+    dictionary = []
+    twograms = {}

     def handle_error(self, request, client_address):
         # surpress connection reset errors
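The new attributes are declared as class-level defaults on the `ThreadingHTTPServer` subclass, so request handlers can always read them through the standard `self.server` reference even before `run_daemon` assigns per-instance values. A minimal sketch of that pattern, using hypothetical names rather than Epicyon's own classes:

```python
from http.server import ThreadingHTTPServer, BaseHTTPRequestHandler


class DemoServer(ThreadingHTTPServer):
    # class-level default: readable on any instance before run-time setup
    robots_txt = None


httpd = DemoServer(('127.0.0.1', 0), BaseHTTPRequestHandler)
print(httpd.robots_txt)   # None, falling back to the class attribute
httpd.robots_txt = "User-agent: *\nDisallow: /\n"
print(httpd.robots_txt)   # instance value now shadows the class default
httpd.server_close()
```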
@@ -694,6 +702,20 @@ def run_daemon(accounts_data_dir: str,
     # the last time when an LLM scraper was replied to
     httpd.last_llm_time = None

+    # if a custom robots.txt exists then read it
+    robots_txt_filename = data_dir(base_dir) + '/robots.txt'
+    httpd.robots_txt = None
+    if os.path.isfile(robots_txt_filename):
+        new_robots_txt = ''
+        try:
+            with open(robots_txt_filename, 'r',
+                      encoding='utf-8') as fp_robots:
+                new_robots_txt = fp_robots.read()
+        except OSError:
+            print('EX: error reading ' + robots_txt_filename)
+        if new_robots_txt:
+            httpd.robots_txt = new_robots_txt
+
     # width, position and opacity of watermark applied to attached images
     # as a percentage of the attached image width
     httpd.watermark_width_percent = watermark_width_percent
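In practice this means an instance admin can drop a `robots.txt` into the data directory and it will be picked up at the next restart; if the file is missing or unreadable, `httpd.robots_txt` stays `None` and the built-in default is served. A hedged sketch of creating such a file — the paths here are assumptions for illustration, since `data_dir()` resolves the real location at runtime:

```python
import os

# assumed install location and data directory; adjust to the real instance
base_dir = '/var/www/epicyon'
data_directory = os.path.join(base_dir, 'accounts')

custom_robots = (
    "User-agent: GPTBot\n"
    "Disallow: /\n"
    "\n"
    "User-agent: *\n"
    "Allow: /\n"
)

os.makedirs(data_directory, exist_ok=True)
with open(os.path.join(data_directory, 'robots.txt'), 'w',
          encoding='utf-8') as fp:
    fp.write(custom_robots)
```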
@ -262,7 +262,10 @@ def daemon_http_get(self) -> None:
|
|||
|
||||
# handle robots.txt
|
||||
if self.path == '/robots.txt':
|
||||
msg = "User-agent: *\nAllow: /"
|
||||
if self.server.robots_txt:
|
||||
msg = self.server.robots_txt
|
||||
else:
|
||||
msg = "User-agent: *\nAllow: /"
|
||||
msg = msg.encode('utf-8')
|
||||
msglen = len(msg)
|
||||
set_headers(self, 'text/plain', msglen,
|
||||
|
|
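The GET handler now prefers the custom text loaded at startup and only falls back to the permissive `User-agent: *` / `Allow: /` default when no custom file was provided. A quick way to verify the behaviour against a running instance (the domain below is a placeholder):

```python
import urllib.request

# fetch /robots.txt from a running instance and inspect the result
with urllib.request.urlopen('https://example-instance.example/robots.txt') as resp:
    print(resp.headers.get('Content-Type'))   # expected: text/plain
    print(resp.read().decode('utf-8'))        # custom file if present, else the default
```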