mirror of https://gitlab.com/bashrc2/epicyon
Set robots.txt within edit profile screen
parent 18cdf1d5f3
commit 6b1d322663
@@ -712,7 +712,7 @@ def run_daemon(accounts_data_dir: str,
                       encoding='utf-8') as fp_robots:
                 new_robots_txt = fp_robots.read()
         except OSError:
-            print('EX: error reading ' + robots_txt_filename)
+            print('EX: error reading 1 ' + robots_txt_filename)
         if new_robots_txt:
             httpd.robots_txt = new_robots_txt
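The value cached on httpd.robots_txt above is what the daemon can hand back when a crawler requests /robots.txt. A minimal sketch of such a GET handler follows; the method name and the use of the standard BaseHTTPRequestHandler response calls are assumptions for illustration, not the project's actual handler.

    def _get_robots_txt_sketch(self) -> None:
        """Illustration only: answer GET /robots.txt from the cached value"""
        # fall back to an empty policy if nothing has been configured yet
        robots_txt = self.server.robots_txt or ''
        msg = robots_txt.encode('utf-8')
        self.send_response(200)
        self.send_header('Content-Type', 'text/plain; charset=utf-8')
        self.send_header('Content-Length', str(len(msg)))
        self.end_headers()
        self.wfile.write(msg)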
@@ -300,6 +300,27 @@ def _profile_post_block_federated(base_dir: str, fields: {}, self) -> None:
         self.server.block_federated = []
 
 
+def _profile_post_robots_txt(base_dir: str, fields: {}, self) -> None:
+    """ HTTP POST save robots.txt file
+    """
+    new_robots_txt = ''
+    if fields.get('robotsTxt'):
+        new_robots_txt = fields['robotsTxt']
+    if str(self.server.robots_txt) != str(new_robots_txt):
+        if not new_robots_txt:
+            self.server.robots_txt = ''
+        else:
+            robots_txt_filename = data_dir(base_dir) + '/robots.txt'
+            try:
+                with open(robots_txt_filename, 'w+',
+                          encoding='utf-8') as fp_robots:
+                    fp_robots.write(new_robots_txt)
+            except OSError:
+                print('EX: unable to save ' + robots_txt_filename)
+
+            self.server.robots_txt = new_robots_txt
+
+
 def _profile_post_buy_domains(base_dir: str, fields: {}, self) -> None:
     """ HTTP POST save allowed buy domains
     """
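A hypothetical round trip through the new handler, assuming fields has already been parsed from the submitted edit profile form and that self.server carries a robots_txt attribute and a writable data directory (illustration only, not part of the commit):

    fields = {'robotsTxt': 'User-agent: *\nDisallow: /users/'}
    _profile_post_robots_txt(base_dir, fields, self)
    # self.server.robots_txt now holds the submitted text and it has been
    # written to the data directory's robots.txt; posting an empty field
    # instead clears self.server.robots_txt without rewriting the file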
@@ -3291,6 +3312,7 @@ def profile_edit(self, calling_domain: str, cookie: str,
     _profile_post_crawlers_allowed(base_dir, fields, self)
     _profile_post_buy_domains(base_dir, fields, self)
     _profile_post_block_federated(base_dir, fields, self)
+    _profile_post_robots_txt(base_dir, fields, self)
     _profile_post_peertube_instances(base_dir, fields, self,
                                      peertube_instances)
 
@@ -2655,6 +2655,23 @@ def _html_edit_profile_filtering(base_dir: str, nickname: str, domain: str,
                        block_federated_endpoints_list_str,
                        200, '', False)
 
+    robots_txt_filename = data_dir(base_dir) + '/robots.txt'
+    robots_txt = ''
+    if os.path.isfile(robots_txt_filename):
+        new_robots_txt = ''
+        try:
+            with open(robots_txt_filename, 'r',
+                      encoding='utf-8') as fp_robots:
+                new_robots_txt = fp_robots.read()
+        except OSError:
+            print('EX: error reading 2 ' + robots_txt_filename)
+        if new_robots_txt:
+            robots_txt = new_robots_txt
+    edit_profile_form += \
+        edit_text_area("robots.txt", None,
+                       'robotsTxt', robots_txt,
+                       200, '', False)
+
     idx = 'Block military instances'
     if translate.get(idx):
         name = translate[idx]
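For context, edit_text_area() is assumed here to render an HTML textarea whose name attribute ('robotsTxt') matches the field later read back by _profile_post_robots_txt(). A rough stand-in for illustration, not the project's actual helper:

    def edit_text_area_sketch(label: str, name: str, value: str,
                              height: int) -> str:
        """Illustration only: minimal stand-in for edit_text_area()"""
        return '<label for="' + name + '">' + label + '</label>\n' + \
            '<textarea id="' + name + '" name="' + name + '" ' + \
            'style="height:' + str(height) + 'px">' + value + '</textarea>\n'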