From 885a2b82c94951d0ad3cec92a233f952f96c65ae Mon Sep 17 00:00:00 2001
From: Bob Mottram <bob@libreserver.org>
Date: Sun, 6 Mar 2022 13:26:06 +0000
Subject: [PATCH 01/10] Comment

---
 crawlers.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/crawlers.py b/crawlers.py
index 6ec5c43d0..1fa028c8d 100644
--- a/crawlers.py
+++ b/crawlers.py
@@ -73,7 +73,8 @@ def blocked_user_agent(calling_domain: str, agent_str: str,
     agent_domain = None

     if agent_str:
-        # is this a web crawler? If so the block it
+        # is this a web crawler? If so then block it by default
+        # unless this is a news instance or if it is in the allowed list
         if 'bot/' in agent_str_lower or 'bot-' in agent_str_lower:
             # if this is a news instance then we want it
             # to be indexed by search engines

From 9be61c2b38a0c9d16ca212b91aeb1af3b4ecc78d Mon Sep 17 00:00:00 2001
From: Bob Mottram <bob@libreserver.org>
Date: Sun, 6 Mar 2022 14:02:26 +0000
Subject: [PATCH 02/10] Save a list of known web crawlers

---
 crawlers.py | 53 ++++++++++++++++++++++++++++++++++++++++++++++++++++-
 daemon.py   | 10 ++++++++--
 2 files changed, 60 insertions(+), 3 deletions(-)

diff --git a/crawlers.py b/crawlers.py
index 1fa028c8d..236a69fa3 100644
--- a/crawlers.py
+++ b/crawlers.py
@@ -7,6 +7,7 @@ __email__ = "bob@libreserver.org"
 __status__ = "Production"
 __module_group__ = "Core"

+import os
 import time
 from utils import save_json
 from utils import user_agent_domain
@@ -51,6 +52,51 @@ def update_known_crawlers(ua_str: str,
     return curr_time


+def load_known_web_crawlers(base_dir: str) -> []:
+    """Returns a list of known web crawlers
+    """
+    known_crawlers_filename = base_dir + '/accounts/known_crawlers.txt'
+    if not os.path.isfile(known_crawlers_filename):
+        return []
+    crawlers_str = None
+    try:
+        with open(known_crawlers_filename, 'r') as fp_crawlers:
+            crawlers_str = fp_crawlers.read()
+    except OSError:
+        print('EX: unable to load web crawlers from ' +
+              known_crawlers_filename)
+    if not crawlers_str:
+        return []
+    known_crawlers = []
+    crawlers_list = crawlers_str.split('\n')
+    for crawler in crawlers_list:
+        if not crawler:
+            continue
+        crawler = crawler.replace('\n', '').strip()
+        if not crawler:
+            continue
+        if crawler not in known_crawlers:
+            known_crawlers.append(crawler)
+    return known_crawlers
+
+
+def _save_known_web_crawlers(base_dir: str, known_crawlers: []) -> bool:
+    """Saves a list of known web crawlers
+    """
+    known_crawlers_filename = base_dir + '/accounts/known_crawlers.txt'
+    known_crawlers_str = ''
+    for crawler in known_crawlers:
+        known_crawlers_str += crawler.strip() + '\n'
+    try:
+        with open(known_crawlers_filename, 'w+') as fp_crawlers:
+            fp_crawlers.write(known_crawlers_str)
+    except OSError:
+        print("EX: unable to save known web crawlers to " +
+              known_crawlers_filename)
+        return False
+    return True
+
+
 def blocked_user_agent(calling_domain: str, agent_str: str,
                        news_instance: bool, debug: bool,
                        user_agents_blocked: [],
@@ -58,7 +104,8 @@ def blocked_user_agent(calling_domain: str, agent_str: str,
                        base_dir: str,
                        blocked_cache: [],
                        blocked_cache_update_secs: int,
-                       crawlers_allowed: []):
+                       crawlers_allowed: [],
+                       known_crawlers: []):
     """Should a GET or POST be blocked based upon its user agent?
     """
     if not agent_str:
@@ -76,6 +123,10 @@ def blocked_user_agent(calling_domain: str, agent_str: str,
         # is this a web crawler? If so then block it by default
         # unless this is a news instance or if it is in the allowed list
         if 'bot/' in agent_str_lower or 'bot-' in agent_str_lower:
+            if agent_str_lower not in known_crawlers:
+                known_crawlers.append(agent_str_lower)
+                known_crawlers.sort()
+                _save_known_web_crawlers(base_dir, known_crawlers)
             # if this is a news instance then we want it
             # to be indexed by search engines
             if news_instance:
diff --git a/daemon.py b/daemon.py
index 0d0f5c4b2..a3bfec417 100644
--- a/daemon.py
+++ b/daemon.py
@@ -380,6 +380,7 @@ from siteactive import referer_is_active
 from webapp_likers import html_likers_of_post
 from crawlers import update_known_crawlers
 from crawlers import blocked_user_agent
+from crawlers import load_known_web_crawlers
 import os


@@ -14008,7 +14009,8 @@ class PubServer(BaseHTTPRequestHandler):
                                self.server.base_dir,
                                self.server.blocked_cache,
                                self.server.blocked_cache_update_secs,
-                               self.server.crawlers_allowed)
+                               self.server.crawlers_allowed,
+                               self.server.known_crawlers)
         if block:
             self._400()
             return
@@ -18550,7+18552,8 @@ class PubServer(BaseHTTPRequestHandler):
                                self.server.base_dir,
                                self.server.blocked_cache,
                                self.server.blocked_cache_update_secs,
-                               self.server.crawlers_allowed)
+                               self.server.crawlers_allowed,
+                               self.server.known_crawlers)
         if block:
             self._400()
             self.server.postreq_busy = False
@@ -19666,6 +19669,9 @@ def run_daemon(crawlers_allowed: [],
     # list of crawler bots permitted within the User-Agent header
     httpd.crawlers_allowed = crawlers_allowed

+    # list of web crawlers known to the system
+    httpd.known_crawlers = load_known_web_crawlers(base_dir)
+
     httpd.unit_test = unit_test
     httpd.allow_local_network_access = allow_local_network_access
     if unit_test:

From 5f1f973d85fa520e1b5e2152cf01c991049b498c Mon Sep 17 00:00:00 2001
From: Bob Mottram <bob@libreserver.org>
Date: Sun, 6 Mar 2022 14:11:11 +0000
Subject: [PATCH 03/10] Use a different filename to distinguish from
 api/nodeinfo access

---
 crawlers.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/crawlers.py b/crawlers.py
index 236a69fa3..ff12730ca 100644
--- a/crawlers.py
+++ b/crawlers.py
@@ -55,7 +55,7 @@ def load_known_web_crawlers(base_dir: str) -> []:
     """Returns a list of known web crawlers
     """
-    known_crawlers_filename = base_dir + '/accounts/known_crawlers.txt'
+    known_crawlers_filename = base_dir + '/accounts/known_web_bots.txt'
     if not os.path.isfile(known_crawlers_filename):
         return []
     crawlers_str = None
@@ -83,7 +83,7 @@ def load_known_web_crawlers(base_dir: str) -> []:
 def _save_known_web_crawlers(base_dir: str, known_crawlers: []) -> bool:
     """Saves a list of known web crawlers
     """
-    known_crawlers_filename = base_dir + '/accounts/known_crawlers.txt'
+    known_crawlers_filename = base_dir + '/accounts/known_web_bots.txt'
     known_crawlers_str = ''
     for crawler in known_crawlers:
         known_crawlers_str += crawler.strip() + '\n'

From 5b218919b3d8f53d59bdd18ec09c07a18fc08f32 Mon Sep 17 00:00:00 2001
From: Bob Mottram <bob@libreserver.org>
Date: Sun, 6 Mar 2022 14:20:25 +0000
Subject: [PATCH 04/10] Change variable name to avoid confusion

---
 crawlers.py | 52 ++++++++++++++++++++++++++--------------------------
 daemon.py   |  8 ++++----
 2 files changed, 30 insertions(+), 30 deletions(-)

diff --git a/crawlers.py b/crawlers.py
index ff12730ca..cbb102e1b 100644
--- a/crawlers.py
+++ b/crawlers.py
@@ -52,22 +52,22 @@ def update_known_crawlers(ua_str: str,
     return curr_time


-def load_known_web_crawlers(base_dir: str) -> []:
-    """Returns a list of known web crawlers
+def load_known_web_bots(base_dir: str) -> []:
+    """Returns a list of known web bots
     """
-    known_crawlers_filename = base_dir + '/accounts/known_web_bots.txt'
-    if not os.path.isfile(known_crawlers_filename):
+    known_bots_filename = base_dir + '/accounts/known_web_bots.txt'
+    if not os.path.isfile(known_bots_filename):
         return []
     crawlers_str = None
     try:
-        with open(known_crawlers_filename, 'r') as fp_crawlers:
+        with open(known_bots_filename, 'r') as fp_crawlers:
             crawlers_str = fp_crawlers.read()
     except OSError:
-        print('EX: unable to load web crawlers from ' +
-              known_crawlers_filename)
+        print('EX: unable to load web bots from ' +
+              known_bots_filename)
     if not crawlers_str:
         return []
-    known_crawlers = []
+    known_bots = []
     crawlers_list = crawlers_str.split('\n')
     for crawler in crawlers_list:
         if not crawler:
@@ -75,24 +75,24 @@ def load_known_web_crawlers(base_dir: str) -> []:
         crawler = crawler.replace('\n', '').strip()
         if not crawler:
             continue
-        if crawler not in known_crawlers:
-            known_crawlers.append(crawler)
-    return known_crawlers
+        if crawler not in known_bots:
+            known_bots.append(crawler)
+    return known_bots


-def _save_known_web_crawlers(base_dir: str, known_crawlers: []) -> bool:
-    """Saves a list of known web crawlers
+def _save_known_web_bots(base_dir: str, known_bots: []) -> bool:
+    """Saves a list of known web bots
     """
-    known_crawlers_filename = base_dir + '/accounts/known_web_bots.txt'
-    known_crawlers_str = ''
-    for crawler in known_crawlers:
-        known_crawlers_str += crawler.strip() + '\n'
+    known_bots_filename = base_dir + '/accounts/known_web_bots.txt'
+    known_bots_str = ''
+    for crawler in known_bots:
+        known_bots_str += crawler.strip() + '\n'
     try:
-        with open(known_crawlers_filename, 'w+') as fp_crawlers:
-            fp_crawlers.write(known_crawlers_str)
+        with open(known_bots_filename, 'w+') as fp_crawlers:
+            fp_crawlers.write(known_bots_str)
     except OSError:
-        print("EX: unable to save known web crawlers to " +
-              known_crawlers_filename)
+        print("EX: unable to save known web bots to " +
+              known_bots_filename)
         return False
     return True
@@ -105,7 +105,7 @@ def blocked_user_agent(calling_domain: str, agent_str: str,
                        blocked_cache: [],
                        blocked_cache_update_secs: int,
                        crawlers_allowed: [],
-                       known_crawlers: []):
+                       known_bots: []):
     """Should a GET or POST be blocked based upon its user agent?
     """
     if not agent_str:
@@ -123,10 +123,10 @@ def blocked_user_agent(calling_domain: str, agent_str: str,
         # is this a web crawler? If so then block it by default
         # unless this is a news instance or if it is in the allowed list
         if 'bot/' in agent_str_lower or 'bot-' in agent_str_lower:
-            if agent_str_lower not in known_crawlers:
-                known_crawlers.append(agent_str_lower)
-                known_crawlers.sort()
-                _save_known_web_crawlers(base_dir, known_crawlers)
+            if agent_str_lower not in known_bots:
+                known_bots.append(agent_str_lower)
+                known_bots.sort()
+                _save_known_web_bots(base_dir, known_bots)
             # if this is a news instance then we want it
             # to be indexed by search engines
             if news_instance:
diff --git a/daemon.py b/daemon.py
index a3bfec417..9eb7182b7 100644
--- a/daemon.py
+++ b/daemon.py
@@ -380,7 +380,7 @@ from siteactive import referer_is_active
 from webapp_likers import html_likers_of_post
 from crawlers import update_known_crawlers
 from crawlers import blocked_user_agent
-from crawlers import load_known_web_crawlers
+from crawlers import load_known_web_bots
 import os


@@ -14010,7 +14010,7 @@ class PubServer(BaseHTTPRequestHandler):
                                self.server.blocked_cache,
                                self.server.blocked_cache_update_secs,
                                self.server.crawlers_allowed,
-                               self.server.known_crawlers)
+                               self.server.known_bots)
         if block:
             self._400()
             return
@@ -18553,7 +18553,7 @@ class PubServer(BaseHTTPRequestHandler):
                                self.server.blocked_cache,
                                self.server.blocked_cache_update_secs,
                                self.server.crawlers_allowed,
-                               self.server.known_crawlers)
+                               self.server.known_bots)
         if block:
             self._400()
             self.server.postreq_busy = False
@@ -19670,7 +19670,7 @@ def run_daemon(crawlers_allowed: [],
     httpd.crawlers_allowed = crawlers_allowed

     # list of web crawlers known to the system
-    httpd.known_crawlers = load_known_web_crawlers(base_dir)
+    httpd.known_bots = load_known_web_bots(base_dir)

     httpd.unit_test = unit_test
     httpd.allow_local_network_access = allow_local_network_access

From 89a9ece06204a5725e9d957e92c445459633cb6d Mon Sep 17 00:00:00 2001
From: Bob Mottram <bob@libreserver.org>
Date: Sun, 6 Mar 2022 14:21:49 +0000
Subject: [PATCH 05/10] Change filename for consistency

---
 crawlers.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/crawlers.py b/crawlers.py
index cbb102e1b..952f3ffdd 100644
--- a/crawlers.py
+++ b/crawlers.py
@@ -55,7 +55,7 @@ def load_known_web_bots(base_dir: str) -> []:
     """Returns a list of known web bots
     """
-    known_bots_filename = base_dir + '/accounts/known_web_bots.txt'
+    known_bots_filename = base_dir + '/accounts/knownBots.txt'
     if not os.path.isfile(known_bots_filename):
         return []
     crawlers_str = None
@@ -83,7 +83,7 @@ def load_known_web_bots(base_dir: str) -> []:
 def _save_known_web_bots(base_dir: str, known_bots: []) -> bool:
     """Saves a list of known web bots
     """
-    known_bots_filename = base_dir + '/accounts/known_web_bots.txt'
+    known_bots_filename = base_dir + '/accounts/knownBots.txt'
     known_bots_str = ''
     for crawler in known_bots:
         known_bots_str += crawler.strip() + '\n'

From 6247dc31b83f377c40f1eb152f97ac3b65705d17 Mon Sep 17 00:00:00 2001
From: Bob Mottram <bob@libreserver.org>
Date: Sun, 6 Mar 2022 15:05:58 +0000
Subject: [PATCH 06/10] Change terminology

---
 daemon.py            | 18 ++++++++++++++++++
 translations/ar.json |  3 ++-
 translations/ca.json |  3 ++-
 translations/cy.json |  3 ++-
 translations/de.json |  3 ++-
 translations/en.json |  3 ++-
 translations/es.json |  3 ++-
 translations/fr.json |  3 ++-
 translations/ga.json |  3 ++-
 translations/hi.json |  3 ++-
 translations/it.json |  3 ++-
 translations/ja.json |  3 ++-
 translations/ko.json |  3 ++-
 translations/ku.json |  3 ++-
 translations/oc.json |  3 ++-
 translations/pl.json |  3 ++-
 translations/pt.json |  3 ++-
 translations/ru.json |  3 ++-
 translations/sw.json |  3 ++-
 translations/uk.json |  3 ++-
 translations/zh.json |  3 ++-
 webapp_profile.py    |  6 +++++-
 22 files changed, 63 insertions(+), 21 deletions(-)

diff --git a/daemon.py b/daemon.py
index 9eb7182b7..cd615e3f9 100644
--- a/daemon.py
+++ b/daemon.py
@@ -14187,6 +14187,24 @@ class PubServer(BaseHTTPRequestHandler):
                                 '_GET', 'isAuthorized',
                                 self.server.debug)

+        if authorized and self.path.endswith('/bots.txt'):
+            known_bots_str = ''
+            for bot_name in self.server.known_bots:
+                known_bots_str += bot_name + '\n'
+            # TODO
+            msg = known_bots_str.encode('utf-8')
+            msglen = len(msg)
+            self._set_headers('text/plain; charset=utf-8',
+                              msglen, None, calling_domain, True)
+            self._write(msg)
+            if self.server.debug:
+                print('Sent known bots: ' +
+                      self.server.path + ' ' + calling_domain)
+            fitness_performance(getreq_start_time, self.server.fitness,
+                                '_GET', 'get_known_bots',
+                                self.server.debug)
+            return
+
         # shared items catalog for this instance
         # this is only accessible to instance members or to
         # other instances which present an authorization token
diff --git a/translations/ar.json b/translations/ar.json
index 69bb8642f..35c885213 100644
--- a/translations/ar.json
+++ b/translations/ar.json
@@ -516,5 +516,6 @@
     "Show who repeated this post": "أظهر من كرر هذا المنصب",
     "Repeated by": "يتكرر بواسطة",
     "Register": "يسجل",
-    "Web Crawlers Allowed": "برامج زحف الويب المسموح بها"
+    "Web Bots Allowed": "مسموح روبوتات الويب",
+    "Known Web Bots": "برامج روبوت الويب المعروفة"
 }
diff --git a/translations/ca.json b/translations/ca.json
index 3dc675148..11e2bd551 100644
--- a/translations/ca.json
+++ b/translations/ca.json
@@ -516,5 +516,6 @@
     "Show who repeated this post": "Mostra qui ha repetit aquesta publicació",
     "Repeated by": "Repetit per",
     "Register": "Registra't",
-    "Web Crawlers Allowed": "Es permeten rastrejadors web"
+    "Web Bots Allowed": "Bots web permesos",
+    "Known Web Bots": "Bots web coneguts"
 }
diff --git a/translations/cy.json b/translations/cy.json
index 8ce0051e9..6ed6c0992 100644
--- a/translations/cy.json
+++ b/translations/cy.json
@@ -516,5 +516,6 @@
     "Show who repeated this post": "Dangoswch pwy ailadroddodd y post hwn",
     "Repeated by": "Ailadrodd gan",
     "Register": "Cofrestrwch",
-    "Web Crawlers Allowed": "Caniatáu Ymlusgwyr Gwe"
+    "Web Bots Allowed": "Web Bots a Ganiateir",
+    "Known Web Bots": "Web Bots Hysbys"
 }
diff --git a/translations/de.json b/translations/de.json
index cb3ee15b2..69c2ca212 100644
--- a/translations/de.json
+++ b/translations/de.json
@@ -516,5 +516,6 @@
     "Show who repeated this post": "Zeigen Sie, wer diesen Beitrag wiederholt hat",
     "Repeated by": "Wiederholt von",
     "Register": "Registrieren",
-    "Web Crawlers Allowed": "Webcrawler erlaubt"
+    "Web Bots Allowed": "Webbots erlaubt",
+    "Known Web Bots": "Bekannte Webbots"
 }
diff --git a/translations/en.json b/translations/en.json
index 6391accd0..89d699a44 100644
--- a/translations/en.json
+++ b/translations/en.json
@@ -516,5 +516,6 @@
     "Show who repeated this post": "Show who repeated this post",
     "Repeated by": "Repeated by",
     "Register": "Register",
-    "Web Crawlers Allowed": "Web Crawlers Allowed"
+    "Web Bots Allowed": "Web Bots Allowed",
+    "Known Web Bots": "Known Web Bots"
 }
diff --git a/translations/es.json b/translations/es.json
index e91eb9d20..9c707222b 100644
--- a/translations/es.json
+++ b/translations/es.json
@@ -516,5 +516,6 @@
     "Show who repeated this post": "Mostrar quién repitió esta publicación",
     "Repeated by": "Repetido por",
     "Register": "Registrarse",
-    "Web Crawlers Allowed": "Rastreadores web permitidos"
+    "Web Bots Allowed": "Bots web permitidos",
+    "Known Web Bots": "Bots web conocidos"
 }
diff --git a/translations/fr.json b/translations/fr.json
index 429016e58..98c4fb260 100644
--- a/translations/fr.json
+++ b/translations/fr.json
@@ -516,5 +516,6 @@
     "Show who repeated this post": "Montrer qui a répété ce post",
     "Repeated by": "Répété par",
     "Register": "S'inscrire",
-    "Web Crawlers Allowed": "Robots d'exploration Web autorisés"
+    "Web Bots Allowed": "Robots Web autorisés",
+    "Known Web Bots": "Robots Web connus"
 }
diff --git a/translations/ga.json b/translations/ga.json
index 0589b6d87..675299266 100644
--- a/translations/ga.json
+++ b/translations/ga.json
@@ -516,5 +516,6 @@
     "Show who repeated this post": "Taispeáin cé a rinne an postáil seo arís",
     "Repeated by": "Arís agus arís eile ag",
     "Register": "Clár",
-    "Web Crawlers Allowed": "Crawlers Gréasáin Ceadaithe"
+    "Web Bots Allowed": "Róbónna Gréasáin Ceadaithe",
+    "Known Web Bots": "Róbónna Gréasáin Aitheanta"
 }
diff --git a/translations/hi.json b/translations/hi.json
index d1ac37dee..0f746a2c5 100644
--- a/translations/hi.json
+++ b/translations/hi.json
@@ -516,5 +516,6 @@
     "Show who repeated this post": "दिखाएं कि इस पोस्ट को किसने दोहराया",
     "Repeated by": "द्वारा दोहराया गया",
     "Register": "रजिस्टर करें",
-    "Web Crawlers Allowed": "वेब क्रॉलर की अनुमति है"
+    "Web Bots Allowed": "वेब बॉट्स की अनुमति है",
+    "Known Web Bots": "ज्ञात वेब बॉट्स"
 }
diff --git a/translations/it.json b/translations/it.json
index 3c0c311ba..45e94e746 100644
--- a/translations/it.json
+++ b/translations/it.json
@@ -516,5 +516,6 @@
     "Show who repeated this post": "Mostra chi ha ripetuto questo post",
     "Repeated by": "Ripetuto da",
     "Register": "Registrati",
-    "Web Crawlers Allowed": "Web crawler consentiti"
+    "Web Bots Allowed": "Web bot consentiti",
+    "Known Web Bots": "Webbot noti"
 }
diff --git a/translations/ja.json b/translations/ja.json
index fb3f075a3..5617d44b9 100644
--- a/translations/ja.json
+++ b/translations/ja.json
@@ -516,5 +516,6 @@
     "Show who repeated this post": "この投稿を繰り返した人を表示する",
     "Repeated by": "によって繰り返される",
     "Register": "登録",
-    "Web Crawlers Allowed": "許可されるWebクローラー"
+    "Web Bots Allowed": "許可されたWebボット",
+    "Known Web Bots": "既知のWebボット"
 }
diff --git a/translations/ko.json b/translations/ko.json
index 19d6a6b26..99d58e32b 100644
--- a/translations/ko.json
+++ b/translations/ko.json
@@ -516,5 +516,6 @@
     "Show who repeated this post": "이 포스트를 반복한 사람 표시",
     "Repeated by": "반복한 사람",
     "Register": "등록",
-    "Web Crawlers Allowed": "웹 크롤러 허용"
+    "Web Bots Allowed": "웹 봇 허용",
+    "Known Web Bots": "알려진 웹 봇"
 }
diff --git a/translations/ku.json b/translations/ku.json
index f55c059cf..c899c8213 100644
--- a/translations/ku.json
+++ b/translations/ku.json
@@ -516,5 +516,6 @@
     "Show who repeated this post": "Nîşan bide kê ev post dubare kiriye",
     "Repeated by": "Ji hêla dubare kirin",
     "Register": "Fêhrist",
-    "Web Crawlers Allowed": "Crawlers Web Destûrdar in"
+    "Web Bots Allowed": "Web Bots Destûrdar in",
+    "Known Web Bots": "Botên Webê yên naskirî"
 }
diff --git a/translations/oc.json b/translations/oc.json
index c5b280708..7ebebc7e3 100644
--- a/translations/oc.json
+++ b/translations/oc.json
@@ -512,5 +512,6 @@
     "Show who repeated this post": "Show who repeated this post",
     "Repeated by": "Repeated by",
     "Register": "Register",
-    "Web Crawlers Allowed": "Web Crawlers Allowed"
+    "Web Bots Allowed": "Web Bots Allowed",
+    "Known Web Bots": "Known Web Bots"
 }
diff --git a/translations/pl.json b/translations/pl.json
index bc96da46c..e7d95a096 100644
--- a/translations/pl.json
+++ b/translations/pl.json
@@ -516,5 +516,6 @@
     "Show who repeated this post": "Pokaż, kto powtórzył ten post",
     "Repeated by": "Powtórzone przez",
     "Register": "Zarejestrować",
-    "Web Crawlers Allowed": "Dozwolone roboty sieciowe"
+    "Web Bots Allowed": "Dozwolone boty internetowe",
+    "Known Web Bots": "Znane boty internetowe"
 }
diff --git a/translations/pt.json b/translations/pt.json
index 03cd5e5aa..fe0fb98c3 100644
--- a/translations/pt.json
+++ b/translations/pt.json
@@ -516,5 +516,6 @@
     "Show who repeated this post": "Mostrar quem repetiu esta postagem",
     "Repeated by": "Repetido por",
     "Register": "Registro",
-    "Web Crawlers Allowed": "Rastreadores da Web permitidos"
+    "Web Bots Allowed": "Webbots permitidos",
+    "Known Web Bots": "Webbots conhecidos"
 }
diff --git a/translations/ru.json b/translations/ru.json
index 762a7d5cc..a05e33272 100644
--- a/translations/ru.json
+++ b/translations/ru.json
@@ -516,5 +516,6 @@
     "Show who repeated this post": "Показать, кто повторил этот пост",
     "Repeated by": "Повторено",
     "Register": "регистр",
-    "Web Crawlers Allowed": "Веб-сканеры разрешены"
+    "Web Bots Allowed": "Веб-боты разрешены",
+    "Known Web Bots": "Известные веб-боты"
 }
diff --git a/translations/sw.json b/translations/sw.json
index 4be3a608f..bf57db233 100644
--- a/translations/sw.json
+++ b/translations/sw.json
@@ -516,5 +516,6 @@
     "Show who repeated this post": "Onyesha ni nani aliyerudia chapisho hili",
     "Repeated by": "Imerudiwa na",
     "Register": "Sajili",
-    "Web Crawlers Allowed": "Watambazaji Wavuti Zinaruhusiwa"
+    "Web Bots Allowed": "Mtandao wa Boti Unaruhusiwa",
+    "Known Web Bots": "Boti za Wavuti zinazojulikana"
 }
diff --git a/translations/uk.json b/translations/uk.json
index 32c2407e9..4208ca9a0 100644
--- a/translations/uk.json
+++ b/translations/uk.json
@@ -516,5 +516,6 @@
     "Show who repeated this post": "Покажіть, хто повторив цей пост",
     "Repeated by": "Повторюється за",
     "Register": "Реєстрація",
-    "Web Crawlers Allowed": "Веб-сканери дозволені"
+    "Web Bots Allowed": "Веб-боти дозволені",
+    "Known Web Bots": "Відомі веб-боти"
 }
diff --git a/translations/zh.json b/translations/zh.json
index 784009f13..d0373b9bc 100644
--- a/translations/zh.json
+++ b/translations/zh.json
@@ -516,5 +516,6 @@
     "Show who repeated this post": "显示谁重复了这篇文章",
     "Repeated by": "重复",
     "Register": "登记",
-    "Web Crawlers Allowed": "允许网络爬虫"
+    "Web Bots Allowed": "允许网络机器人",
+    "Known Web Bots": "已知的网络机器人"
 }
diff --git a/webapp_profile.py b/webapp_profile.py
index f57dc07d7..dc9d4a7a0 100644
--- a/webapp_profile.py
+++ b/webapp_profile.py
@@ -1814,9 +1814,13 @@ def _html_edit_profile_filtering(base_dir: str, nickname: str, domain: str,
             crawlers_allowed_str += '\n'
         crawlers_allowed_str += uagent
     edit_profile_form += \
-        edit_text_area(translate['Web Crawlers Allowed'],
+        edit_text_area(translate['Web Bots Allowed'],
                        'crawlersAllowedStr', crawlers_allowed_str,
                        200, '', False)
+    edit_profile_form += \
+        '