forked from indymedia/epicyon
Implement robots.txt

I know this is widely ignored by search engines, because adhering to it is not in their business interests, but at least we tried.

branch master
parent 05703db1a3
commit 4ab1499d5b
1 changed file: daemon.py (+20 −0)
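
As background for the diff below: robots.txt is a plain-text policy file that crawlers fetch from the site root, and the X-Robots-Tag response header is the HTTP-level counterpart of a noindex meta tag. A minimal sketch of what a well-behaved crawler sees after this change, assuming a local instance is listening on localhost:8080 (hostname and port are placeholders, not Epicyon defaults):

import urllib.request

# Placeholder address for a locally running instance.
url = 'http://localhost:8080/robots.txt'

with urllib.request.urlopen(url) as response:
    # Body served by _robotsTxt(): a blanket disallow-all policy.
    print(response.read().decode('utf-8'))        # User-agent: *
                                                  # Disallow: /
    # Header this commit adds throughout daemon.py.
    print(response.headers.get('X-Robots-Tag'))   # noindex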
@@ -170,6 +170,7 @@ class PubServer(BaseHTTPRequestHandler):
         self.send_header('Content-Length', str(length))
         self.send_header('Host', self.server.domainFull)
         self.send_header('WWW-Authenticate', 'title="Login to Epicyon", Basic realm="epicyon"')
+        self.send_header('X-Robots-Tag','noindex')
         self.end_headers()
 
     def _set_headers(self,fileFormat: str,length: int,cookie: str) -> None:
@@ -180,6 +181,7 @@ class PubServer(BaseHTTPRequestHandler):
         self.send_header('Cookie', cookie)
         self.send_header('Host', self.server.domainFull)
         self.send_header('InstanceID', self.server.instanceId)
+        self.send_header('X-Robots-Tag','noindex')
         self.end_headers()
 
     def _redirect_headers(self,redirect: str,cookie: str) -> None:
@@ -191,6 +193,7 @@ class PubServer(BaseHTTPRequestHandler):
         self.send_header('Host', self.server.domainFull)
         self.send_header('InstanceID', self.server.instanceId)
         self.send_header('Content-Length', '0')
+        self.send_header('X-Robots-Tag','noindex')
         self.end_headers()
 
     def _404(self) -> None:
@@ -198,6 +201,7 @@ class PubServer(BaseHTTPRequestHandler):
         self.send_response(404)
         self.send_header('Content-Type', 'text/html; charset=utf-8')
         self.send_header('Content-Length', str(len(msg)))
+        self.send_header('X-Robots-Tag','noindex')
         self.end_headers()
         try:
             self.wfile.write(msg)
@@ -205,6 +209,15 @@ class PubServer(BaseHTTPRequestHandler):
             print('Error when showing 404')
             print(e)
 
+    def _robotsTxt(self) -> bool:
+        if not self.path.lower().startswith('/robots.txt'):
+            return False
+        msg='User-agent: *\nDisallow: /'
+        msg=msg.encode('utf-8')
+        self._set_headers('text/plain; charset=utf-8',len(msg),None)
+        self.wfile.write(msg)
+        return True
+
     def _webfinger(self) -> bool:
         if not self.path.startswith('/.well-known'):
             return False
@@ -654,6 +667,7 @@ class PubServer(BaseHTTPRequestHandler):
             self.send_response(303)
             self.send_header('Location', '/login')
             self.send_header('Content-Length', '0')
+            self.send_header('X-Robots-Tag','noindex')
             self.end_headers()
             self.server.GETbusy=False
             return
@@ -862,6 +876,10 @@ class PubServer(BaseHTTPRequestHandler):
         if self._webfinger():
             self.server.GETbusy=False
             return
+        # send robots.txt if asked
+        if self._robotsTxt():
+            self.server.GETbusy=False
+            return
 
         if self.path.startswith('/login') or self.path=='/':
             # request basic auth
@@ -2220,6 +2238,7 @@ class PubServer(BaseHTTPRequestHandler):
             self.send_header('Content-Length', '0')
             self.send_header('Set-Cookie', 'epicyon=; SameSite=Strict')
             self.send_header('Location', '/login')
+            self.send_header('X-Robots-Tag','noindex')
             self.end_headers()
             self.server.POSTbusy=False
             return
@@ -2246,6 +2265,7 @@ class PubServer(BaseHTTPRequestHandler):
             self.send_header('Set-Cookie', 'epicyon='+self.server.tokens[loginNickname]+'; SameSite=Strict')
             self.send_header('Location', '/users/'+loginNickname+'/inbox')
             self.send_header('Content-Length', '0')
+            self.send_header('X-Robots-Tag','noindex')
             self.end_headers()
             self.server.POSTbusy=False
             return
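
The pattern this commit adds can be reproduced in isolation. The following is a minimal, self-contained sketch of the same technique using only Python's standard library; it is not Epicyon's actual server wiring, and the handler name and port are illustrative:

from http.server import BaseHTTPRequestHandler, HTTPServer

class RobotsHandler(BaseHTTPRequestHandler):
    # Illustrative only: serve a disallow-all robots.txt and tag
    # every response with X-Robots-Tag: noindex, as the commit does.

    def do_GET(self):
        if self.path.lower().startswith('/robots.txt'):
            msg = 'User-agent: *\nDisallow: /'.encode('utf-8')
            self.send_response(200)
            self.send_header('Content-Type', 'text/plain; charset=utf-8')
            self.send_header('Content-Length', str(len(msg)))
            self.send_header('X-Robots-Tag', 'noindex')
            self.end_headers()
            self.wfile.write(msg)
            return
        # Any other path: 404, still marked as non-indexable.
        self.send_response(404)
        self.send_header('Content-Length', '0')
        self.send_header('X-Robots-Tag', 'noindex')
        self.end_headers()

if __name__ == '__main__':
    HTTPServer(('localhost', 8080), RobotsHandler).serve_forever()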