Improve support for onion and i2p addresses existing alongside clearnet

main
Bob Mottram 2022-03-11 13:27:54 +00:00
parent 81695817a7
commit 794f82dd58
6 changed files with 167 additions and 52 deletions
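
For orientation: after this change the daemon keeps a separate HTTP session per network (clearnet, Tor, i2p) and picks one by the peer's domain suffix. A minimal, self-contained sketch of that idea follows; the helper names and the local proxy ports (9050 for a Tor SOCKS proxy, 4444 for an i2p HTTP proxy) are assumptions for illustration, not Epicyon's own create_session.

import requests


def proxy_type_for_domain(domain: str) -> str:
    """Classify a peer domain as 'tor', 'i2p' or '' for clearnet."""
    if domain.endswith('.onion'):
        return 'tor'
    if domain.endswith('.i2p'):
        return 'i2p'
    return ''


def make_session(proxy_type: str) -> requests.Session:
    """Create a session, routed through a local proxy where needed."""
    session = requests.Session()
    if proxy_type == 'tor':
        # assumes a local Tor SOCKS proxy and requests[socks] installed
        session.proxies = {'http': 'socks5h://127.0.0.1:9050',
                           'https': 'socks5h://127.0.0.1:9050'}
    elif proxy_type == 'i2p':
        # assumes a local i2p HTTP proxy
        session.proxies = {'http': 'http://127.0.0.1:4444',
                           'https': 'http://127.0.0.1:4444'}
    return session


# one long-lived session per network, mirroring the daemon changes below
sessions = {'': make_session(''),
            'tor': make_session('tor'),
            'i2p': make_session('i2p')}


def session_for(domain: str) -> requests.Session:
    """Pick the session matching the peer's network."""
    return sessions[proxy_type_for_domain(domain)]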

View File

@@ -141,6 +141,7 @@ def get_person_pub_key(base_dir: str, session, person_url: str,
person_cache: {}, debug: bool,
project_version: str, http_prefix: str,
domain: str, onion_domain: str,
i2p_domain: str,
signing_priv_key_pem: str) -> str:
if not person_url:
return None
@@ -162,6 +163,9 @@ def get_person_pub_key(base_dir: str, session, person_url: str,
if onion_domain:
if '.onion/' in person_url:
person_domain = onion_domain
elif i2p_domain:
if '.i2p/' in person_url:
person_domain = i2p_domain
profile_str = 'https://www.w3.org/ns/activitystreams'
accept_str = \
'application/activity+json; profile="' + profile_str + '"'
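
The hunk above threads i2p_domain into the public-key fetch so that lookups for .onion and .i2p actors resolve against the matching local domain. A simplified standalone illustration (hypothetical helper; unlike the patch, which only falls back to the i2p check when no onion domain is configured, this version checks both suffixes independently):

def resolve_person_domain(person_url: str, domain: str,
                          onion_domain: str, i2p_domain: str) -> str:
    """Choose the local domain variant used when fetching an actor's key."""
    if onion_domain and '.onion/' in person_url:
        return onion_domain
    if i2p_domain and '.i2p/' in person_url:
        return i2p_domain
    return domain


# example: a key request for an i2p actor resolves against the i2p domain
assert resolve_person_domain('http://abcd.i2p/users/alice',
                             'example.com', '', 'xyz.i2p') == 'xyz.i2p'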

View File

@@ -22,6 +22,9 @@ from hashlib import sha256
from hashlib import md5
from shutil import copyfile
from session import create_session
from session import get_session_for_domain
from session import get_session_for_domains
from session import set_session_for_sender
from webfinger import webfinger_meta
from webfinger import webfinger_node_info
from webfinger import webfinger_lookup
@@ -654,7 +657,7 @@ class PubServer(BaseHTTPRequestHandler):
print('DEBUG: creating new session during ' + calling_function)
curr_session = create_session(proxy_type)
if curr_session:
self.server.session = curr_session
set_session_for_sender(self.server, proxy_type, curr_session)
return curr_session
print('ERROR: GET failed to create session during ' +
calling_function)
@@ -693,7 +696,9 @@ class PubServer(BaseHTTPRequestHandler):
self.server.person_cache, self.server.debug,
self.server.project_version,
self.server.http_prefix,
self.server.domain, self.server.onion_domain,
self.server.domain,
self.server.onion_domain,
self.server.i2p_domain,
self.server.signing_priv_key_pem)
if not pub_key:
if self.server.debug:
@@ -1428,6 +1433,9 @@ class PubServer(BaseHTTPRequestHandler):
Client to server message post
https://www.w3.org/TR/activitypub/#client-to-server-outbox-delivery
"""
if not curr_session:
return False
city = self.server.city
if post_to_nickname:
@@ -14129,8 +14137,6 @@ class PubServer(BaseHTTPRequestHandler):
return False
def do_GET(self):
curr_session = self.server.session
proxy_type = self.server.proxy_type
calling_domain = self.server.domain_full
if self.headers.get('Host'):
@@ -14176,6 +14182,10 @@ class PubServer(BaseHTTPRequestHandler):
referer_domain = self._get_referer_domain(ua_str)
curr_session, proxy_type = \
get_session_for_domains(self.server,
calling_domain, referer_domain)
getreq_start_time = time.time()
fitness_performance(getreq_start_time, self.server.fitness,
@@ -18739,20 +18749,9 @@ class PubServer(BaseHTTPRequestHandler):
self._400()
def do_POST(self):
curr_session = self.server.session
proxy_type = self.server.proxy_type
postreq_start_time = time.time()
curr_session = \
self._establish_session("POST", curr_session,
proxy_type)
if not curr_session:
fitness_performance(postreq_start_time, self.server.fitness,
'_POST', 'create_session',
self.server.debug)
self._404()
return
if self.server.debug:
print('DEBUG: POST to ' + self.server.base_dir +
' path: ' + self.path + ' busy: ' +
@@ -18815,6 +18814,19 @@ class PubServer(BaseHTTPRequestHandler):
self.server.postreq_busy = False
return
curr_session, proxy_type = \
get_session_for_domain(self.server, calling_domain)
curr_session = \
self._establish_session("POST", curr_session,
proxy_type)
if not curr_session:
fitness_performance(postreq_start_time, self.server.fitness,
'_POST', 'create_session',
self.server.debug)
self._404()
return
# returns after this point should set postreq_busy to False
# remove any trailing slashes from the path
@@ -20090,7 +20102,8 @@ def run_daemon(crawlers_allowed: [],
httpd.favicons_cache = {}
httpd.proxy_type = proxy_type
httpd.session = None
httpd.session_last_update = 0
httpd.session_onion = None
httpd.session_i2p = None
httpd.last_getreq = 0
httpd.last_postreq = 0
httpd.getreq_busy = False
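
run_daemon now initialises per-network session slots plus a last-update timestamp on the server object. A sketch of just that added state (an assumed minimal structure, not the full HTTP server class):

class ServerState:
    """Minimal stand-in for the session attributes run_daemon sets up."""

    def __init__(self, proxy_type: str):
        self.proxy_type = proxy_type
        self.session = None            # clearnet (or primary) session
        self.session_onion = None      # extra session routed via Tor
        self.session_i2p = None        # extra session routed via i2p
        self.session_last_update = 0   # epoch seconds of last recreation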

inbox.py
View File

@@ -1824,7 +1824,8 @@ def _receive_delete(session, handle: str, is_group: bool, base_dir: str,
def _receive_announce(recent_posts_cache: {},
session, handle: str, is_group: bool, base_dir: str,
http_prefix: str,
domain: str, onion_domain: str, port: int,
domain: str,
onion_domain: str, i2p_domain: str, port: int,
send_threads: [], post_log: [], cached_webfingers: {},
person_cache: {}, message_json: {}, federation_list: [],
debug: bool, translate: {},
@@ -2034,6 +2035,7 @@ def _receive_announce(recent_posts_cache: {},
person_cache, debug,
__version__, http_prefix,
domain, onion_domain,
i2p_domain,
signing_priv_key_pem)
if pub_key:
if debug:
@@ -2322,6 +2324,7 @@ def _valid_post_content(base_dir: str, nickname: str, domain: str,
def _obtain_avatar_for_reply_post(session, base_dir: str, http_prefix: str,
domain: str, onion_domain: str,
i2p_domain: str,
person_cache: {},
post_json_object: {}, debug: bool,
signing_priv_key_pem: str) -> None:
@@ -2355,7 +2358,8 @@ def _obtain_avatar_for_reply_post(session, base_dir: str, http_prefix: str,
get_person_pub_key(base_dir, session, lookup_actor,
person_cache, debug,
__version__, http_prefix,
domain, onion_domain, signing_priv_key_pem)
domain, onion_domain, i2p_domain,
signing_priv_key_pem)
if pub_key:
if debug:
print('DEBUG: public key obtained for reply: ' + lookup_actor)
@@ -3403,7 +3407,7 @@ def _inbox_after_initial(recent_posts_cache: {}, max_recent_posts: int,
if _receive_announce(recent_posts_cache,
session, handle, is_group,
base_dir, http_prefix,
domain, onion_domain, port,
domain, onion_domain, i2p_domain, port,
send_threads, post_log,
cached_webfingers,
person_cache,
@@ -3565,7 +3569,8 @@ def _inbox_after_initial(recent_posts_cache: {}, max_recent_posts: int,
# get the avatar for a reply/announce
_obtain_avatar_for_reply_post(session, base_dir,
http_prefix, domain, onion_domain,
http_prefix, domain,
onion_domain, i2p_domain,
person_cache, post_json_object, debug,
signing_priv_key_pem)
@@ -3915,7 +3920,7 @@ def _receive_follow_request(session, base_dir: str, http_prefix: str,
message_json: {}, federation_list: [],
debug: bool, project_version: str,
max_followers: int, onion_domain: str,
signing_priv_key_pem: str,
i2p_domain: str, signing_priv_key_pem: str,
unit_test: bool) -> bool:
"""Receives a follow request within the POST section of HTTPServer
"""
@@ -4036,7 +4041,7 @@ def _receive_follow_request(session, base_dir: str, http_prefix: str,
if not get_person_pub_key(base_dir, session, message_json['actor'],
person_cache, debug, project_version,
http_prefix, domain_to_follow, onion_domain,
signing_priv_key_pem):
i2p_domain, signing_priv_key_pem):
if debug:
print('Unable to obtain following actor: ' +
message_json['actor'])
@@ -4074,7 +4079,8 @@ def _receive_follow_request(session, base_dir: str, http_prefix: str,
if not get_person_pub_key(base_dir, session, message_json['actor'],
person_cache, debug, project_version,
http_prefix, domain_to_follow,
onion_domain, signing_priv_key_pem):
onion_domain, i2p_domain,
signing_priv_key_pem):
if debug:
print('Unable to obtain following actor: ' +
message_json['actor'])
@@ -4153,10 +4159,24 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int,
"""Processes received items and moves them to the appropriate
directories
"""
print('Starting new session when starting inbox queue')
curr_session_time = int(time.time())
session_last_update = curr_session_time
print('Starting new session when starting inbox queue')
session = create_session(proxy_type)
# if this is a clearnet instance then optionally start sessions
# for onion and i2p domains
session_onion = None
session_i2p = None
session_last_update_onion = curr_session_time
session_last_update_i2p = curr_session_time
if proxy_type != 'tor' and onion_domain:
print('Starting onion session when starting inbox queue')
session_onion = create_session('tor')
if proxy_type != 'i2p' and i2p_domain:
print('Starting i2p session when starting inbox queue')
session_i2p = create_session('i2p')
inbox_handle = 'inbox@' + domain
if debug:
print('DEBUG: Inbox queue running')
@@ -4209,16 +4229,6 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int,
_restore_queue_items(base_dir, queue)
continue
curr_time = int(time.time())
# recreate the session periodically
if not session or curr_time - session_last_update > 21600:
print('Regenerating inbox queue session at 6hr interval')
session = create_session(proxy_type)
if not session:
continue
session_last_update = curr_time
# oldest item first
queue.sort()
queue_filename = queue[0]
@@ -4249,6 +4259,8 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int,
str(queue_filename))
continue
curr_time = int(time.time())
# clear the daily quotas for maximum numbers of received posts
if curr_time - quotas_last_update_daily > 60 * 60 * 24:
quotas_daily = {
@@ -4276,6 +4288,42 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int,
account_max_posts_per_day, debug):
continue
curr_session = session
curr_proxy_type = proxy_type
curr_session_last_update = session_last_update
session_type = 'default'
if queue_json.get('actor'):
if isinstance(queue_json['actor'], str):
sender_domain, _ = get_domain_from_actor(queue_json['actor'])
if sender_domain.endswith('.onion') and \
session_onion and proxy_type != 'tor':
curr_proxy_type = 'tor'
curr_session = session_onion
session_type = 'onion'
curr_session_last_update = session_last_update_onion
elif (sender_domain.endswith('.i2p') and
session_i2p and proxy_type != 'i2p'):
curr_proxy_type = 'i2p'
curr_session = session_i2p
session_type = 'i2p'
curr_session_last_update = session_last_update_i2p
# recreate the session periodically
if not curr_session or curr_time - curr_session_last_update > 21600:
print('Regenerating inbox queue session at 6hr interval')
curr_session = create_session(curr_proxy_type)
if not curr_session:
continue
if session_type == 'default':
session = curr_session
session_last_update = curr_time
elif session_type == 'onion':
session_onion = curr_session
session_last_update_onion = curr_time
elif session_type == 'i2p':
session_i2p = curr_session
session_last_update_i2p = curr_time
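
The block above is the core of the inbox change: each queued item is handled with the session matching the sender's network, and whichever session was used is recreated once it is more than six hours old. A condensed, self-contained sketch of that logic (hypothetical function names; the real code derives the sender domain from the queue item's actor URL via get_domain_from_actor):

import time
from typing import Callable, Tuple

SESSION_MAX_AGE_SEC = 6 * 60 * 60  # regenerate queue sessions every 6 hours


def pick_queue_session(sender_domain: str, proxy_type: str,
                       session, session_onion, session_i2p) -> Tuple:
    """Return (session, proxy_type, session_type) for one queue item,
    preferring the Tor or i2p session when the sender is on that network."""
    if sender_domain.endswith('.onion') and session_onion and \
       proxy_type != 'tor':
        return session_onion, 'tor', 'onion'
    if sender_domain.endswith('.i2p') and session_i2p and \
       proxy_type != 'i2p':
        return session_i2p, 'i2p', 'i2p'
    return session, proxy_type, 'default'


def regenerate_if_stale(curr_session, curr_proxy_type: str,
                        last_update: int,
                        create_session: Callable) -> Tuple:
    """Recreate a session once it is older than the six hour threshold."""
    now = int(time.time())
    if not curr_session or now - last_update > SESSION_MAX_AGE_SEC:
        curr_session = create_session(curr_proxy_type)
        last_update = now
    return curr_session, last_update
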
if debug and queue_json.get('actor'):
print('Obtaining public key for actor ' + queue_json['actor'])
@@ -4298,10 +4346,11 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int,
break
pub_key = \
get_person_pub_key(base_dir, session, key_id,
get_person_pub_key(base_dir, curr_session, key_id,
person_cache, debug,
project_version, http_prefix,
domain, onion_domain, signing_priv_key_pem)
domain, onion_domain, i2p_domain,
signing_priv_key_pem)
if pub_key:
if debug:
print('DEBUG: public key: ' + str(pub_key))
@@ -4409,7 +4458,7 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int,
# if queue_json['post'].get('id'):
# queue_json['post']['id'] = queue_json['id']
if _receive_undo(session,
if _receive_undo(curr_session,
base_dir, http_prefix, port,
send_threads, post_log,
cached_webfingers,
@@ -4430,7 +4479,7 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int,
if debug:
print('DEBUG: checking for follow requests')
if _receive_follow_request(session,
if _receive_follow_request(curr_session,
base_dir, http_prefix, port,
send_threads, post_log,
cached_webfingers,
@@ -4438,7 +4487,7 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int,
queue_json['post'],
federation_list,
debug, project_version,
max_followers, onion_domain,
max_followers, onion_domain, i2p_domain,
signing_priv_key_pem, unit_test):
if os.path.isfile(queue_filename):
try:
@@ -4455,7 +4504,7 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int,
if debug:
print('DEBUG: No follow requests')
if receive_accept_reject(session,
if receive_accept_reject(curr_session,
base_dir, http_prefix, domain, port,
send_threads, post_log,
cached_webfingers, person_cache,
@@ -4472,7 +4521,7 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int,
queue.pop(0)
continue
if _receive_update_activity(recent_posts_cache, session,
if _receive_update_activity(recent_posts_cache, curr_session,
base_dir, http_prefix,
domain, port,
send_threads, post_log,
@@ -4555,7 +4604,7 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int,
languages_understood = []
_inbox_after_initial(recent_posts_cache,
max_recent_posts,
session, key_id, handle,
curr_session, key_id, handle,
queue_json['post'],
base_dir, http_prefix,
send_threads, post_log,
@@ -4563,7 +4612,7 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int,
person_cache, queue,
domain,
onion_domain, i2p_domain,
port, proxy_type,
port, curr_proxy_type,
federation_list,
debug,
queue_filename, destination,

View File

@@ -9,7 +9,6 @@ __module_group__ = "Timeline"
import os
from shutil import copyfile
from session import create_session
from auth import create_password
from posts import is_image_media
from posts import outbox_message_create_wrap
@@ -489,12 +488,6 @@ def post_message_to_outbox(session, translate: {},
if debug:
print('DEBUG: Updated announcements (shares) collection ' +
'for the post associated with the Announce activity')
if not server.session:
print('DEBUG: creating new session for c2s')
server.session = create_session(proxy_type)
if not server.session:
print('ERROR: Failed to create session for post_message_to_outbox')
return False
if debug:
print('DEBUG: sending c2s post to followers')
# remove inactive threads

View File

@@ -16,6 +16,7 @@ from utils import load_json
from utils import is_account_dir
from utils import acct_dir
from outbox import post_message_to_outbox
from session import create_session
def _update_post_schedule(base_dir: str, handle: str, httpd,
@@ -93,7 +94,17 @@ def _update_post_schedule(base_dir: str, handle: str, httpd,
if nickname:
httpd.post_to_nickname = nickname
if not post_message_to_outbox(httpd.session,
# create session if needed
curr_session = httpd.session
curr_proxy_type = httpd.proxy_type
if not curr_session:
curr_session = create_session(httpd.proxy_type)
httpd.session = curr_session
if not curr_session:
continue
if not post_message_to_outbox(curr_session,
httpd.translate,
post_json_object, nickname,
httpd, base_dir,
@@ -111,7 +122,7 @@ def _update_post_schedule(base_dir: str, handle: str, httpd,
httpd.cached_webfingers,
httpd.person_cache,
httpd.allow_deletion,
httpd.proxy_type,
curr_proxy_type,
httpd.project_version,
httpd.debug,
httpd.yt_replace_domain,
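
The scheduler change above creates the shared session on demand instead of assuming the daemon already has one. A minimal sketch of that lazy pattern (hypothetical ensure_session helper; create_session is passed in rather than imported):

def ensure_session(httpd, create_session):
    """Create and cache the daemon's session the first time it is needed."""
    if not httpd.session:
        httpd.session = create_session(httpd.proxy_type)
    return httpd.session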

View File

@@ -767,3 +767,48 @@ def get_method(method_name: str, xml_str: str,
print('EX: get_method failed, ' +
'connection was reset during get_vcard ' + str(ex))
return None
def get_session_for_domains(server, calling_domain: str, referer_domain: str):
"""Returns the appropriate session for the given domains
"""
if referer_domain is None:
referer_domain = ''
if '.onion:' in calling_domain or \
calling_domain.endswith('.onion') or \
'.onion:' in referer_domain or \
referer_domain.endswith('.onion'):
if not server.domain.endswith('.onion'):
if server.onion_domain and server.session_onion:
return server.session_onion, 'tor'
if '.i2p:' in calling_domain or \
calling_domain.endswith('.i2p') or \
'.i2p:' in referer_domain or \
referer_domain.endswith('.i2p'):
if not server.domain.endswith('.i2p'):
if server.i2p_domain and server.session_i2p:
return server.session_i2p, 'i2p'
return server.session, server.proxy_type
def get_session_for_domain(server, referer_domain: str):
"""Returns the appropriate session for the given domain
"""
return get_session_for_domains(server, referer_domain, referer_domain)
def set_session_for_sender(server, proxy_type: str, new_session) -> None:
"""Sets the appropriate session for the given sender
"""
if proxy_type == 'tor':
if not server.domain.endswith('.onion'):
if server.onion_domain and server.session_onion:
server.session_onion = new_session
return
if proxy_type == 'i2p':
if not server.domain.endswith('.i2p'):
if server.i2p_domain and server.session_i2p:
server.session_i2p = new_session
return
server.session = new_session
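
As a usage illustration, a request handler on a clearnet instance that also publishes onion and i2p addresses might combine these helpers roughly as follows (hypothetical fetch_session wrapper; server is assumed to carry the session attributes initialised in run_daemon):

def fetch_session(server, calling_domain: str, referer_domain: str):
    """Pick the per-network session for a request, creating and storing
    a new one in the matching slot if none exists yet (sketch)."""
    curr_session, proxy_type = \
        get_session_for_domains(server, calling_domain, referer_domain)
    if not curr_session:
        curr_session = create_session(proxy_type)
        set_session_for_sender(server, proxy_type, curr_session)
    return curr_session, proxy_type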