From 81695817a775e3b5e53f85faaf374d886f20b635 Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Thu, 10 Mar 2022 15:01:23 +0000 Subject: [PATCH 01/31] Make session and proxy type into per request variables --- daemon.py | 922 ++++++++++++++++++++++++++++++++++-------------------- 1 file changed, 584 insertions(+), 338 deletions(-) diff --git a/daemon.py b/daemon.py index cd615e3f9..71c27eab9 100644 --- a/daemon.py +++ b/daemon.py @@ -456,7 +456,8 @@ class PubServer(BaseHTTPRequestHandler): pass def _send_reply_to_question(self, nickname: str, message_id: str, - answer: str) -> None: + answer: str, + curr_session, proxy_type: str) -> None: """Sends a reply to a question """ votes_filename = \ @@ -518,7 +519,8 @@ class PubServer(BaseHTTPRequestHandler): # name field contains the answer message_json['object']['name'] = answer if self._post_to_outbox(message_json, - self.server.project_version, nickname): + self.server.project_version, nickname, + curr_session, proxy_type): post_filename = \ locate_post(self.server.base_dir, nickname, self.server.domain, message_id) @@ -641,20 +643,25 @@ class PubServer(BaseHTTPRequestHandler): return key_id return None - def _establish_session(self, calling_function: str) -> bool: + def _establish_session(self, + calling_function: str, + curr_session, + proxy_type: str): """Recreates session if needed """ - if self.server.session: - return True + if curr_session: + return curr_session print('DEBUG: creating new session during ' + calling_function) - self.server.session = create_session(self.server.proxy_type) - if self.server.session: - return True + curr_session = create_session(proxy_type) + if curr_session: + self.server.session = curr_session + return curr_session print('ERROR: GET failed to create session during ' + calling_function) - return False + return None - def _secure_mode(self, force: bool = False) -> bool: + def _secure_mode(self, curr_session, proxy_type: str, + force: bool = False) -> bool: """http authentication of GET requests for json """ if not self.server.secure_mode and not force: @@ -673,13 +680,16 @@ class PubServer(BaseHTTPRequestHandler): print('AUTH: Secure mode GET request not permitted: ' + key_id) return False - if not self._establish_session("secure mode"): + curr_session = \ + self._establish_session("secure mode", + curr_session, proxy_type) + if not curr_session: return False # obtain the public key pub_key = \ get_person_pub_key(self.server.base_dir, - self.server.session, key_id, + curr_session, key_id, self.server.person_cache, self.server.debug, self.server.project_version, self.server.http_prefix, @@ -1412,7 +1422,8 @@ class PubServer(BaseHTTPRequestHandler): return True def _post_to_outbox(self, message_json: {}, version: str, - post_to_nickname: str) -> bool: + post_to_nickname: str, + curr_session, proxy_type: str) -> bool: """post is received by the outbox Client to server message post https://www.w3.org/TR/activitypub/#client-to-server-outbox-delivery @@ -1430,7 +1441,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.shared_items_federated_domains shared_item_federation_tokens = \ self.server.shared_item_federation_tokens - return post_message_to_outbox(self.server.session, + return post_message_to_outbox(curr_session, self.server.translate, message_json, self.post_to_nickname, self.server, self.server.base_dir, @@ -1448,7 +1459,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.cached_webfingers, self.server.person_cache, self.server.allow_deletion, - self.server.proxy_type, version, + proxy_type, 
version, self.server.debug, self.server.yt_replace_domain, self.server.twitter_replacement_domain, @@ -2446,7 +2457,8 @@ class PubServer(BaseHTTPRequestHandler): base_dir: str, http_prefix: str, domain: str, domain_full: str, port: int, onion_domain: str, i2p_domain: str, - debug: bool) -> None: + debug: bool, + curr_session, proxy_type: str) -> None: """Receive POST from person options screen """ page_number = 1 @@ -2803,7 +2815,8 @@ class PubServer(BaseHTTPRequestHandler): # send block activity self._send_block(http_prefix, chooser_nickname, domain_full, - options_nickname, options_domain_full) + options_nickname, options_domain_full, + curr_session, proxy_type) # person options screen, unblock button # See html_person_options @@ -2899,7 +2912,7 @@ class PubServer(BaseHTTPRequestHandler): conversation_id, self.server.recent_posts_cache, self.server.max_recent_posts, - self.server.session, + curr_session, self.server.cached_webfingers, self.server.person_cache, self.server.port, @@ -3035,7 +3048,7 @@ class PubServer(BaseHTTPRequestHandler): conversation_id, self.server.recent_posts_cache, self.server.max_recent_posts, - self.server.session, + curr_session, self.server.cached_webfingers, self.server.person_cache, self.server.port, @@ -3163,7 +3176,8 @@ class PubServer(BaseHTTPRequestHandler): base_dir: str, http_prefix: str, domain: str, domain_full: str, port: int, onion_domain: str, i2p_domain: str, - debug: bool) -> None: + debug: bool, + curr_session, proxy_type: str) -> None: """Confirm to follow """ users_path = path.split('/followconfirm')[0] @@ -3226,7 +3240,7 @@ class PubServer(BaseHTTPRequestHandler): follower_nickname + ' to ' + following_actor) if not self.server.signing_priv_key_pem: print('Sending follow request with no signing key') - send_follow_request(self.server.session, + send_follow_request(curr_session, base_dir, follower_nickname, domain, port, http_prefix, @@ -3253,7 +3267,8 @@ class PubServer(BaseHTTPRequestHandler): base_dir: str, http_prefix: str, domain: str, domain_full: str, port: int, onion_domain: str, i2p_domain: str, - debug: bool) -> None: + debug: bool, + curr_session, proxy_type: str) -> None: """Confirms a block """ users_path = path.split('/blockconfirm')[0] @@ -3328,7 +3343,8 @@ class PubServer(BaseHTTPRequestHandler): # send block activity self._send_block(http_prefix, blocker_nickname, domain_full, - blocking_nickname, blocking_domain_full) + blocking_nickname, blocking_domain_full, + curr_session, proxy_type) if calling_domain.endswith('.onion') and onion_domain: origin_path_str = 'http://' + onion_domain + users_path elif (calling_domain.endswith('.i2p') and i2p_domain): @@ -3428,7 +3444,8 @@ class PubServer(BaseHTTPRequestHandler): port: int, search_for_emoji: bool, onion_domain: str, i2p_domain: str, getreq_start_time, getreq_timings: {}, - debug: bool) -> None: + debug: bool, + curr_session, proxy_type: str) -> None: """Receive a search query """ # get the page number @@ -3494,7 +3511,7 @@ class PubServer(BaseHTTPRequestHandler): base_dir, search_str[1:], 1, MAX_POSTS_IN_HASHTAG_FEED, - self.server.session, + curr_session, self.server.cached_webfingers, self.server.person_cache, http_prefix, @@ -3595,7 +3612,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.project_version, self.server.recent_posts_cache, self.server.max_recent_posts, - self.server.session, + curr_session, self.server.cached_webfingers, self.server.person_cache, port, @@ -3668,7 +3685,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.project_version, 
self.server.recent_posts_cache, self.server.max_recent_posts, - self.server.session, + curr_session, self.server.cached_webfingers, self.server.person_cache, port, @@ -3706,7 +3723,11 @@ class PubServer(BaseHTTPRequestHandler): return # profile search nickname = get_nickname_from_actor(actor_str) - if not self._establish_session("handle search"): + curr_session = \ + self._establish_session("handle search", + curr_session, + proxy_type) + if not curr_session: self.server.postreq_busy = False return profile_path_str = path.replace('/searchhandle', '') @@ -3725,7 +3746,7 @@ class PubServer(BaseHTTPRequestHandler): else: actor = search_str avatar_url = \ - get_avatar_image_url(self.server.session, + get_avatar_image_url(curr_session, base_dir, http_prefix, actor, self.server.person_cache, @@ -3739,7 +3760,8 @@ class PubServer(BaseHTTPRequestHandler): domain, domain_full, getreq_start_time, onion_domain, i2p_domain, - cookie, debug, authorized) + cookie, debug, authorized, + curr_session, proxy_type) return else: show_published_date_only = \ @@ -3779,7 +3801,7 @@ class PubServer(BaseHTTPRequestHandler): domain, port, search_str, - self.server.session, + curr_session, cached_webfingers, self.server.person_cache, self.server.debug, @@ -3891,7 +3913,8 @@ class PubServer(BaseHTTPRequestHandler): base_dir: str, http_prefix: str, domain: str, domain_full: str, onion_domain: str, i2p_domain: str, - debug: bool) -> None: + debug: bool, + curr_session, proxy_type: str) -> None: """Receive a vote via POST """ page_number = 1 @@ -3959,7 +3982,8 @@ class PubServer(BaseHTTPRequestHandler): if '&' in answer: answer = answer.split('&')[0] - self._send_reply_to_question(nickname, message_id, answer) + self._send_reply_to_question(nickname, message_id, answer, + curr_session, proxy_type) if calling_domain.endswith('.onion') and onion_domain: actor = 'http://' + onion_domain + users_path elif (calling_domain.endswith('.i2p') and i2p_domain): @@ -4907,7 +4931,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, debug: bool, allow_local_network_access: bool, system_language: str, - content_license_url: str) -> None: + content_license_url: str, + curr_session, proxy_type: str) -> None: """Updates your user profile after editing via the Edit button on the profile screen """ @@ -6809,7 +6834,8 @@ class PubServer(BaseHTTPRequestHandler): str(update_actor_json)) self._post_to_outbox(update_actor_json, self.server.project_version, - nickname) + nickname, + curr_session, proxy_type) # deactivate the account if fields.get('deactivateThisAccount'): @@ -7157,7 +7183,8 @@ class PubServer(BaseHTTPRequestHandler): base_dir: str, http_prefix: str, domain: str, port: int, proxy_type: str, getreq_start_time, - debug: bool) -> None: + debug: bool, + curr_session) -> None: """Returns an RSS2 feed for the blog """ nickname = path.split('/blog/')[1] @@ -7166,12 +7193,16 @@ class PubServer(BaseHTTPRequestHandler): if not nickname.startswith('rss.'): account_dir = acct_dir(self.server.base_dir, nickname, domain) if os.path.isdir(account_dir): - if not self._establish_session("RSS request"): + curr_session = \ + self._establish_session("RSS request", + curr_session, + proxy_type) + if not curr_session: return msg = \ html_blog_page_rss2(authorized, - self.server.session, + curr_session, base_dir, http_prefix, self.server.translate, @@ -7205,10 +7236,15 @@ class PubServer(BaseHTTPRequestHandler): domain_full: str, port: int, proxy_type: str, translate: {}, getreq_start_time, - debug: bool) -> None: + debug: 
bool, + curr_session) -> None: """Returns an RSS2 feed for all blogs on this instance """ - if not self._establish_session("get_rss2site"): + curr_session = \ + self._establish_session("get_rss2site", + curr_session, + proxy_type) + if not curr_session: self._404() return @@ -7221,7 +7257,7 @@ class PubServer(BaseHTTPRequestHandler): domain = acct.split('@')[1] msg += \ html_blog_page_rss2(authorized, - self.server.session, + curr_session, base_dir, http_prefix, self.server.translate, @@ -7259,10 +7295,15 @@ class PubServer(BaseHTTPRequestHandler): base_dir: str, http_prefix: str, domain: str, port: int, proxy_type: str, getreq_start_time, - debug: bool) -> None: + debug: bool, + curr_session) -> None: """Returns the newswire feed """ - if not self._establish_session("get_newswire_feed"): + curr_session = \ + self._establish_session("get_newswire_feed", + curr_session, + proxy_type) + if not curr_session: self._404() return @@ -7293,10 +7334,14 @@ class PubServer(BaseHTTPRequestHandler): base_dir: str, http_prefix: str, domain: str, port: int, proxy_type: str, getreq_start_time, - debug: bool) -> None: + debug: bool, + curr_session) -> None: """Returns the hashtag categories feed """ - if not self._establish_session("get_hashtag_categories_feed"): + curr_session = \ + self._establish_session("get_hashtag_categories_feed", + curr_session, proxy_type) + if not curr_session: self._404() return @@ -7325,7 +7370,8 @@ class PubServer(BaseHTTPRequestHandler): base_dir: str, http_prefix: str, domain: str, port: int, proxy_type: str, getreq_start_time, - debug: bool, system_language: str) -> None: + debug: bool, system_language: str, + curr_session) -> None: """Returns an RSS3 feed """ nickname = path.split('/blog/')[1] @@ -7334,12 +7380,15 @@ class PubServer(BaseHTTPRequestHandler): if not nickname.startswith('rss.'): account_dir = acct_dir(base_dir, nickname, domain) if os.path.isdir(account_dir): - if not self._establish_session("get_rss3feed"): + curr_session = \ + self._establish_session("get_rss3feed", + curr_session, proxy_type) + if not curr_session: self._404() return msg = \ html_blog_page_rss3(authorized, - self.server.session, + curr_session, base_dir, http_prefix, self.server.translate, nickname, domain, port, @@ -7368,7 +7417,8 @@ class PubServer(BaseHTTPRequestHandler): getreq_start_time, onion_domain: str, i2p_domain: str, cookie: str, debug: bool, - authorized: bool) -> None: + authorized: bool, + curr_session, proxy_type: str) -> None: """Show person options screen """ back_to_path = '' @@ -7446,8 +7496,8 @@ class PubServer(BaseHTTPRequestHandler): if actor_json.get('alsoKnownAs'): also_known_as = actor_json['alsoKnownAs'] - if self.server.session: - check_for_changed_actor(self.server.session, + if curr_session: + check_for_changed_actor(curr_session, self.server.base_dir, self.server.http_prefix, self.server.domain_full, @@ -7820,7 +7870,8 @@ class PubServer(BaseHTTPRequestHandler): base_dir: str, http_prefix: str, domain: str, domain_full: str, port: int, onion_domain: str, i2p_domain: str, - getreq_start_time) -> None: + getreq_start_time, + curr_session, proxy_type: str) -> None: """Return the result of a hashtag search """ page_number = 1 @@ -7860,7 +7911,8 @@ class PubServer(BaseHTTPRequestHandler): self.server.max_recent_posts, self.server.translate, base_dir, hashtag, page_number, - MAX_POSTS_IN_HASHTAG_FEED, self.server.session, + MAX_POSTS_IN_HASHTAG_FEED, + curr_session, self.server.cached_webfingers, self.server.person_cache, http_prefix, @@ -7904,7 +7956,8 @@ class 
PubServer(BaseHTTPRequestHandler): base_dir: str, http_prefix: str, domain: str, domain_full: str, port: int, onion_domain: str, i2p_domain: str, - getreq_start_time) -> None: + getreq_start_time, + curr_session, proxy_type: str) -> None: """Return an RSS 2 feed for a hashtag """ hashtag = path.split('/tags/rss2/')[1] @@ -7924,7 +7977,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.max_recent_posts, self.server.translate, base_dir, hashtag, - MAX_POSTS_IN_FEED, self.server.session, + MAX_POSTS_IN_FEED, curr_session, self.server.cached_webfingers, self.server.person_cache, http_prefix, @@ -7962,7 +8015,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, repeat_private: bool, - debug: bool) -> None: + debug: bool, + curr_session) -> None: """The announce/repeat button was pressed on a post """ page_number = 1 @@ -7999,7 +8053,10 @@ class PubServer(BaseHTTPRequestHandler): self._redirect_headers(actor_path_str, cookie, calling_domain) return - if not self._establish_session("announceButton"): + curr_session = \ + self._establish_session("announceButton", + curr_session, proxy_type) + if not curr_session: self._404() return self.server.actorRepeat = path.split('?actor=')[1] @@ -8010,7 +8067,7 @@ class PubServer(BaseHTTPRequestHandler): if not repeat_private: announce_to_str = 'https://www.w3.org/ns/activitystreams#Public' announce_json = \ - create_announce(self.server.session, + create_announce(curr_session, base_dir, self.server.federation_list, self.post_to_nickname, @@ -8043,7 +8100,8 @@ class PubServer(BaseHTTPRequestHandler): # send out the announce within a separate thread self._post_to_outbox(announce_json, self.server.project_version, - self.post_to_nickname) + self.post_to_nickname, + curr_session, proxy_type) fitness_performance(getreq_start_time, self.server.fitness, '_GET', '_announce_button postToOutboxThread', @@ -8075,7 +8133,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.max_recent_posts, self.server.translate, page_number, base_dir, - self.server.session, + curr_session, self.server.cached_webfingers, self.server.person_cache, self.post_to_nickname, domain, @@ -8117,7 +8175,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, repeat_private: bool, debug: bool, - recent_posts_cache: {}) -> None: + recent_posts_cache: {}, + curr_session) -> None: """Undo announce/repeat button was pressed """ page_number = 1 @@ -8157,7 +8216,10 @@ class PubServer(BaseHTTPRequestHandler): self._redirect_headers(actor_path_str, cookie, calling_domain) return - if not self._establish_session("undoAnnounceButton"): + curr_session = \ + self._establish_session("undoAnnounceButton", + curr_session, proxy_type) + if not curr_session: self._404() return undo_announce_actor = \ @@ -8200,7 +8262,8 @@ class PubServer(BaseHTTPRequestHandler): self._post_to_outbox(new_undo_announce, self.server.project_version, - self.post_to_nickname) + self.post_to_nickname, + curr_session, proxy_type) actor_absolute = self._get_instance_url(calling_domain) + actor actor_path_str = \ @@ -8217,7 +8280,8 @@ class PubServer(BaseHTTPRequestHandler): domain: str, domain_full: str, port: int, onion_domain: str, i2p_domain: str, getreq_start_time, - proxy_type: str, debug: bool) -> None: + proxy_type: str, debug: bool, + curr_session) -> None: """Follow approve button was pressed """ origin_path_str = path.split('/followapprove=')[0] @@ -8231,12 +8295,15 @@ class PubServer(BaseHTTPRequestHandler): handle_nickname 
+ '@' + \ get_full_domain(handle_domain, handle_port) if '@' in following_handle: - if not self._establish_session("followApproveButton"): + curr_session = \ + self._establish_session("followApproveButton", + curr_session, proxy_type) + if not curr_session: self._404() return signing_priv_key_pem = \ self.server.signing_priv_key_pem - manual_approve_follow_request_thread(self.server.session, + manual_approve_follow_request_thread(curr_session, base_dir, http_prefix, follower_nickname, domain, port, @@ -8381,7 +8448,8 @@ class PubServer(BaseHTTPRequestHandler): domain: str, domain_full: str, port: int, onion_domain: str, i2p_domain: str, getreq_start_time, - proxy_type: str, debug: bool) -> None: + proxy_type: str, debug: bool, + curr_session) -> None: """Follow deny button was pressed """ origin_path_str = path.split('/followdeny=')[0] @@ -8395,7 +8463,7 @@ class PubServer(BaseHTTPRequestHandler): handle_nickname + '@' + \ get_full_domain(handle_domain, handle_port) if '@' in following_handle: - manual_deny_follow_request_thread(self.server.session, + manual_deny_follow_request_thread(curr_session, base_dir, http_prefix, follower_nickname, domain, port, @@ -8428,7 +8496,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> None: + debug: str, + curr_session) -> None: """Press the like button """ page_number = 1 @@ -8466,7 +8535,10 @@ class PubServer(BaseHTTPRequestHandler): self._redirect_headers(actor_path_str, cookie, calling_domain) return - if not self._establish_session("likeButton"): + curr_session = \ + self._establish_session("likeButton", + curr_session, proxy_type) + if not curr_session: self._404() return like_actor = \ @@ -8495,7 +8567,8 @@ class PubServer(BaseHTTPRequestHandler): } # send out the like to followers - self._post_to_outbox(like_json, self.server.project_version, None) + self._post_to_outbox(like_json, self.server.project_version, None, + curr_session, proxy_type) fitness_performance(getreq_start_time, self.server.fitness, '_GET', '_like_button postToOutbox', @@ -8550,7 +8623,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.max_recent_posts, self.server.translate, page_number, base_dir, - self.server.session, + curr_session, self.server.cached_webfingers, self.server.person_cache, self.post_to_nickname, domain, @@ -8600,7 +8673,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> None: + debug: str, + curr_session) -> None: """A button is pressed to undo """ page_number = 1 @@ -8637,7 +8711,10 @@ class PubServer(BaseHTTPRequestHandler): self._redirect_headers(actor_path_str, cookie, calling_domain) return - if not self._establish_session("undoLikeButton"): + curr_session = \ + self._establish_session("undoLikeButton", + curr_session, proxy_type) + if not curr_session: self._404() return undo_actor = \ @@ -8672,7 +8749,8 @@ class PubServer(BaseHTTPRequestHandler): # send out the undo like to followers self._post_to_outbox(undo_like_json, - self.server.project_version, None) + self.server.project_version, None, + curr_session, proxy_type) # directly undo the like within the post file if not liked_post_filename: @@ -8713,7 +8791,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.max_recent_posts, self.server.translate, page_number, base_dir, - self.server.session, + curr_session, self.server.cached_webfingers, self.server.person_cache, self.post_to_nickname, domain, 
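The hunks above and below all apply the same mechanical change: _establish_session no longer returns a bool and no longer reads self.server.proxy_type itself; each request instead passes its own curr_session and proxy_type down the call chain, and the helper returns the session object (or None on failure). Below is a minimal standalone sketch of that calling convention, not part of the patch itself: create_session is stubbed (in daemon.py it comes from session.py), and Server/Handler are simplified stand-ins for the real server object and PubServer.

def create_session(proxy_type):
    """Stub standing in for session.create_session()."""
    return object()          # a real requests session in daemon.py


class Server:
    """Simplified stand-in for the http server object."""
    session = None
    proxy_type = None        # e.g. None, 'tor' or 'i2p'


class Handler:
    """Simplified stand-in for PubServer."""

    def __init__(self, server):
        self.server = server

    def _establish_session(self, calling_function: str,
                           curr_session, proxy_type: str):
        # Previously this returned True/False and always used
        # self.server.session and self.server.proxy_type.  Now the caller
        # supplies both values and receives the session back.
        if curr_session:
            return curr_session
        curr_session = create_session(proxy_type)
        if curr_session:
            # the server-wide copy is kept only as a cache for later requests
            self.server.session = curr_session
            return curr_session
        return None

    def do_GET(self):
        # each request starts from the server-wide values ...
        curr_session = self.server.session
        proxy_type = self.server.proxy_type
        # ... and then threads them explicitly through every helper it calls
        curr_session = self._establish_session("GET", curr_session, proxy_type)
        if not curr_session:
            return   # daemon.py answers with 404 at this point
        # handlers such as _like_button or _show_replies_to_post now take
        # curr_session (and proxy_type) as trailing arguments instead of
        # reaching for self.server.session directly
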
@@ -8759,7 +8837,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> None: + debug: str, + curr_session) -> None: """Press an emoji reaction button Note that this is not the emoji reaction selection icon at the bottom of the post @@ -8813,7 +8892,10 @@ class PubServer(BaseHTTPRequestHandler): self._redirect_headers(actor_path_str, cookie, calling_domain) return - if not self._establish_session("reactionButton"): + curr_session = \ + self._establish_session("reactionButton", + curr_session, proxy_type) + if not curr_session: self._404() return reaction_actor = \ @@ -8844,7 +8926,8 @@ class PubServer(BaseHTTPRequestHandler): } # send out the emoji reaction to followers - self._post_to_outbox(reaction_json, self.server.project_version, None) + self._post_to_outbox(reaction_json, self.server.project_version, None, + curr_session, proxy_type) fitness_performance(getreq_start_time, self.server.fitness, '_GET', '_reaction_button postToOutbox', @@ -8905,7 +8988,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.max_recent_posts, self.server.translate, page_number, base_dir, - self.server.session, + curr_session, self.server.cached_webfingers, self.server.person_cache, self.post_to_nickname, domain, @@ -8953,7 +9036,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> None: + debug: str, + curr_session) -> None: """A button is pressed to undo emoji reaction """ page_number = 1 @@ -9005,7 +9089,10 @@ class PubServer(BaseHTTPRequestHandler): calling_domain) return emoji_content = urllib.parse.unquote_plus(emoji_content_encoded) - if not self._establish_session("undoReactionButton"): + curr_session = \ + self._establish_session("undoReactionButton", + curr_session, proxy_type) + if not curr_session: self._404() return undo_actor = \ @@ -9041,7 +9128,8 @@ class PubServer(BaseHTTPRequestHandler): # send out the undo emoji reaction to followers self._post_to_outbox(undo_reaction_json, - self.server.project_version, None) + self.server.project_version, None, + curr_session, proxy_type) # directly undo the emoji reaction within the post file if not reaction_post_filename: @@ -9087,7 +9175,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.max_recent_posts, self.server.translate, page_number, base_dir, - self.server.session, + curr_session, self.server.cached_webfingers, self.server.person_cache, self.post_to_nickname, domain, @@ -9131,7 +9219,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> None: + debug: str, + curr_session) -> None: """Press the emoji reaction picker icon at the bottom of the post """ page_number = 1 @@ -9193,7 +9282,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.max_recent_posts, self.server.translate, self.server.base_dir, - self.server.session, + curr_session, self.server.cached_webfingers, self.server.person_cache, self.post_to_nickname, @@ -9229,7 +9318,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> None: + debug: str, + curr_session) -> None: """Bookmark button was pressed """ page_number = 1 @@ -9267,14 +9357,17 @@ class PubServer(BaseHTTPRequestHandler): self._redirect_headers(actor_path_str, cookie, calling_domain) return - if not 
self._establish_session("bookmarkButton"): + curr_session = \ + self._establish_session("bookmarkButton", + curr_session, proxy_type) + if not curr_session: self._404() return bookmark_actor = \ local_actor_url(http_prefix, self.post_to_nickname, domain_full) cc_list = [] bookmark_post(self.server.recent_posts_cache, - self.server.session, + curr_session, base_dir, self.server.federation_list, self.post_to_nickname, @@ -9319,7 +9412,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.max_recent_posts, self.server.translate, page_number, base_dir, - self.server.session, + curr_session, self.server.cached_webfingers, self.server.person_cache, self.post_to_nickname, domain, @@ -9347,7 +9440,8 @@ class PubServer(BaseHTTPRequestHandler): else: print('WARN: Bookmarked post not found: ' + bookmark_filename) # self._post_to_outbox(bookmark_json, - # self.server.project_version, None) + # self.server.project_version, None, + # curr_session, proxy_type) actor_absolute = self._get_instance_url(calling_domain) + actor actor_path_str = \ actor_absolute + '/' + timeline_str + \ @@ -9364,7 +9458,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> None: + debug: str, + curr_session) -> None: """Button pressed to undo a bookmark """ page_number = 1 @@ -9401,14 +9496,17 @@ class PubServer(BaseHTTPRequestHandler): self._redirect_headers(actor_path_str, cookie, calling_domain) return - if not self._establish_session("undo_bookmarkButton"): + curr_session = \ + self._establish_session("undo_bookmarkButton", + curr_session, proxy_type) + if not curr_session: self._404() return undo_actor = \ local_actor_url(http_prefix, self.post_to_nickname, domain_full) cc_list = [] undo_bookmark_post(self.server.recent_posts_cache, - self.server.session, + curr_session, base_dir, self.server.federation_list, self.post_to_nickname, @@ -9426,7 +9524,8 @@ class PubServer(BaseHTTPRequestHandler): if self.server.iconsCache.get('bookmark_inactive.png'): del self.server.iconsCache['bookmark_inactive.png'] # self._post_to_outbox(undo_bookmark_json, - # self.server.project_version, None) + # self.server.project_version, None, + # curr_session, proxy_type) bookmark_filename = \ locate_post(base_dir, self.post_to_nickname, domain, bookmark_url) if bookmark_filename: @@ -9455,7 +9554,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.max_recent_posts, self.server.translate, page_number, base_dir, - self.server.session, + curr_session, self.server.cached_webfingers, self.server.person_cache, self.post_to_nickname, domain, @@ -9499,7 +9598,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> None: + debug: str, + curr_session) -> None: """Delete button is pressed on a post """ if not cookie: @@ -9550,7 +9650,10 @@ class PubServer(BaseHTTPRequestHandler): self._redirect_headers(actor + '/' + timeline_str, cookie, calling_domain) return - if not self._establish_session("deleteButton"): + curr_session = \ + self._establish_session("deleteButton", + curr_session, proxy_type) + if not curr_session: self._404() return @@ -9559,7 +9662,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.recent_posts_cache, self.server.max_recent_posts, self.server.translate, page_number, - self.server.session, base_dir, + curr_session, base_dir, delete_url, http_prefix, self.server.project_version, self.server.cached_webfingers, @@ -9598,7 +9701,8 @@ class 
PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str): + debug: str, + curr_session): """Mute button is pressed """ mute_url = path.split('?mute=')[1] @@ -9665,7 +9769,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.max_recent_posts, self.server.translate, page_number, base_dir, - self.server.session, + curr_session, self.server.cached_webfingers, self.server.person_cache, nickname, domain, @@ -9714,7 +9818,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str): + debug: str, + curr_session): """Undo mute button is pressed """ mute_url = path.split('?unmute=')[1] @@ -9781,7 +9886,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.max_recent_posts, self.server.translate, page_number, base_dir, - self.server.session, + curr_session, self.server.cached_webfingers, self.server.person_cache, nickname, domain, @@ -9828,7 +9933,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str, session) -> bool: + debug: str, + curr_session) -> bool: """Shows the replies to a post """ if not ('/statuses/' in path and '/users/' in path): @@ -9885,7 +9991,10 @@ class PubServer(BaseHTTPRequestHandler): } if self._request_http(): - if not self._establish_session("showRepliesToPost"): + curr_session = \ + self._establish_session("showRepliesToPost", + curr_session, proxy_type) + if not curr_session: self._404() return True recent_posts_cache = self.server.recent_posts_cache @@ -9908,7 +10017,7 @@ class PubServer(BaseHTTPRequestHandler): max_recent_posts, translate, base_dir, - session, + curr_session, cached_webfingers, person_cache, nickname, @@ -9938,7 +10047,7 @@ class PubServer(BaseHTTPRequestHandler): '_GET', '_show_replies_to_post', self.server.debug) else: - if self._secure_mode(): + if self._secure_mode(curr_session, proxy_type): msg = json.dumps(replies_json, ensure_ascii=False) msg = msg.encode('utf-8') protocol_str = 'application/json' @@ -9981,7 +10090,10 @@ class PubServer(BaseHTTPRequestHandler): # send the replies json if self._request_http(): - if not self._establish_session("showRepliesToPost2"): + curr_session = \ + self._establish_session("showRepliesToPost2", + curr_session, proxy_type) + if not curr_session: self._404() return True recent_posts_cache = self.server.recent_posts_cache @@ -10004,7 +10116,7 @@ class PubServer(BaseHTTPRequestHandler): max_recent_posts, translate, base_dir, - session, + curr_session, cached_webfingers, person_cache, nickname, @@ -10034,7 +10146,7 @@ class PubServer(BaseHTTPRequestHandler): '_GET', '_show_replies_to_post', self.server.debug) else: - if self._secure_mode(): + if self._secure_mode(curr_session, proxy_type): msg = json.dumps(replies_json, ensure_ascii=False) msg = msg.encode('utf-8') @@ -10058,7 +10170,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> bool: + debug: str, + curr_session) -> bool: """Show roles within profile screen """ named_status = path.split('/users/')[1] @@ -10121,7 +10234,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.project_version, base_dir, http_prefix, True, get_person, 'roles', - self.server.session, + curr_session, cached_webfingers, self.server.person_cache, yt_replace_domain, @@ -10152,7 +10265,7 @@ class PubServer(BaseHTTPRequestHandler): 
'_GET', '_show_roles', self.server.debug) else: - if self._secure_mode(): + if self._secure_mode(curr_session, proxy_type): roles_list = get_actor_roles_list(actor_json) msg = json.dumps(roles_list, ensure_ascii=False) @@ -10176,7 +10289,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> bool: + debug: str, + curr_session) -> bool: """Show skills on the profile screen """ named_status = path.split('/users/')[1] @@ -10246,7 +10360,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.project_version, base_dir, http_prefix, True, get_person, 'skills', - self.server.session, + curr_session, cached_webfingers, self.server.person_cache, yt_replace_domain, @@ -10280,7 +10394,8 @@ class PubServer(BaseHTTPRequestHandler): '_GET', '_show_skills', self.server.debug) else: - if self._secure_mode(): + if self._secure_mode(curr_session, + proxy_type): actor_skills_list = \ get_occupation_skills(actor_json) skills = \ @@ -10313,7 +10428,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> bool: + debug: str, + curr_session) -> bool: """get an individual post from the path /@nickname/statusnumber """ if '/@' not in path: @@ -10369,7 +10485,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain, i2p_domain, getreq_start_time, proxy_type, cookie, debug, - include_create_wrapper) + include_create_wrapper, + curr_session) fitness_performance(getreq_start_time, self.server.fitness, '_GET', '_show_individual_at_post', self.server.debug) @@ -10382,7 +10499,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> bool: + debug: str, + curr_session) -> bool: """Show the likers of a post """ if not authorized: @@ -10407,7 +10525,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.access_keys, self.server.recent_posts_cache, self.server.max_recent_posts, - self.server.session, + curr_session, self.server.cached_webfingers, self.server.person_cache, self.server.project_version, @@ -10442,7 +10560,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> bool: + debug: str, + curr_session) -> bool: """Show the announcers of a post """ if not authorized: @@ -10468,7 +10587,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.access_keys, self.server.recent_posts_cache, self.server.max_recent_posts, - self.server.session, + curr_session, self.server.cached_webfingers, self.server.person_cache, self.server.project_version, @@ -10506,7 +10625,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str, include_create_wrapper: bool) -> bool: + debug: str, include_create_wrapper: bool, + curr_session) -> bool: """Shows an individual post from its filename """ if not os.path.isfile(post_filename): @@ -10541,7 +10661,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.max_recent_posts, self.server.translate, base_dir, - self.server.session, + curr_session, self.server.cached_webfingers, self.server.person_cache, nickname, domain, port, @@ -10571,7 +10691,7 @@ class PubServer(BaseHTTPRequestHandler): '_GET', '_show_post_from_file', self.server.debug) else: - if self._secure_mode(): + if self._secure_mode(curr_session, proxy_type): if not 
include_create_wrapper and \ post_json_object['type'] == 'Create' and \ has_object_dict(post_json_object): @@ -10604,7 +10724,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> bool: + debug: str, + curr_session) -> bool: """Shows an individual post """ liked_by = None @@ -10654,7 +10775,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain, i2p_domain, getreq_start_time, proxy_type, cookie, debug, - include_create_wrapper) + include_create_wrapper, + curr_session) fitness_performance(getreq_start_time, self.server.fitness, '_GET', '_show_individual_post', self.server.debug) @@ -10667,7 +10789,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> bool: + debug: str, + curr_session) -> bool: """Shows an individual post from an account which you are following and where you have the notify checkbox set on person options """ @@ -10699,7 +10822,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain, i2p_domain, getreq_start_time, proxy_type, cookie, debug, - include_create_wrapper) + include_create_wrapper, + curr_session) fitness_performance(getreq_start_time, self.server.fitness, '_GET', '_show_notify_post', self.server.debug) @@ -10713,7 +10837,7 @@ class PubServer(BaseHTTPRequestHandler): getreq_start_time, proxy_type: str, cookie: str, debug: str, - recent_posts_cache: {}, session, + recent_posts_cache: {}, curr_session, default_timeline: str, max_recent_posts: int, translate: {}, @@ -10729,7 +10853,7 @@ class PubServer(BaseHTTPRequestHandler): if authorized: inbox_feed = \ person_box_json(recent_posts_cache, - session, + curr_session, base_dir, domain, port, @@ -10761,7 +10885,7 @@ class PubServer(BaseHTTPRequestHandler): # if no page was specified then show the first inbox_feed = \ person_box_json(recent_posts_cache, - session, + curr_session, base_dir, domain, port, @@ -10799,7 +10923,7 @@ class PubServer(BaseHTTPRequestHandler): max_recent_posts, translate, page_number, MAX_POSTS_IN_FEED, - session, + curr_session, base_dir, cached_webfingers, person_cache, @@ -10880,21 +11004,22 @@ class PubServer(BaseHTTPRequestHandler): return True return False - def _show_d_ms(self, authorized: bool, - calling_domain: str, path: str, - base_dir: str, http_prefix: str, - domain: str, domain_full: str, port: int, - onion_domain: str, i2p_domain: str, - getreq_start_time, - proxy_type: str, cookie: str, - debug: str) -> bool: + def _show_dms(self, authorized: bool, + calling_domain: str, path: str, + base_dir: str, http_prefix: str, + domain: str, domain_full: str, port: int, + onion_domain: str, i2p_domain: str, + getreq_start_time, + proxy_type: str, cookie: str, + debug: str, + curr_session) -> bool: """Shows the DMs timeline """ if '/users/' in path: if authorized: inbox_dm_feed = \ person_box_json(self.server.recent_posts_cache, - self.server.session, + curr_session, base_dir, domain, port, @@ -10920,7 +11045,7 @@ class PubServer(BaseHTTPRequestHandler): # if no page was specified then show the first inbox_dm_feed = \ person_box_json(self.server.recent_posts_cache, - self.server.session, + curr_session, base_dir, domain, port, @@ -10959,7 +11084,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.max_recent_posts, self.server.translate, page_number, MAX_POSTS_IN_FEED, - self.server.session, + curr_session, base_dir, self.server.cached_webfingers, self.server.person_cache, @@ -11000,7 +11125,7 
@@ class PubServer(BaseHTTPRequestHandler): self._write(msg) fitness_performance(getreq_start_time, self.server.fitness, - '_GET', '_show_d_ms', + '_GET', '_show_dms', self.server.debug) else: # don't need authorized fetch here because @@ -11014,7 +11139,7 @@ class PubServer(BaseHTTPRequestHandler): self._write(msg) fitness_performance(getreq_start_time, self.server.fitness, - '_GET', '_show_d_ms json', + '_GET', '_show_dms json', self.server.debug) return True else: @@ -11038,15 +11163,15 @@ class PubServer(BaseHTTPRequestHandler): domain: str, domain_full: str, port: int, onion_domain: str, i2p_domain: str, getreq_start_time, - proxy_type: str, cookie: str, - debug: str) -> bool: + proxy_type: str, cookie: str, debug: str, + curr_session) -> bool: """Shows the replies timeline """ if '/users/' in path: if authorized: inbox_replies_feed = \ person_box_json(self.server.recent_posts_cache, - self.server.session, + curr_session, base_dir, domain, port, @@ -11073,7 +11198,7 @@ class PubServer(BaseHTTPRequestHandler): # if no page was specified then show the first inbox_replies_feed = \ person_box_json(self.server.recent_posts_cache, - self.server.session, + curr_session, base_dir, domain, port, @@ -11111,7 +11236,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.max_recent_posts, self.server.translate, page_number, MAX_POSTS_IN_FEED, - self.server.session, + curr_session, base_dir, self.server.cached_webfingers, self.server.person_cache, @@ -11191,14 +11316,15 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> bool: + debug: str, + curr_session) -> bool: """Shows the media timeline """ if '/users/' in path: if authorized: inbox_media_feed = \ person_box_json(self.server.recent_posts_cache, - self.server.session, + curr_session, base_dir, domain, port, @@ -11225,7 +11351,7 @@ class PubServer(BaseHTTPRequestHandler): # if no page was specified then show the first inbox_media_feed = \ person_box_json(self.server.recent_posts_cache, - self.server.session, + curr_session, base_dir, domain, port, @@ -11260,7 +11386,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.max_recent_posts, self.server.translate, page_number, MAX_POSTS_IN_MEDIA_FEED, - self.server.session, + curr_session, base_dir, self.server.cached_webfingers, self.server.person_cache, @@ -11341,14 +11467,15 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> bool: + debug: str, + curr_session) -> bool: """Shows the blogs timeline """ if '/users/' in path: if authorized: inbox_blogs_feed = \ person_box_json(self.server.recent_posts_cache, - self.server.session, + curr_session, base_dir, domain, port, @@ -11375,7 +11502,7 @@ class PubServer(BaseHTTPRequestHandler): # if no page was specified then show the first inbox_blogs_feed = \ person_box_json(self.server.recent_posts_cache, - self.server.session, + curr_session, base_dir, domain, port, @@ -11410,7 +11537,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.max_recent_posts, self.server.translate, page_number, MAX_POSTS_IN_BLOGS_FEED, - self.server.session, + curr_session, base_dir, self.server.cached_webfingers, self.server.person_cache, @@ -11492,14 +11619,15 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> bool: + debug: str, + curr_session) -> bool: """Shows the news timeline """ 
if '/users/' in path: if authorized: inbox_news_feed = \ person_box_json(self.server.recent_posts_cache, - self.server.session, + curr_session, base_dir, domain, port, @@ -11529,7 +11657,7 @@ class PubServer(BaseHTTPRequestHandler): # if no page was specified then show the first inbox_news_feed = \ person_box_json(self.server.recent_posts_cache, - self.server.session, + curr_session, base_dir, domain, port, @@ -11568,7 +11696,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.max_recent_posts, self.server.translate, page_number, MAX_POSTS_IN_NEWS_FEED, - self.server.session, + curr_session, base_dir, self.server.cached_webfingers, self.server.person_cache, @@ -11650,14 +11778,15 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> bool: + debug: str, + curr_session) -> bool: """Shows the features timeline (all local blogs) """ if '/users/' in path: if authorized: inbox_features_feed = \ person_box_json(self.server.recent_posts_cache, - self.server.session, + curr_session, base_dir, domain, port, @@ -11687,7 +11816,7 @@ class PubServer(BaseHTTPRequestHandler): # if no page was specified then show the first inbox_features_feed = \ person_box_json(self.server.recent_posts_cache, - self.server.session, + curr_session, base_dir, domain, port, @@ -11731,7 +11860,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.translate, page_number, MAX_POSTS_IN_BLOGS_FEED, - self.server.session, + curr_session, base_dir, self.server.cached_webfingers, self.server.person_cache, @@ -11812,7 +11941,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> bool: + debug: str, + curr_session) -> bool: """Shows the shares timeline """ if '/users/' in path: @@ -11847,7 +11977,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.max_recent_posts, self.server.translate, page_number, MAX_POSTS_IN_FEED, - self.server.session, + curr_session, base_dir, self.server.cached_webfingers, self.server.person_cache, @@ -11902,7 +12032,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> bool: + debug: str, + curr_session) -> bool: """Shows the wanted timeline """ if '/users/' in path: @@ -11936,7 +12067,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.max_recent_posts, self.server.translate, page_number, MAX_POSTS_IN_FEED, - self.server.session, + curr_session, base_dir, self.server.cached_webfingers, self.server.person_cache, @@ -11992,14 +12123,15 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> bool: + debug: str, + curr_session) -> bool: """Shows the bookmarks timeline """ if '/users/' in path: if authorized: bookmarks_feed = \ person_box_json(self.server.recent_posts_cache, - self.server.session, + curr_session, base_dir, domain, port, @@ -12026,7 +12158,7 @@ class PubServer(BaseHTTPRequestHandler): # if no page was specified then show the first bookmarks_feed = \ person_box_json(self.server.recent_posts_cache, - self.server.session, + curr_session, base_dir, domain, port, @@ -12065,7 +12197,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.max_recent_posts, self.server.translate, page_number, MAX_POSTS_IN_FEED, - self.server.session, + curr_session, base_dir, self.server.cached_webfingers, self.server.person_cache, @@ 
-12145,13 +12277,14 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> bool: + debug: str, + curr_session) -> bool: """Shows the outbox timeline """ # get outbox feed for a person outbox_feed = \ person_box_json(self.server.recent_posts_cache, - self.server.session, + curr_session, base_dir, domain, port, path, http_prefix, MAX_POSTS_IN_FEED, 'outbox', authorized, @@ -12177,7 +12310,7 @@ class PubServer(BaseHTTPRequestHandler): page_str = '?page=' + str(page_number) outbox_feed = \ person_box_json(self.server.recent_posts_cache, - self.server.session, + curr_session, base_dir, domain, port, path + page_str, http_prefix, @@ -12210,7 +12343,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.max_recent_posts, self.server.translate, page_number, MAX_POSTS_IN_FEED, - self.server.session, + curr_session, base_dir, self.server.cached_webfingers, self.server.person_cache, @@ -12253,7 +12386,7 @@ class PubServer(BaseHTTPRequestHandler): '_GET', '_show_outbox_timeline', self.server.debug) else: - if self._secure_mode(): + if self._secure_mode(curr_session, proxy_type): msg = json.dumps(outbox_feed, ensure_ascii=False) msg = msg.encode('utf-8') @@ -12277,14 +12410,15 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> bool: + debug: str, + curr_session) -> bool: """Shows the moderation timeline """ if '/users/' in path: if authorized: moderation_feed = \ person_box_json(self.server.recent_posts_cache, - self.server.session, + curr_session, base_dir, domain, port, @@ -12310,7 +12444,7 @@ class PubServer(BaseHTTPRequestHandler): # if no page was specified then show the first moderation_feed = \ person_box_json(self.server.recent_posts_cache, - self.server.session, + curr_session, base_dir, domain, port, @@ -12349,7 +12483,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.max_recent_posts, self.server.translate, page_number, MAX_POSTS_IN_FEED, - self.server.session, + curr_session, base_dir, self.server.cached_webfingers, self.server.person_cache, @@ -12426,7 +12560,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str, shares_file_type: str) -> bool: + debug: str, shares_file_type: str, + curr_session) -> bool: """Shows the shares feed """ shares = \ @@ -12455,7 +12590,10 @@ class PubServer(BaseHTTPRequestHandler): search_path2 = search_path.replace('/' + shares_file_type, '') get_person = person_lookup(domain, search_path2, base_dir) if get_person: - if not self._establish_session("show_shares_feed"): + curr_session = \ + self._establish_session("show_shares_feed", + curr_session, proxy_type) + if not curr_session: self._404() self.server.getreq_busy = False return True @@ -12490,7 +12628,7 @@ class PubServer(BaseHTTPRequestHandler): base_dir, http_prefix, authorized, get_person, shares_file_type, - self.server.session, + curr_session, self.server.cached_webfingers, self.server.person_cache, self.server.yt_replace_domain, @@ -12525,7 +12663,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.getreq_busy = False return True else: - if self._secure_mode(): + if self._secure_mode(curr_session, proxy_type): msg = json.dumps(shares, ensure_ascii=False) msg = msg.encode('utf-8') @@ -12549,7 +12687,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, 
cookie: str, - debug: str) -> bool: + debug: str, + curr_session) -> bool: """Shows the following feed """ following = \ @@ -12581,7 +12720,10 @@ class PubServer(BaseHTTPRequestHandler): search_path.replace('/following', ''), base_dir) if get_person: - if not self._establish_session("show_following_feed"): + curr_session = \ + self._establish_session("show_following_feed", + curr_session, proxy_type) + if not curr_session: self._404() return True @@ -12618,7 +12760,7 @@ class PubServer(BaseHTTPRequestHandler): base_dir, http_prefix, authorized, get_person, 'following', - self.server.session, + curr_session, self.server.cached_webfingers, self.server.person_cache, self.server.yt_replace_domain, @@ -12652,7 +12794,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.debug) return True else: - if self._secure_mode(): + if self._secure_mode(curr_session, proxy_type): msg = json.dumps(following, ensure_ascii=False).encode('utf-8') msglen = len(msg) @@ -12675,7 +12817,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> bool: + debug: str, + curr_session) -> bool: """Shows the followers feed """ followers = \ @@ -12707,7 +12850,10 @@ class PubServer(BaseHTTPRequestHandler): search_path.replace('/followers', ''), base_dir) if get_person: - if not self._establish_session("show_followers_feed"): + curr_session = \ + self._establish_session("show_followers_feed", + curr_session, proxy_type) + if not curr_session: self._404() return True @@ -12745,7 +12891,7 @@ class PubServer(BaseHTTPRequestHandler): http_prefix, authorized, get_person, 'followers', - self.server.session, + curr_session, self.server.cached_webfingers, self.server.person_cache, self.server.yt_replace_domain, @@ -12779,7 +12925,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.debug) return True else: - if self._secure_mode(): + if self._secure_mode(curr_session, proxy_type): msg = json.dumps(followers, ensure_ascii=False).encode('utf-8') msglen = len(msg) @@ -12844,7 +12990,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - debug: str) -> bool: + debug: str, + curr_session) -> bool: """Shows the profile for a person """ # look up a person @@ -12852,7 +12999,10 @@ class PubServer(BaseHTTPRequestHandler): if not actor_json: return False if self._request_http(): - if not self._establish_session("showPersonProfile"): + curr_session = \ + self._establish_session("showPersonProfile", + curr_session, proxy_type) + if not curr_session: self._404() return True @@ -12886,7 +13036,7 @@ class PubServer(BaseHTTPRequestHandler): http_prefix, authorized, actor_json, 'posts', - self.server.session, + curr_session, self.server.cached_webfingers, self.server.person_cache, self.server.yt_replace_domain, @@ -12919,7 +13069,7 @@ class PubServer(BaseHTTPRequestHandler): if self.server.debug: print('DEBUG: html actor sent') else: - if self._secure_mode(): + if self._secure_mode(curr_session, proxy_type): accept_str = self.headers['Accept'] msg_str = json.dumps(actor_json, ensure_ascii=False) msg = msg_str.encode('utf-8') @@ -13029,7 +13179,8 @@ class PubServer(BaseHTTPRequestHandler): onion_domain: str, i2p_domain: str, getreq_start_time, proxy_type: str, cookie: str, - translate: {}, debug: str) -> bool: + translate: {}, debug: str, + curr_session) -> bool: """Shows a blog page """ page_number = 1 @@ -13050,12 +13201,15 @@ class PubServer(BaseHTTPRequestHandler): 
page_number = 1 elif page_number > 10: page_number = 10 - if not self._establish_session("showBlogPage"): + curr_session = \ + self._establish_session("showBlogPage", + curr_session, proxy_type) + if not curr_session: self._404() self.server.getreq_busy = False return True msg = html_blog_page(authorized, - self.server.session, + curr_session, base_dir, http_prefix, translate, @@ -13601,7 +13755,8 @@ class PubServer(BaseHTTPRequestHandler): reply_category: str, domain: str, domain_full: str, getreq_start_time, cookie, - no_drop_down: bool, conversation_id: str) -> bool: + no_drop_down: bool, conversation_id: str, + curr_session, proxy_type: str) -> bool: """Shows the new post screen """ is_new_post_endpoint = False @@ -13660,7 +13815,7 @@ class PubServer(BaseHTTPRequestHandler): conversation_id, self.server.recent_posts_cache, self.server.max_recent_posts, - self.server.session, + curr_session, self.server.cached_webfingers, self.server.person_cache, self.server.port, @@ -13901,7 +14056,8 @@ class PubServer(BaseHTTPRequestHandler): def _send_block(self, http_prefix: str, blocker_nickname: str, blocker_domain_full: str, - blocking_nickname: str, blocking_domain_full: str) -> bool: + blocking_nickname: str, blocking_domain_full: str, + curr_session, proxy_type: str) -> bool: if blocker_domain_full == blocking_domain_full: if blocker_nickname == blocking_nickname: # don't block self @@ -13923,7 +14079,8 @@ class PubServer(BaseHTTPRequestHandler): 'cc': [cc_url] } self._post_to_outbox(block_json, self.server.project_version, - blocker_nickname) + blocker_nickname, + curr_session, proxy_type) return True def _get_referer_domain(self, ua_str: str) -> str: @@ -13972,6 +14129,8 @@ class PubServer(BaseHTTPRequestHandler): return False def do_GET(self): + curr_session = self.server.session + proxy_type = self.server.proxy_type calling_domain = self.server.domain_full if self.headers.get('Host'): @@ -14109,7 +14268,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, + proxy_type, None, self.server.debug, self.server.enable_shared_inbox): return @@ -14419,7 +14578,10 @@ class PubServer(BaseHTTPRequestHandler): '_GET', '_masto_api[calling_domain]', self.server.debug) - if not self._establish_session("GET"): + curr_session = \ + self._establish_session("GET", curr_session, + proxy_type) + if not curr_session: self._404() fitness_performance(getreq_start_time, self.server.fitness, '_GET', 'session fail', @@ -14520,9 +14682,10 @@ class PubServer(BaseHTTPRequestHandler): self.server.http_prefix, self.server.domain, self.server.port, - self.server.proxy_type, + proxy_type, getreq_start_time, - self.server.debug) + self.server.debug, + curr_session) return if self.path == '/newswire.xml': @@ -14532,9 +14695,10 @@ class PubServer(BaseHTTPRequestHandler): self.server.http_prefix, self.server.domain, self.server.port, - self.server.proxy_type, + proxy_type, getreq_start_time, - self.server.debug) + self.server.debug, + curr_session) return # RSS 2.0 @@ -14547,9 +14711,10 @@ class PubServer(BaseHTTPRequestHandler): self.server.http_prefix, self.server.domain, self.server.port, - self.server.proxy_type, + proxy_type, getreq_start_time, - self.server.debug) + self.server.debug, + curr_session) else: self._get_rss2site(authorized, calling_domain, self.path, @@ -14557,10 +14722,11 @@ class PubServer(BaseHTTPRequestHandler): self.server.http_prefix, self.server.domain_full, self.server.port, - self.server.proxy_type, + 
proxy_type, self.server.translate, getreq_start_time, - self.server.debug) + self.server.debug, + curr_session) return fitness_performance(getreq_start_time, self.server.fitness, @@ -14576,10 +14742,11 @@ class PubServer(BaseHTTPRequestHandler): self.server.http_prefix, self.server.domain, self.server.port, - self.server.proxy_type, + proxy_type, getreq_start_time, self.server.debug, - self.server.system_language) + self.server.system_language, + curr_session) return users_in_path = False @@ -14795,11 +14962,15 @@ class PubServer(BaseHTTPRequestHandler): self.path == '/blogs' or self.path == '/blogs/'): if '/rss.xml' not in self.path: - if not self._establish_session("show the main blog page"): + curr_session = \ + self._establish_session("show the main blog page", + curr_session, + proxy_type) + if not curr_session: self._404() return msg = html_blog_view(authorized, - self.server.session, + curr_session, self.server.base_dir, self.server.http_prefix, self.server.translate, @@ -14841,9 +15012,10 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, + proxy_type, cookie, self.server.translate, - self.server.debug): + self.server.debug, + curr_session): return # list of registered devices for e2ee @@ -14886,7 +15058,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, cookie, self.server.debug, - authorized) + authorized, + curr_session, + proxy_type) return fitness_performance(getreq_start_time, self.server.fitness, @@ -14902,7 +15076,7 @@ class PubServer(BaseHTTPRequestHandler): if blog_filename and nickname: post_json_object = load_json(blog_filename) if is_blog_post(post_json_object): - msg = html_blog_post(self.server.session, + msg = html_blog_post(curr_session, authorized, self.server.base_dir, self.server.http_prefix, @@ -15812,7 +15986,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.port, self.server.onion_domain, self.server.i2p_domain, - getreq_start_time) + getreq_start_time, + curr_session, + proxy_type) self.server.getreq_busy = False return self._hashtag_search(calling_domain, @@ -15824,7 +16000,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.port, self.server.onion_domain, self.server.i2p_domain, - getreq_start_time) + getreq_start_time, + curr_session, + proxy_type) self.server.getreq_busy = False return @@ -16037,7 +16215,7 @@ class PubServer(BaseHTTPRequestHandler): if authorized and html_getreq and '?repeat=' in self.path: self._announce_button(calling_domain, self.path, self.server.base_dir, - cookie, self.server.proxy_type, + cookie, proxy_type, self.server.http_prefix, self.server.domain, self.server.domain_full, @@ -16046,7 +16224,8 @@ class PubServer(BaseHTTPRequestHandler): self.server.i2p_domain, getreq_start_time, repeat_private, - self.server.debug) + self.server.debug, + curr_session) self.server.getreq_busy = False return @@ -16061,7 +16240,7 @@ class PubServer(BaseHTTPRequestHandler): if authorized and html_getreq and '?unrepeat=' in self.path: self._undo_announce_button(calling_domain, self.path, self.server.base_dir, - cookie, self.server.proxy_type, + cookie, proxy_type, self.server.http_prefix, self.server.domain, self.server.domain_full, @@ -16071,7 +16250,8 @@ class PubServer(BaseHTTPRequestHandler): getreq_start_time, repeat_private, self.server.debug, - self.server.recent_posts_cache) + self.server.recent_posts_cache, + curr_session) self.server.getreq_busy = False return @@ -16092,7 +16272,7 @@ class 
PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, + proxy_type, self.server.debug, self.server.newswire) self.server.getreq_busy = False @@ -16111,7 +16291,7 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, + proxy_type, self.server.debug, self.server.newswire) self.server.getreq_busy = False @@ -16130,8 +16310,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - self.server.debug) + proxy_type, + self.server.debug, + curr_session) self.server.getreq_busy = False return @@ -16152,8 +16333,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - self.server.debug) + proxy_type, + self.server.debug, + curr_session) self.server.getreq_busy = False return @@ -16171,9 +16353,10 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, + proxy_type, cookie, - self.server.debug) + self.server.debug, + curr_session) self.server.getreq_busy = False return @@ -16191,8 +16374,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug) + proxy_type, + cookie, self.server.debug, + curr_session) self.server.getreq_busy = False return @@ -16212,9 +16396,10 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, + proxy_type, cookie, - self.server.debug) + self.server.debug, + curr_session) self.server.getreq_busy = False return @@ -16234,8 +16419,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug) + proxy_type, + cookie, self.server.debug, + curr_session) self.server.getreq_busy = False return @@ -16254,8 +16440,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug) + proxy_type, + cookie, self.server.debug, + curr_session) self.server.getreq_busy = False return @@ -16274,8 +16461,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug) + proxy_type, + cookie, self.server.debug, + curr_session) self.server.getreq_busy = False return @@ -16294,8 +16482,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, cookie, - self.server.debug) + proxy_type, cookie, + self.server.debug, + curr_session) self.server.getreq_busy = False return @@ -16314,8 +16503,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, cookie, - self.server.debug) + proxy_type, cookie, + self.server.debug, + curr_session) self.server.getreq_busy = False return @@ -16334,8 +16524,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, cookie, - self.server.debug) + proxy_type, cookie, + self.server.debug, + curr_session) 
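# A condensed sketch of the per-request session choice these GET handlers now
# depend on - essentially what get_session_for_domains(), added to session.py
# later in this series, implements - assuming the server object carries the
# session, session_onion and session_i2p attributes set up in run_daemon.
# The helper name below is illustrative only, not part of the patch.
def _session_for_destination(server, domain: str):
    """Pick a session and proxy type suited to the destination domain"""
    if domain.endswith('.onion') and server.session_onion:
        # onion destinations go out over the Tor proxy
        return server.session_onion, 'tor'
    if domain.endswith('.i2p') and server.session_i2p:
        # i2p destinations go out over the i2p proxy
        return server.session_i2p, 'i2p'
    # anything else uses the default clearnet session and proxy type
    return server.session, server.proxy_type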
self.server.getreq_busy = False return @@ -16354,8 +16545,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, cookie, - self.server.debug) + proxy_type, cookie, + self.server.debug, + curr_session) self.server.getreq_busy = False return @@ -16570,7 +16762,8 @@ class PubServer(BaseHTTPRequestHandler): self.server.domain, self.server.domain_full, getreq_start_time, - cookie, no_drop_down, conversation_id): + cookie, no_drop_down, conversation_id, + curr_session, proxy_type): self.server.getreq_busy = False return @@ -16589,8 +16782,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug): + proxy_type, + cookie, self.server.debug, + curr_session): self.server.getreq_busy = False return @@ -16605,8 +16799,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug): + proxy_type, + cookie, self.server.debug, + curr_session): self.server.getreq_busy = False return @@ -16621,8 +16816,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug): + proxy_type, + cookie, self.server.debug, + curr_session): self.server.getreq_busy = False return @@ -16642,9 +16838,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, cookie, + proxy_type, cookie, self.server.debug, - self.server.session): + curr_session): self.server.getreq_busy = False return @@ -16664,8 +16860,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug): + proxy_type, + cookie, self.server.debug, + curr_session): self.server.getreq_busy = False return @@ -16685,8 +16882,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug): + proxy_type, + cookie, self.server.debug, + curr_session): self.server.getreq_busy = False return @@ -16705,8 +16903,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug): + proxy_type, + cookie, self.server.debug, + curr_session): self.server.getreq_busy = False return @@ -16723,8 +16922,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug): + proxy_type, + cookie, self.server.debug, + curr_session): self.server.getreq_busy = False return @@ -16744,10 +16944,10 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, + proxy_type, cookie, self.server.debug, self.server.recent_posts_cache, - self.server.session, + curr_session, self.server.default_timeline, self.server.max_recent_posts, self.server.translate, @@ -16766,18 +16966,19 @@ class PubServer(BaseHTTPRequestHandler): # get the direct messages timeline for a given person if self.path.endswith('/dm') or '/dm?page=' in self.path: - if self._show_d_ms(authorized, - calling_domain, self.path, - self.server.base_dir, - 
self.server.http_prefix, - self.server.domain, - self.server.domain_full, - self.server.port, - self.server.onion_domain, - self.server.i2p_domain, - getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug): + if self._show_dms(authorized, + calling_domain, self.path, + self.server.base_dir, + self.server.http_prefix, + self.server.domain, + self.server.domain_full, + self.server.port, + self.server.onion_domain, + self.server.i2p_domain, + getreq_start_time, + proxy_type, + cookie, self.server.debug, + curr_session): self.server.getreq_busy = False return @@ -16797,8 +16998,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug): + proxy_type, + cookie, self.server.debug, + curr_session): self.server.getreq_busy = False return @@ -16818,8 +17020,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug): + proxy_type, + cookie, self.server.debug, + curr_session): self.server.getreq_busy = False return @@ -16839,8 +17042,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug): + proxy_type, + cookie, self.server.debug, + curr_session): self.server.getreq_busy = False return @@ -16860,8 +17064,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug): + proxy_type, + cookie, self.server.debug, + curr_session): self.server.getreq_busy = False return @@ -16878,8 +17083,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug): + proxy_type, + cookie, self.server.debug, + curr_session): self.server.getreq_busy = False return @@ -16899,8 +17105,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug): + proxy_type, + cookie, self.server.debug, + curr_session): self.server.getreq_busy = False return @@ -16916,8 +17123,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug): + proxy_type, + cookie, self.server.debug, + curr_session): self.server.getreq_busy = False return @@ -17016,8 +17224,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug): + proxy_type, + cookie, self.server.debug, + curr_session): self.server.getreq_busy = False return @@ -17038,8 +17247,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug): + proxy_type, + cookie, self.server.debug, + curr_session): self.server.getreq_busy = False return @@ -17060,8 +17270,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug): + proxy_type, + cookie, self.server.debug, + curr_session): self.server.getreq_busy = False return @@ -17079,8 +17290,9 @@ class PubServer(BaseHTTPRequestHandler): 
self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug, 'shares'): + proxy_type, + cookie, self.server.debug, 'shares', + curr_session): self.server.getreq_busy = False return @@ -17098,8 +17310,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug): + proxy_type, + cookie, self.server.debug, + curr_session): self.server.getreq_busy = False return @@ -17117,8 +17330,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug): + proxy_type, + cookie, self.server.debug, + curr_session): self.server.getreq_busy = False return @@ -17137,8 +17351,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, getreq_start_time, - self.server.proxy_type, - cookie, self.server.debug): + proxy_type, + cookie, self.server.debug, + curr_session): self.server.getreq_busy = False return @@ -17155,7 +17370,8 @@ class PubServer(BaseHTTPRequestHandler): self.server.getreq_busy = False return - if not self._secure_mode(): + if not self._secure_mode(curr_session, + proxy_type): if self.server.debug: print('WARN: Unauthorized GET') self._404() @@ -17400,7 +17616,8 @@ class PubServer(BaseHTTPRequestHandler): length: int, post_bytes, boundary: str, calling_domain: str, cookie: str, authorized: bool, - content_license_url: str) -> int: + content_license_url: str, + curr_session, proxy_type: str) -> int: # Note: this needs to happen synchronously # 0=this is not a new post # 1=new post success @@ -17633,7 +17850,8 @@ class PubServer(BaseHTTPRequestHandler): return 1 if self._post_to_outbox(message_json, self.server.project_version, - nickname): + nickname, + curr_session, proxy_type): populate_replies(self.server.base_dir, self.server.http_prefix, self.server.domain_full, @@ -17716,7 +17934,8 @@ class PubServer(BaseHTTPRequestHandler): return 1 if self._post_to_outbox(message_json, self.server.project_version, - nickname): + nickname, + curr_session, proxy_type): refresh_newswire(self.server.base_dir) populate_replies(self.server.base_dir, self.server.http_prefix, @@ -17771,7 +17990,7 @@ class PubServer(BaseHTTPRequestHandler): tags.append(tag) # get list of tags fields['message'] = \ - replace_emoji_from_tags(self.server.session, + replace_emoji_from_tags(curr_session, self.server.base_dir, fields['message'], tags, 'content', @@ -17877,7 +18096,8 @@ class PubServer(BaseHTTPRequestHandler): return 1 if self._post_to_outbox(message_json, self.server.project_version, - nickname): + nickname, + curr_session, proxy_type): populate_replies(self.server.base_dir, self.server.http_prefix, self.server.domain, @@ -17937,7 +18157,8 @@ class PubServer(BaseHTTPRequestHandler): return 1 if self._post_to_outbox(message_json, self.server.project_version, - nickname): + nickname, + curr_session, proxy_type): populate_replies(self.server.base_dir, self.server.http_prefix, self.server.domain, @@ -18012,7 +18233,8 @@ class PubServer(BaseHTTPRequestHandler): str(message_json['object']['to'])) if self._post_to_outbox(message_json, self.server.project_version, - nickname): + nickname, + curr_session, proxy_type): populate_replies(self.server.base_dir, self.server.http_prefix, self.server.domain, @@ -18076,7 +18298,8 @@ class PubServer(BaseHTTPRequestHandler): str(message_json['object']['to'])) if 
self._post_to_outbox(message_json, self.server.project_version, - nickname): + nickname, + curr_session, proxy_type): return 1 return -1 elif post_type == 'newreport': @@ -18115,7 +18338,8 @@ class PubServer(BaseHTTPRequestHandler): if message_json: if self._post_to_outbox(message_json, self.server.project_version, - nickname): + nickname, + curr_session, proxy_type): return 1 return -1 elif post_type == 'newquestion': @@ -18165,7 +18389,8 @@ class PubServer(BaseHTTPRequestHandler): print('DEBUG: new Question') if self._post_to_outbox(message_json, self.server.project_version, - nickname): + nickname, + curr_session, proxy_type): return 1 return -1 elif post_type == 'newshare' or post_type == 'newwanted': @@ -18247,7 +18472,8 @@ class PubServer(BaseHTTPRequestHandler): def _receive_new_post(self, post_type: str, path: str, calling_domain: str, cookie: str, authorized: bool, - content_license_url: str) -> int: + content_license_url: str, + curr_session, proxy_type: str) -> int: """A new post has been created This creates a thread to send the new post """ @@ -18351,7 +18577,8 @@ class PubServer(BaseHTTPRequestHandler): post_bytes, boundary, calling_domain, cookie, authorized, - content_license_url) + content_license_url, + curr_session, proxy_type) return page_number def _crypto_ap_iread_handle(self): @@ -18512,9 +18739,14 @@ class PubServer(BaseHTTPRequestHandler): self._400() def do_POST(self): + curr_session = self.server.session + proxy_type = self.server.proxy_type postreq_start_time = time.time() - if not self._establish_session("POST"): + curr_session = \ + self._establish_session("POST", curr_session, + proxy_type) + if not curr_session: fitness_performance(postreq_start_time, self.server.fitness, '_POST', 'create_session', self.server.debug) @@ -18667,7 +18899,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.i2p_domain, self.server.debug, self.server.allow_local_network_access, self.server.system_language, - self.server.content_license_url) + self.server.content_license_url, + curr_session, + proxy_type) self.server.postreq_busy = False return @@ -18776,7 +19010,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.onion_domain, self.server.i2p_domain, postreq_start_time, {}, - self.server.debug) + self.server.debug, + curr_session, + proxy_type) self.server.postreq_busy = False return @@ -18803,7 +19039,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.domain_full, self.server.onion_domain, self.server.i2p_domain, - self.server.debug) + self.server.debug, + curr_session, + proxy_type) self.server.postreq_busy = False return @@ -18875,7 +19113,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.port, self.server.onion_domain, self.server.i2p_domain, - self.server.debug) + self.server.debug, + curr_session, + proxy_type) self.server.postreq_busy = False return @@ -18932,7 +19172,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.port, self.server.onion_domain, self.server.i2p_domain, - self.server.debug) + self.server.debug, + curr_session, + proxy_type) self.server.postreq_busy = False return @@ -18952,7 +19194,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.port, self.server.onion_domain, self.server.i2p_domain, - self.server.debug) + self.server.debug, + curr_session, + proxy_type) self.server.postreq_busy = False return @@ -19085,7 +19329,8 @@ class PubServer(BaseHTTPRequestHandler): self._receive_new_post(curr_post_type, self.path, calling_domain, cookie, authorized, - self.server.content_license_url) + self.server.content_license_url, 
+ curr_session, proxy_type) if page_number: print(curr_post_type + ' post received') nickname = self.path.split('/users/')[1] @@ -19295,7 +19540,8 @@ class PubServer(BaseHTTPRequestHandler): # https://www.w3.org/TR/activitypub/#object-without-create if self.outbox_authenticated: if self._post_to_outbox(message_json, - self.server.project_version, None): + self.server.project_version, None, + curr_session, proxy_type): if message_json.get('id'): locn_str = remove_id_ending(message_json['id']) self.headers['Location'] = locn_str From 794f82dd58e9b75b87aa6b44da0621d0c06ea104 Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Fri, 11 Mar 2022 13:27:54 +0000 Subject: [PATCH 02/31] Improve support for onion and i2p addresses existing alongside clearnet --- cache.py | 4 ++ daemon.py | 45 +++++++++++++++-------- inbox.py | 103 ++++++++++++++++++++++++++++++++++++++-------------- outbox.py | 7 ---- schedule.py | 15 +++++++- session.py | 45 +++++++++++++++++++++++ 6 files changed, 167 insertions(+), 52 deletions(-) diff --git a/cache.py b/cache.py index 10deff344..1ba24e7e9 100644 --- a/cache.py +++ b/cache.py @@ -141,6 +141,7 @@ def get_person_pub_key(base_dir: str, session, person_url: str, person_cache: {}, debug: bool, project_version: str, http_prefix: str, domain: str, onion_domain: str, + i2p_domain: str, signing_priv_key_pem: str) -> str: if not person_url: return None @@ -162,6 +163,9 @@ def get_person_pub_key(base_dir: str, session, person_url: str, if onion_domain: if '.onion/' in person_url: person_domain = onion_domain + elif i2p_domain: + if '.i2p/' in person_url: + person_domain = i2p_domain profile_str = 'https://www.w3.org/ns/activitystreams' accept_str = \ 'application/activity+json; profile="' + profile_str + '"' diff --git a/daemon.py b/daemon.py index 71c27eab9..805fda549 100644 --- a/daemon.py +++ b/daemon.py @@ -22,6 +22,9 @@ from hashlib import sha256 from hashlib import md5 from shutil import copyfile from session import create_session +from session import get_session_for_domain +from session import get_session_for_domains +from session import set_session_for_sender from webfinger import webfinger_meta from webfinger import webfinger_node_info from webfinger import webfinger_lookup @@ -654,7 +657,7 @@ class PubServer(BaseHTTPRequestHandler): print('DEBUG: creating new session during ' + calling_function) curr_session = create_session(proxy_type) if curr_session: - self.server.session = curr_session + set_session_for_sender(self.server, proxy_type, curr_session) return curr_session print('ERROR: GET failed to create session during ' + calling_function) @@ -693,7 +696,9 @@ class PubServer(BaseHTTPRequestHandler): self.server.person_cache, self.server.debug, self.server.project_version, self.server.http_prefix, - self.server.domain, self.server.onion_domain, + self.server.domain, + self.server.onion_domain, + self.server.i2p_domain, self.server.signing_priv_key_pem) if not pub_key: if self.server.debug: @@ -1428,6 +1433,9 @@ class PubServer(BaseHTTPRequestHandler): Client to server message post https://www.w3.org/TR/activitypub/#client-to-server-outbox-delivery """ + if not curr_session: + return False + city = self.server.city if post_to_nickname: @@ -14129,8 +14137,6 @@ class PubServer(BaseHTTPRequestHandler): return False def do_GET(self): - curr_session = self.server.session - proxy_type = self.server.proxy_type calling_domain = self.server.domain_full if self.headers.get('Host'): @@ -14176,6 +14182,10 @@ class PubServer(BaseHTTPRequestHandler): referer_domain = 
self._get_referer_domain(ua_str) + curr_session, proxy_type = \ + get_session_for_domains(self.server, + calling_domain, referer_domain) + getreq_start_time = time.time() fitness_performance(getreq_start_time, self.server.fitness, @@ -18739,20 +18749,9 @@ class PubServer(BaseHTTPRequestHandler): self._400() def do_POST(self): - curr_session = self.server.session proxy_type = self.server.proxy_type postreq_start_time = time.time() - curr_session = \ - self._establish_session("POST", curr_session, - proxy_type) - if not curr_session: - fitness_performance(postreq_start_time, self.server.fitness, - '_POST', 'create_session', - self.server.debug) - self._404() - return - if self.server.debug: print('DEBUG: POST to ' + self.server.base_dir + ' path: ' + self.path + ' busy: ' + @@ -18815,6 +18814,19 @@ class PubServer(BaseHTTPRequestHandler): self.server.postreq_busy = False return + curr_session, proxy_type = \ + get_session_for_domain(self.server, calling_domain) + + curr_session = \ + self._establish_session("POST", curr_session, + proxy_type) + if not curr_session: + fitness_performance(postreq_start_time, self.server.fitness, + '_POST', 'create_session', + self.server.debug) + self._404() + return + # returns after this point should set postreq_busy to False # remove any trailing slashes from the path @@ -20090,7 +20102,8 @@ def run_daemon(crawlers_allowed: [], httpd.favicons_cache = {} httpd.proxy_type = proxy_type httpd.session = None - httpd.session_last_update = 0 + httpd.session_onion = None + httpd.session_i2p = None httpd.last_getreq = 0 httpd.last_postreq = 0 httpd.getreq_busy = False diff --git a/inbox.py b/inbox.py index 9ec1c9d5a..3c614ea31 100644 --- a/inbox.py +++ b/inbox.py @@ -1824,7 +1824,8 @@ def _receive_delete(session, handle: str, is_group: bool, base_dir: str, def _receive_announce(recent_posts_cache: {}, session, handle: str, is_group: bool, base_dir: str, http_prefix: str, - domain: str, onion_domain: str, port: int, + domain: str, + onion_domain: str, i2p_domain: str, port: int, send_threads: [], post_log: [], cached_webfingers: {}, person_cache: {}, message_json: {}, federation_list: [], debug: bool, translate: {}, @@ -2034,6 +2035,7 @@ def _receive_announce(recent_posts_cache: {}, person_cache, debug, __version__, http_prefix, domain, onion_domain, + i2p_domain, signing_priv_key_pem) if pub_key: if debug: @@ -2322,6 +2324,7 @@ def _valid_post_content(base_dir: str, nickname: str, domain: str, def _obtain_avatar_for_reply_post(session, base_dir: str, http_prefix: str, domain: str, onion_domain: str, + i2p_domain: str, person_cache: {}, post_json_object: {}, debug: bool, signing_priv_key_pem: str) -> None: @@ -2355,7 +2358,8 @@ def _obtain_avatar_for_reply_post(session, base_dir: str, http_prefix: str, get_person_pub_key(base_dir, session, lookup_actor, person_cache, debug, __version__, http_prefix, - domain, onion_domain, signing_priv_key_pem) + domain, onion_domain, i2p_domain, + signing_priv_key_pem) if pub_key: if debug: print('DEBUG: public key obtained for reply: ' + lookup_actor) @@ -3403,7 +3407,7 @@ def _inbox_after_initial(recent_posts_cache: {}, max_recent_posts: int, if _receive_announce(recent_posts_cache, session, handle, is_group, base_dir, http_prefix, - domain, onion_domain, port, + domain, onion_domain, i2p_domain, port, send_threads, post_log, cached_webfingers, person_cache, @@ -3565,7 +3569,8 @@ def _inbox_after_initial(recent_posts_cache: {}, max_recent_posts: int, # get the avatar for a reply/announce _obtain_avatar_for_reply_post(session, 
base_dir, - http_prefix, domain, onion_domain, + http_prefix, domain, + onion_domain, i2p_domain, person_cache, post_json_object, debug, signing_priv_key_pem) @@ -3915,7 +3920,7 @@ def _receive_follow_request(session, base_dir: str, http_prefix: str, message_json: {}, federation_list: [], debug: bool, project_version: str, max_followers: int, onion_domain: str, - signing_priv_key_pem: str, + i2p_domain: str, signing_priv_key_pem: str, unit_test: bool) -> bool: """Receives a follow request within the POST section of HTTPServer """ @@ -4036,7 +4041,7 @@ def _receive_follow_request(session, base_dir: str, http_prefix: str, if not get_person_pub_key(base_dir, session, message_json['actor'], person_cache, debug, project_version, http_prefix, domain_to_follow, onion_domain, - signing_priv_key_pem): + i2p_domain, signing_priv_key_pem): if debug: print('Unable to obtain following actor: ' + message_json['actor']) @@ -4074,7 +4079,8 @@ def _receive_follow_request(session, base_dir: str, http_prefix: str, if not get_person_pub_key(base_dir, session, message_json['actor'], person_cache, debug, project_version, http_prefix, domain_to_follow, - onion_domain, signing_priv_key_pem): + onion_domain, i2p_domain, + signing_priv_key_pem): if debug: print('Unable to obtain following actor: ' + message_json['actor']) @@ -4153,10 +4159,24 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int, """Processes received items and moves them to the appropriate directories """ + print('Starting new session when starting inbox queue') curr_session_time = int(time.time()) session_last_update = curr_session_time - print('Starting new session when starting inbox queue') session = create_session(proxy_type) + + # is this is a clearnet instance then optionally start sessions + # for onion and i2p domains + session_onion = None + session_i2p = None + session_last_update_onion = curr_session_time + session_last_update_i2p = curr_session_time + if proxy_type != 'tor' and onion_domain: + print('Starting onion session when starting inbox queue') + session_onion = create_session('tor') + if proxy_type != 'i2p' and i2p_domain: + print('Starting i2p session when starting inbox queue') + session_i2p = create_session('i2p') + inbox_handle = 'inbox@' + domain if debug: print('DEBUG: Inbox queue running') @@ -4209,16 +4229,6 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int, _restore_queue_items(base_dir, queue) continue - curr_time = int(time.time()) - - # recreate the session periodically - if not session or curr_time - session_last_update > 21600: - print('Regenerating inbox queue session at 6hr interval') - session = create_session(proxy_type) - if not session: - continue - session_last_update = curr_time - # oldest item first queue.sort() queue_filename = queue[0] @@ -4249,6 +4259,8 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int, str(queue_filename)) continue + curr_time = int(time.time()) + # clear the daily quotas for maximum numbers of received posts if curr_time - quotas_last_update_daily > 60 * 60 * 24: quotas_daily = { @@ -4276,6 +4288,42 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int, account_max_posts_per_day, debug): continue + curr_session = session + curr_proxy_type = proxy_type + curr_session_last_update = session_last_update + session_type = 'default' + if queue_json.get('actor'): + if isinstance(queue_json['actor'], str): + sender_domain, _ = get_domain_from_actor(queue_json['actor']) + if sender_domain.endswith('.onion') and \ + 
session_onion and proxy_type != 'tor': + curr_proxy_type = 'tor' + curr_session = session_onion + session_type = 'onion' + curr_session_last_update = session_last_update_onion + elif (sender_domain.endswith('.i2p') and + session_i2p and proxy_type != 'i2p'): + curr_proxy_type = 'i2p' + curr_session = session_i2p + session_type = 'i2p' + curr_session_last_update = session_last_update_i2p + + # recreate the session periodically + if not curr_session or curr_time - curr_session_last_update > 21600: + print('Regenerating inbox queue session at 6hr interval') + curr_session = create_session(curr_proxy_type) + if not curr_session: + continue + if session_type == 'default': + session = curr_session + session_last_update = curr_time + elif session_type == 'onion': + session_onion = curr_session + session_last_update_onion = curr_time + elif session_type == 'i2p': + session_i2p = curr_session + session_last_update_i2p = curr_time + if debug and queue_json.get('actor'): print('Obtaining public key for actor ' + queue_json['actor']) @@ -4298,10 +4346,11 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int, break pub_key = \ - get_person_pub_key(base_dir, session, key_id, + get_person_pub_key(base_dir, curr_session, key_id, person_cache, debug, project_version, http_prefix, - domain, onion_domain, signing_priv_key_pem) + domain, onion_domain, i2p_domain, + signing_priv_key_pem) if pub_key: if debug: print('DEBUG: public key: ' + str(pub_key)) @@ -4409,7 +4458,7 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int, # if queue_json['post'].get('id'): # queue_json['post']['id'] = queue_json['id'] - if _receive_undo(session, + if _receive_undo(curr_session, base_dir, http_prefix, port, send_threads, post_log, cached_webfingers, @@ -4430,7 +4479,7 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int, if debug: print('DEBUG: checking for follow requests') - if _receive_follow_request(session, + if _receive_follow_request(curr_session, base_dir, http_prefix, port, send_threads, post_log, cached_webfingers, @@ -4438,7 +4487,7 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int, queue_json['post'], federation_list, debug, project_version, - max_followers, onion_domain, + max_followers, onion_domain, i2p_domain, signing_priv_key_pem, unit_test): if os.path.isfile(queue_filename): try: @@ -4455,7 +4504,7 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int, if debug: print('DEBUG: No follow requests') - if receive_accept_reject(session, + if receive_accept_reject(curr_session, base_dir, http_prefix, domain, port, send_threads, post_log, cached_webfingers, person_cache, @@ -4472,7 +4521,7 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int, queue.pop(0) continue - if _receive_update_activity(recent_posts_cache, session, + if _receive_update_activity(recent_posts_cache, curr_session, base_dir, http_prefix, domain, port, send_threads, post_log, @@ -4555,7 +4604,7 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int, languages_understood = [] _inbox_after_initial(recent_posts_cache, max_recent_posts, - session, key_id, handle, + curr_session, key_id, handle, queue_json['post'], base_dir, http_prefix, send_threads, post_log, @@ -4563,7 +4612,7 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int, person_cache, queue, domain, onion_domain, i2p_domain, - port, proxy_type, + port, curr_proxy_type, federation_list, debug, queue_filename, destination, diff --git a/outbox.py b/outbox.py index 
81f65458a..be8ff9e1d 100644 --- a/outbox.py +++ b/outbox.py @@ -9,7 +9,6 @@ __module_group__ = "Timeline" import os from shutil import copyfile -from session import create_session from auth import create_password from posts import is_image_media from posts import outbox_message_create_wrap @@ -489,12 +488,6 @@ def post_message_to_outbox(session, translate: {}, if debug: print('DEBUG: Updated announcements (shares) collection ' + 'for the post associated with the Announce activity') - if not server.session: - print('DEBUG: creating new session for c2s') - server.session = create_session(proxy_type) - if not server.session: - print('ERROR: Failed to create session for post_message_to_outbox') - return False if debug: print('DEBUG: sending c2s post to followers') # remove inactive threads diff --git a/schedule.py b/schedule.py index b6fae6a4c..46e9b1a19 100644 --- a/schedule.py +++ b/schedule.py @@ -16,6 +16,7 @@ from utils import load_json from utils import is_account_dir from utils import acct_dir from outbox import post_message_to_outbox +from session import create_session def _update_post_schedule(base_dir: str, handle: str, httpd, @@ -93,7 +94,17 @@ def _update_post_schedule(base_dir: str, handle: str, httpd, if nickname: httpd.post_to_nickname = nickname - if not post_message_to_outbox(httpd.session, + + # create session if needed + curr_session = httpd.session + curr_proxy_type = httpd.proxy_type + if not curr_session: + curr_session = create_session(httpd.proxy_type) + httpd.session = curr_session + if not curr_session: + continue + + if not post_message_to_outbox(curr_session, httpd.translate, post_json_object, nickname, httpd, base_dir, @@ -111,7 +122,7 @@ def _update_post_schedule(base_dir: str, handle: str, httpd, httpd.cached_webfingers, httpd.person_cache, httpd.allow_deletion, - httpd.proxy_type, + curr_proxy_type, httpd.project_version, httpd.debug, httpd.yt_replace_domain, diff --git a/session.py b/session.py index dbb471bb6..0f57d03ac 100644 --- a/session.py +++ b/session.py @@ -767,3 +767,48 @@ def get_method(method_name: str, xml_str: str, print('EX: get_method failed, ' + 'connection was reset during get_vcard ' + str(ex)) return None + + +def get_session_for_domains(server, calling_domain: str, referer_domain: str): + """Returns the appropriate session for the given domains + """ + if referer_domain is None: + referer_domain = '' + + if '.onion:' in calling_domain or \ + calling_domain.endswith('.onion') or \ + '.onion:' in referer_domain or \ + referer_domain.endswith('.onion'): + if not server.domain.endswith('.onion'): + if server.onion_domain and server.session_onion: + return server.session_onion, 'tor' + if '.i2p:' in calling_domain or \ + calling_domain.endswith('.i2p') or \ + '.i2p:' in referer_domain or \ + referer_domain.endswith('.i2p'): + if not server.domain.endswith('.i2p'): + if server.i2p_domain and server.session_i2p: + return server.session_i2p, 'i2p' + return server.session, server.proxy_type + + +def get_session_for_domain(server, referer_domain: str): + """Returns the appropriate session for the given domain + """ + return get_session_for_domains(server, referer_domain, referer_domain) + + +def set_session_for_sender(server, proxy_type: str, new_session) -> None: + """Sets the appropriate session for the given sender + """ + if proxy_type == 'tor': + if not server.domain.endswith('.onion'): + if server.onion_domain and server.session_onion: + server.session_onion = new_session + return + if proxy_type == 'i2p': + if not 
server.domain.endswith('.i2p'): + if server.i2p_domain and server.session_i2p: + server.session_i2p = new_session + return + server.session = new_session From aa93a15c8a1937a6ebd9b7143eec7b8979cd3ce6 Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Fri, 11 Mar 2022 14:26:35 +0000 Subject: [PATCH 03/31] Search for onion/i2p handles from clearnet --- daemon.py | 49 ++++++++++++++++++++++++++++++++++++++++--------- 1 file changed, 40 insertions(+), 9 deletions(-) diff --git a/daemon.py b/daemon.py index 805fda549..9c088d881 100644 --- a/daemon.py +++ b/daemon.py @@ -3731,17 +3731,11 @@ class PubServer(BaseHTTPRequestHandler): return # profile search nickname = get_nickname_from_actor(actor_str) - curr_session = \ - self._establish_session("handle search", - curr_session, - proxy_type) - if not curr_session: - self.server.postreq_busy = False - return profile_path_str = path.replace('/searchhandle', '') # are we already following the searched for handle? if is_following_actor(base_dir, nickname, domain, search_str): + # get the actor if not has_users_path(search_str): search_nickname = get_nickname_from_actor(search_str) search_domain, search_port = \ @@ -3753,6 +3747,23 @@ class PubServer(BaseHTTPRequestHandler): search_domain_full) else: actor = search_str + + # establish the session + curr_proxy_type = proxy_type + if '.onion/' in actor: + curr_proxy_type = 'tor' + elif '.i2p/' in actor: + curr_proxy_type = 'i2p' + + curr_session = \ + self._establish_session("handle search", + curr_session, + curr_proxy_type) + if not curr_session: + self.server.postreq_busy = False + return + + # get the avatar url for the actor avatar_url = \ get_avatar_image_url(curr_session, base_dir, http_prefix, @@ -3769,7 +3780,7 @@ class PubServer(BaseHTTPRequestHandler): getreq_start_time, onion_domain, i2p_domain, cookie, debug, authorized, - curr_session, proxy_type) + curr_session, curr_proxy_type) return else: show_published_date_only = \ @@ -3797,6 +3808,26 @@ class PubServer(BaseHTTPRequestHandler): if self.server.account_timezone.get(nickname): timezone = \ self.server.account_timezone.get(nickname) + + profile_handle = search_str.replace('\n', '').strip() + + # establish the session + curr_proxy_type = proxy_type + if '.onion/' in profile_handle or \ + profile_handle.endswith('.onion'): + curr_proxy_type = 'tor' + elif ('.i2p/' in profile_handle or + profile_handle.endswith('.i2p')): + curr_proxy_type = 'i2p' + + curr_session = \ + self._establish_session("handle search", + curr_session, + curr_proxy_type) + if not curr_session: + self.server.postreq_busy = False + return + profile_str = \ html_profile_after_search(self.server.css_cache, recent_posts_cache, @@ -3808,7 +3839,7 @@ class PubServer(BaseHTTPRequestHandler): nickname, domain, port, - search_str, + profile_handle, curr_session, cached_webfingers, self.server.person_cache, From ff2aa1564e645cf0d78e4d0f4ab29a288ffe6ff1 Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Fri, 11 Mar 2022 14:38:47 +0000 Subject: [PATCH 04/31] More debug --- person.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/person.py b/person.py index 835950d47..57d41663e 100644 --- a/person.py +++ b/person.py @@ -1427,10 +1427,10 @@ def _detect_users_path(url: str) -> str: return '/users/' -def get_actor_json(hostDomain: str, handle: str, http: bool, gnunet: bool, +def get_actor_json(host_domain: str, handle: str, http: bool, gnunet: bool, debug: bool, quiet: bool, signing_priv_key_pem: str, - existingSession) -> ({}, {}): + existing_session) -> 
({}, {}): """Returns the actor json """ if debug: @@ -1513,8 +1513,8 @@ def get_actor_json(hostDomain: str, handle: str, http: bool, gnunet: bool, http_prefix = 'https' else: http_prefix = 'http' - if existingSession: - session = existingSession + if existing_session: + session = existing_session else: session = create_session(proxy_type) if nickname == 'inbox': @@ -1541,11 +1541,12 @@ def get_actor_json(hostDomain: str, handle: str, http: bool, gnunet: bool, handle = nickname + '@' + domain wf_request = webfinger_handle(session, handle, http_prefix, cached_webfingers, - hostDomain, __version__, debug, + host_domain, __version__, debug, group_account, signing_priv_key_pem) if not wf_request: if not quiet: - print('get_actor_json Unable to webfinger ' + handle) + print('get_actor_json Unable to webfinger ' + handle + + ' ' + http_prefix + ' proxy: ' + str(proxy_type)) return None, None if not isinstance(wf_request, dict): if not quiet: @@ -1600,7 +1601,7 @@ def get_actor_json(hostDomain: str, handle: str, http: bool, gnunet: bool, } person_json = \ get_json(signing_priv_key_pem, session, person_url, as_header, - None, debug, __version__, http_prefix, hostDomain, + None, debug, __version__, http_prefix, host_domain, 20, quiet) if person_json: if not quiet: From 673301f22d3a416293b03577587e362815445a70 Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Fri, 11 Mar 2022 15:00:01 +0000 Subject: [PATCH 05/31] Debug --- webfinger.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/webfinger.py b/webfinger.py index fa0497513..e488d859f 100644 --- a/webfinger.py +++ b/webfinger.py @@ -68,12 +68,12 @@ def webfinger_handle(session, handle: str, http_prefix: str, """Gets webfinger result for the given ActivityPub handle """ if not session: - if debug: - print('WARN: No session specified for webfinger_handle') + print('WARN: No session specified for webfinger_handle') return None nickname, domain, _ = _parse_handle(handle) if not nickname: + print('WARN: No nickname found in handle ' + handle) return None wf_domain = remove_domain_port(domain) @@ -123,14 +123,13 @@ def webfinger_handle(session, handle: str, http_prefix: str, if result: store_webfinger_in_cache(wf_handle, result, cached_webfingers) else: - if debug: - print("WARN: Unable to webfinger " + url + ' ' + - 'nickname: ' + str(nickname) + ' ' + - 'handle: ' + str(handle) + ' ' + - 'wf_handle: ' + str(wf_handle) + ' ' + - 'domain: ' + str(wf_domain) + ' ' + - 'headers: ' + str(hdr) + ' ' + - 'params: ' + str(par)) + print("WARN: Unable to webfinger " + url + ' ' + + 'nickname: ' + str(nickname) + ' ' + + 'handle: ' + str(handle) + ' ' + + 'wf_handle: ' + str(wf_handle) + ' ' + + 'domain: ' + str(wf_domain) + ' ' + + 'headers: ' + str(hdr) + ' ' + + 'params: ' + str(par)) return result From ae9c77870e6e95927bc966dc03021fbabfbfba2f Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Fri, 11 Mar 2022 15:19:43 +0000 Subject: [PATCH 06/31] Webfinger debug --- webfinger.py | 1 + 1 file changed, 1 insertion(+) diff --git a/webfinger.py b/webfinger.py index e488d859f..3432af169 100644 --- a/webfinger.py +++ b/webfinger.py @@ -124,6 +124,7 @@ def webfinger_handle(session, handle: str, http_prefix: str, store_webfinger_in_cache(wf_handle, result, cached_webfingers) else: print("WARN: Unable to webfinger " + url + ' ' + + 'from_domain: ' + from_domain + ' ' + 'nickname: ' + str(nickname) + ' ' + 'handle: ' + str(handle) + ' ' + 'wf_handle: ' + str(wf_handle) + ' ' + From 259016b725331b1354ffc26abeaa88f71c2a847b Mon Sep 17 
00:00:00 2001 From: Bob Mottram Date: Fri, 11 Mar 2022 16:13:40 +0000 Subject: [PATCH 07/31] Set from domain when searching for onion handle from clearnet --- daemon.py | 4 +++- webapp_profile.py | 16 +++++++++++++--- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/daemon.py b/daemon.py index 9c088d881..de27d4bc6 100644 --- a/daemon.py +++ b/daemon.py @@ -3858,7 +3858,9 @@ class PubServer(BaseHTTPRequestHandler): signing_priv_key_pem, self.server.cw_lists, self.server.lists_enabled, - timezone) + timezone, + self.server.onion_domain, + self.server.i2p_domain) if profile_str: msg = profile_str.encode('utf-8') msglen = len(msg) diff --git a/webapp_profile.py b/webapp_profile.py index 713190bc8..cde8d016b 100644 --- a/webapp_profile.py +++ b/webapp_profile.py @@ -144,7 +144,8 @@ def html_profile_after_search(css_cache: {}, max_like_count: int, signing_priv_key_pem: str, cw_lists: {}, lists_enabled: str, - timezone: str) -> str: + timezone: str, + onion_domain: str, i2p_domain: str) -> str: """Show a profile page after a search for a fediverse address """ http = False @@ -153,8 +154,17 @@ def html_profile_after_search(css_cache: {}, http = True elif http_prefix == 'gnunet': gnunet = True + from_domain = domain + if onion_domain: + if '.onion/' in profile_handle or profile_handle.endswith('.onion'): + from_domain = onion_domain + http = True + if i2p_domain: + if '.i2p/' in profile_handle or profile_handle.endswith('.i2p'): + from_domain = i2p_domain + http = True profile_json, as_header = \ - get_actor_json(domain, profile_handle, http, gnunet, debug, False, + get_actor_json(from_domain, profile_handle, http, gnunet, debug, False, signing_priv_key_pem, session) if not profile_json: return None @@ -327,7 +337,7 @@ def html_profile_after_search(css_cache: {}, user_feed = \ parse_user_feed(signing_priv_key_pem, session, outbox_url, as_header, project_version, - http_prefix, domain, debug) + http_prefix, from_domain, debug) if user_feed: i = 0 for item in user_feed: From 00c2c4b7b2c4b7050b60ba111430cb59aa812eae Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Fri, 11 Mar 2022 17:21:31 +0000 Subject: [PATCH 08/31] Debug --- inbox.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/inbox.py b/inbox.py index 3c614ea31..6018c6f0d 100644 --- a/inbox.py +++ b/inbox.py @@ -625,8 +625,12 @@ def _inbox_post_recipients_add(base_dir: str, http_prefix: str, toList: [], handle + ' does not exist') else: if debug: - print('DEBUG: ' + recipient + ' is not local to ' + - domain_match) + if recipient.endswith('#Public'): + print('DEBUG: #Public recipient is too non-specific. 
' + + recipient + ' ' + domain_match) + else: + print('DEBUG: ' + recipient + ' is not local to ' + + domain_match) print(str(toList)) if recipient.endswith('followers'): if debug: From cf9da94bba9004f41c8a92f7a1718ebc194a5899 Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Fri, 11 Mar 2022 17:44:53 +0000 Subject: [PATCH 09/31] Detect post delivery via a third party --- daemon.py | 20 +++++++++++++++++++- inbox.py | 6 ++++-- 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/daemon.py b/daemon.py index de27d4bc6..1c88d2622 100644 --- a/daemon.py +++ b/daemon.py @@ -424,6 +424,21 @@ def save_domain_qrcode(base_dir: str, http_prefix: str, class PubServer(BaseHTTPRequestHandler): protocol_version = 'HTTP/1.1' + def _detect_mitm(self) -> bool: + """Detect if a request contains a MiTM + """ + mitm_domains = ['cloudflare'] + check_headers = ( + 'Server', 'Report-To', 'Report-to', 'report-to', + 'Expect-CT', 'Expect-Ct', 'expect-ct' + ) + for interloper in mitm_domains: + for header_name in check_headers: + if self.headers.get(header_name): + if interloper in self.headers[header_name]: + return True + return False + def _get_instance_url(self, calling_domain: str) -> str: """Returns the URL for this instance """ @@ -1711,6 +1726,8 @@ class PubServer(BaseHTTPRequestHandler): self.server.blocked_cache_last_updated, self.server.blocked_cache_update_secs) + mitm = self._detect_mitm() + queue_filename = \ save_post_to_inbox_queue(self.server.base_dir, self.server.http_prefix, @@ -1722,7 +1739,8 @@ class PubServer(BaseHTTPRequestHandler): self.path, self.server.debug, self.server.blocked_cache, - self.server.system_language) + self.server.system_language, + mitm) if queue_filename: # add json to the queue if queue_filename not in self.server.inbox_queue: diff --git a/inbox.py b/inbox.py index 6018c6f0d..88c78fd05 100644 --- a/inbox.py +++ b/inbox.py @@ -459,7 +459,8 @@ def save_post_to_inbox_queue(base_dir: str, http_prefix: str, message_bytes: str, http_headers: {}, post_path: str, debug: bool, - blocked_cache: [], system_language: str) -> str: + blocked_cache: [], system_language: str, + mitm: bool) -> str: """Saves the given json to the inbox queue for the person key_id specifies the actor sending the post """ @@ -592,7 +593,8 @@ def save_post_to_inbox_queue(base_dir: str, http_prefix: str, 'original': original_post_json_object, 'digest': digest, 'filename': filename, - 'destination': destination + 'destination': destination, + 'mitm': mitm } if debug: From 8b9dc4652d872fd3d825464204671ba10e8b2c28 Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Fri, 11 Mar 2022 18:07:17 +0000 Subject: [PATCH 10/31] Debug --- person.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/person.py b/person.py index 57d41663e..15e11374e 100644 --- a/person.py +++ b/person.py @@ -1515,8 +1515,14 @@ def get_actor_json(host_domain: str, handle: str, http: bool, gnunet: bool, http_prefix = 'http' if existing_session: session = existing_session + if debug: + print('DEBUG: get_actor_json using existing session ' + + str(proxy_type) + ' ' + domain) else: session = create_session(proxy_type) + if debug: + print('DEBUG: get_actor_json using session ' + + str(proxy_type) + ' ' + domain) if nickname == 'inbox': nickname = domain From 5025924b99aa0a8db8b433d14d5ceb420ac87841 Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Fri, 11 Mar 2022 18:20:38 +0000 Subject: [PATCH 11/31] Change current session --- daemon.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/daemon.py b/daemon.py index 
1c88d2622..53d0c06fa 100644 --- a/daemon.py +++ b/daemon.py @@ -3770,8 +3770,10 @@ class PubServer(BaseHTTPRequestHandler): curr_proxy_type = proxy_type if '.onion/' in actor: curr_proxy_type = 'tor' + curr_session = self.server.session_onion elif '.i2p/' in actor: curr_proxy_type = 'i2p' + curr_session = self.server.session_i2p curr_session = \ self._establish_session("handle search", @@ -3834,9 +3836,11 @@ class PubServer(BaseHTTPRequestHandler): if '.onion/' in profile_handle or \ profile_handle.endswith('.onion'): curr_proxy_type = 'tor' + curr_session = self.server.session_onion elif ('.i2p/' in profile_handle or profile_handle.endswith('.i2p')): curr_proxy_type = 'i2p' + curr_session = self.server.session_i2p curr_session = \ self._establish_session("handle search", From 1119f1014087e00f1bd364bd1bb8087ef34543fe Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Fri, 11 Mar 2022 22:13:22 +0000 Subject: [PATCH 12/31] Use different sessions for sending out posts Posts addressed to onion instances use the onion session, etc --- daemon.py | 111 +++++++++++++++++++++++++++++++++++++++++++++++++++++- inbox.py | 88 ++++++++++++++++++++++++++----------------- outbox.py | 8 +++- posts.py | 61 +++++++++++++++++++----------- 4 files changed, 208 insertions(+), 60 deletions(-) diff --git a/daemon.py b/daemon.py index 53d0c06fa..cd28df4da 100644 --- a/daemon.py +++ b/daemon.py @@ -698,13 +698,22 @@ class PubServer(BaseHTTPRequestHandler): print('AUTH: Secure mode GET request not permitted: ' + key_id) return False + if self.server.onion_domain: + if '.onion/' in key_id: + curr_session = self.server.session_onion + proxy_type = 'tor' + if self.server.i2p_domain: + if '.i2p/' in key_id: + curr_session = self.server.session_i2p + proxy_type = 'i2p' + curr_session = \ self._establish_session("secure mode", curr_session, proxy_type) if not curr_session: return False - # obtain the public key + # obtain the public key. 
key_id is the actor pub_key = \ get_person_pub_key(self.server.base_dir, curr_session, key_id, @@ -8116,6 +8125,16 @@ class PubServer(BaseHTTPRequestHandler): self._redirect_headers(actor_path_str, cookie, calling_domain) return + + if self.server.onion_domain: + if '.onion/' in actor: + curr_session = self.server.session_onion + proxy_type = 'tor' + if self.server.i2p_domain: + if '.onion/' in actor: + curr_session = self.server.session_i2p + proxy_type = 'i2p' + curr_session = \ self._establish_session("announceButton", curr_session, proxy_type) @@ -8279,6 +8298,16 @@ class PubServer(BaseHTTPRequestHandler): self._redirect_headers(actor_path_str, cookie, calling_domain) return + + if self.server.onion_domain: + if '.onion/' in actor: + curr_session = self.server.session_onion + proxy_type = 'tor' + if self.server.i2p_domain: + if '.onion/' in actor: + curr_session = self.server.session_i2p + proxy_type = 'i2p' + curr_session = \ self._establish_session("undoAnnounceButton", curr_session, proxy_type) @@ -8358,6 +8387,16 @@ class PubServer(BaseHTTPRequestHandler): handle_nickname + '@' + \ get_full_domain(handle_domain, handle_port) if '@' in following_handle: + + if self.server.onion_domain: + if following_handle.endswith('.onion'): + curr_session = self.server.session_onion + proxy_type = 'tor' + if self.server.i2p_domain: + if following_handle.endswith('.i2p'): + curr_session = self.server.session_i2p + proxy_type = 'i2p' + curr_session = \ self._establish_session("followApproveButton", curr_session, proxy_type) @@ -8598,6 +8637,16 @@ class PubServer(BaseHTTPRequestHandler): self._redirect_headers(actor_path_str, cookie, calling_domain) return + + if self.server.onion_domain: + if '.onion/' in actor: + curr_session = self.server.session_onion + proxy_type = 'tor' + if self.server.i2p_domain: + if '.onion/' in actor: + curr_session = self.server.session_i2p + proxy_type = 'i2p' + curr_session = \ self._establish_session("likeButton", curr_session, proxy_type) @@ -8774,6 +8823,16 @@ class PubServer(BaseHTTPRequestHandler): self._redirect_headers(actor_path_str, cookie, calling_domain) return + + if self.server.onion_domain: + if '.onion/' in actor: + curr_session = self.server.session_onion + proxy_type = 'tor' + if self.server.i2p_domain: + if '.onion/' in actor: + curr_session = self.server.session_i2p + proxy_type = 'i2p' + curr_session = \ self._establish_session("undoLikeButton", curr_session, proxy_type) @@ -8955,6 +9014,16 @@ class PubServer(BaseHTTPRequestHandler): self._redirect_headers(actor_path_str, cookie, calling_domain) return + + if self.server.onion_domain: + if '.onion/' in actor: + curr_session = self.server.session_onion + proxy_type = 'tor' + if self.server.i2p_domain: + if '.onion/' in actor: + curr_session = self.server.session_i2p + proxy_type = 'i2p' + curr_session = \ self._establish_session("reactionButton", curr_session, proxy_type) @@ -9152,6 +9221,16 @@ class PubServer(BaseHTTPRequestHandler): calling_domain) return emoji_content = urllib.parse.unquote_plus(emoji_content_encoded) + + if self.server.onion_domain: + if '.onion/' in actor: + curr_session = self.server.session_onion + proxy_type = 'tor' + if self.server.i2p_domain: + if '.onion/' in actor: + curr_session = self.server.session_i2p + proxy_type = 'i2p' + curr_session = \ self._establish_session("undoReactionButton", curr_session, proxy_type) @@ -9420,6 +9499,16 @@ class PubServer(BaseHTTPRequestHandler): self._redirect_headers(actor_path_str, cookie, calling_domain) return + + if 
self.server.onion_domain: + if '.onion/' in actor: + curr_session = self.server.session_onion + proxy_type = 'tor' + if self.server.i2p_domain: + if '.onion/' in actor: + curr_session = self.server.session_i2p + proxy_type = 'i2p' + curr_session = \ self._establish_session("bookmarkButton", curr_session, proxy_type) @@ -9559,6 +9648,16 @@ class PubServer(BaseHTTPRequestHandler): self._redirect_headers(actor_path_str, cookie, calling_domain) return + + if self.server.onion_domain: + if '.onion/' in actor: + curr_session = self.server.session_onion + proxy_type = 'tor' + if self.server.i2p_domain: + if '.onion/' in actor: + curr_session = self.server.session_i2p + proxy_type = 'i2p' + curr_session = \ self._establish_session("undo_bookmarkButton", curr_session, proxy_type) @@ -9713,6 +9812,16 @@ class PubServer(BaseHTTPRequestHandler): self._redirect_headers(actor + '/' + timeline_str, cookie, calling_domain) return + + if self.server.onion_domain: + if '.onion/' in actor: + curr_session = self.server.session_onion + proxy_type = 'tor' + if self.server.i2p_domain: + if '.onion/' in actor: + curr_session = self.server.session_i2p + proxy_type = 'i2p' + curr_session = \ self._establish_session("deleteButton", curr_session, proxy_type) diff --git a/inbox.py b/inbox.py index 88c78fd05..3d0bbadbb 100644 --- a/inbox.py +++ b/inbox.py @@ -2647,7 +2647,8 @@ def _group_handle(base_dir: str, handle: str) -> bool: return actor_json['type'] == 'Group' -def _send_to_group_members(session, base_dir: str, handle: str, port: int, +def _send_to_group_members(session, session_onion, session_i2p, + base_dir: str, handle: str, port: int, post_json_object: {}, http_prefix: str, federation_list: [], send_threads: [], post_log: [], @@ -2708,7 +2709,8 @@ def _send_to_group_members(session, base_dir: str, handle: str, port: int, person_cache, cached_webfingers, debug, __version__, signing_priv_key_pem) - send_to_followers_thread(session, base_dir, nickname, domain, + send_to_followers_thread(session, session_onion, session_i2p, + base_dir, nickname, domain, onion_domain, i2p_domain, port, http_prefix, federation_list, send_threads, post_log, @@ -3033,8 +3035,8 @@ def _is_valid_dm(base_dir: str, nickname: str, domain: str, port: int, def _receive_question_vote(base_dir: str, nickname: str, domain: str, http_prefix: str, handle: str, debug: bool, post_json_object: {}, recent_posts_cache: {}, - session, onion_domain: str, - i2p_domain: str, port: int, + session, session_onion, session_i2p, + onion_domain: str, i2p_domain: str, port: int, federation_list: [], send_threads: [], post_log: [], cached_webfingers: {}, person_cache: {}, signing_priv_key_pem: str, @@ -3110,7 +3112,8 @@ def _receive_question_vote(base_dir: str, nickname: str, domain: str, question_json['type'] = 'Update' shared_items_federated_domains = [] shared_item_federation_tokens = {} - send_to_followers_thread(session, base_dir, nickname, domain, + send_to_followers_thread(session, session_onion, session_i2p, + base_dir, nickname, domain, onion_domain, i2p_domain, port, http_prefix, federation_list, send_threads, post_log, @@ -3238,7 +3241,8 @@ def _check_for_git_patches(base_dir: str, nickname: str, domain: str, def _inbox_after_initial(recent_posts_cache: {}, max_recent_posts: int, - session, key_id: str, handle: str, message_json: {}, + session, session_onion, session_i2p, + key_id: str, handle: str, message_json: {}, base_dir: str, http_prefix: str, send_threads: [], post_log: [], cached_webfingers: {}, person_cache: {}, queue: [], domain: str, @@ 
-3513,7 +3517,8 @@ def _inbox_after_initial(recent_posts_cache: {}, max_recent_posts: int, _receive_question_vote(base_dir, nickname, domain, http_prefix, handle, debug, post_json_object, recent_posts_cache, - session, onion_domain, i2p_domain, port, + session, session_onion, session_i2p, + onion_domain, i2p_domain, port, federation_list, send_threads, post_log, cached_webfingers, person_cache, signing_priv_key_pem, @@ -3679,7 +3684,8 @@ def _inbox_after_initial(recent_posts_cache: {}, max_recent_posts: int, # send the post out to group members if is_group: - _send_to_group_members(session, base_dir, handle, port, + _send_to_group_members(session, session_onion, session_i2p, + base_dir, handle, port, post_json_object, http_prefix, federation_list, send_threads, @@ -4167,21 +4173,27 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int, """ print('Starting new session when starting inbox queue') curr_session_time = int(time.time()) - session_last_update = curr_session_time + session_last_update = 0 session = create_session(proxy_type) + if session: + session_last_update = curr_session_time # is this is a clearnet instance then optionally start sessions # for onion and i2p domains session_onion = None session_i2p = None - session_last_update_onion = curr_session_time - session_last_update_i2p = curr_session_time + session_last_update_onion = 0 + session_last_update_i2p = 0 if proxy_type != 'tor' and onion_domain: print('Starting onion session when starting inbox queue') session_onion = create_session('tor') + if session_onion: + session_onion = curr_session_time if proxy_type != 'i2p' and i2p_domain: print('Starting i2p session when starting inbox queue') session_i2p = create_session('i2p') + if session_i2p: + session_i2p = curr_session_time inbox_handle = 'inbox@' + domain if debug: @@ -4294,10 +4306,37 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int, account_max_posts_per_day, debug): continue + # recreate the session periodically + if not session or curr_time - session_last_update > 21600: + print('Regenerating inbox queue session at 6hr interval') + session = create_session(proxy_type) + if session: + session_last_update = curr_time + else: + print('WARN: inbox session not created') + continue + if onion_domain: + if not session_onion or \ + curr_time - session_last_update_onion > 21600: + print('Regenerating inbox queue onion session at 6hr interval') + session_onion = create_session('tor') + if session_onion: + session_last_update_onion = curr_time + else: + print('WARN: inbox onion session not created') + continue + if i2p_domain: + if not session_i2p or curr_time - session_last_update_i2p > 21600: + print('Regenerating inbox queue i2p session at 6hr interval') + session_i2p = create_session('i2p') + if session_i2p: + session_last_update_i2p = curr_time + else: + print('WARN: inbox i2p session not created') + continue + curr_session = session curr_proxy_type = proxy_type - curr_session_last_update = session_last_update - session_type = 'default' if queue_json.get('actor'): if isinstance(queue_json['actor'], str): sender_domain, _ = get_domain_from_actor(queue_json['actor']) @@ -4305,30 +4344,10 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int, session_onion and proxy_type != 'tor': curr_proxy_type = 'tor' curr_session = session_onion - session_type = 'onion' - curr_session_last_update = session_last_update_onion elif (sender_domain.endswith('.i2p') and session_i2p and proxy_type != 'i2p'): curr_proxy_type = 'i2p' curr_session = 
session_i2p - session_type = 'i2p' - curr_session_last_update = session_last_update_i2p - - # recreate the session periodically - if not curr_session or curr_time - curr_session_last_update > 21600: - print('Regenerating inbox queue session at 6hr interval') - curr_session = create_session(curr_proxy_type) - if not curr_session: - continue - if session_type == 'default': - session = curr_session - session_last_update = curr_time - elif session_type == 'onion': - session_onion = curr_session - session_last_update_onion = curr_time - elif session_type == 'i2p': - session_i2p = curr_session - session_last_update_i2p = curr_time if debug and queue_json.get('actor'): print('Obtaining public key for actor ' + queue_json['actor']) @@ -4610,7 +4629,8 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int, languages_understood = [] _inbox_after_initial(recent_posts_cache, max_recent_posts, - curr_session, key_id, handle, + session, session_onion, session_i2p, + key_id, handle, queue_json['post'], base_dir, http_prefix, send_threads, post_log, diff --git a/outbox.py b/outbox.py index be8ff9e1d..c8bd0f5a0 100644 --- a/outbox.py +++ b/outbox.py @@ -510,6 +510,8 @@ def post_message_to_outbox(session, translate: {}, # create a thread to send the post to followers followers_thread = \ send_to_followers_thread(server.session, + server.session_onion, + server.session_i2p, base_dir, post_to_nickname, domain, onion_domain, i2p_domain, @@ -651,8 +653,10 @@ def post_message_to_outbox(session, translate: {}, print('c2s sender: ' + post_to_nickname + '@' + domain + ':' + str(port)) named_addresses_thread = \ - send_to_named_addresses_thread(server.session, base_dir, - post_to_nickname, + send_to_named_addresses_thread(server.session, + server.session_onion, + server.session_i2p, + base_dir, post_to_nickname, domain, onion_domain, i2p_domain, port, http_prefix, federation_list, diff --git a/posts.py b/posts.py index f452c02b7..c5a15ea24 100644 --- a/posts.py +++ b/posts.py @@ -2981,7 +2981,8 @@ def _is_profile_update(post_json_object: {}) -> bool: return False -def _send_to_named_addresses(session, base_dir: str, +def _send_to_named_addresses(session, session_onion, session_i2p, + base_dir: str, nickname: str, domain: str, onion_domain: str, i2p_domain: str, port: int, http_prefix: str, federation_list: [], @@ -3102,16 +3103,19 @@ def _send_to_named_addresses(session, base_dir: str, from_domain = domain from_domain_full = get_full_domain(domain, port) from_http_prefix = http_prefix + curr_session = session if onion_domain: if to_domain.endswith('.onion'): from_domain = onion_domain from_domain_full = onion_domain from_http_prefix = 'http' - elif i2p_domain: + curr_session = session_onion + if i2p_domain: if to_domain.endswith('.i2p'): from_domain = i2p_domain from_domain_full = i2p_domain from_http_prefix = 'http' + curr_session = session_i2p cc_list = [] # if the "to" domain is within the shared items @@ -3125,7 +3129,7 @@ def _send_to_named_addresses(session, base_dir: str, group_account = has_group_type(base_dir, address, person_cache) - send_signed_json(post_json_object, session, base_dir, + send_signed_json(post_json_object, curr_session, base_dir, nickname, from_domain, port, to_nickname, to_domain, to_port, cc_list, from_http_prefix, True, client_to_server, @@ -3136,8 +3140,8 @@ def _send_to_named_addresses(session, base_dir: str, signing_priv_key_pem, 34436782) -def send_to_named_addresses_thread(session, base_dir: str, - nickname: str, domain: str, +def 
send_to_named_addresses_thread(session, session_onion, session_i2p, + base_dir: str, nickname: str, domain: str, onion_domain: str, i2p_domain: str, port: int, http_prefix: str, federation_list: [], @@ -3152,8 +3156,8 @@ def send_to_named_addresses_thread(session, base_dir: str, """ send_thread = \ thread_with_trace(target=_send_to_named_addresses, - args=(session, base_dir, - nickname, domain, + args=(session, session_onion, session_i2p, + base_dir, nickname, domain, onion_domain, i2p_domain, port, http_prefix, federation_list, send_threads, post_log, @@ -3209,9 +3213,8 @@ def _sending_profile_update(post_json_object: {}) -> bool: return False -def send_to_followers(session, base_dir: str, - nickname: str, - domain: str, +def send_to_followers(session, session_onion, session_i2p, + base_dir: str, nickname: str, domain: str, onion_domain: str, i2p_domain: str, port: int, http_prefix: str, federation_list: [], send_threads: [], post_log: [], @@ -3224,9 +3227,6 @@ def send_to_followers(session, base_dir: str, """sends a post to the followers of the given nickname """ print('send_to_followers') - if not session: - print('WARN: No session for send_to_followers') - return if not _post_is_addressed_to_followers(base_dir, nickname, domain, port, http_prefix, post_json_object): @@ -3278,9 +3278,25 @@ def send_to_followers(session, base_dir: str, print('Sending post to followers domain is active: ' + follower_domain_url) + # select the appropriate session + curr_session = session + curr_http_prefix = http_prefix + if onion_domain: + if follower_domain.endswith('.onion'): + curr_session = session_onion + curr_http_prefix = 'http' + if i2p_domain: + if follower_domain.endswith('.i2p'): + curr_session = session_i2p + curr_http_prefix = 'http' + if not curr_session: + print('WARN: session not found when sending to follower ' + + follower_domain_url) + continue + with_shared_inbox = \ - _has_shared_inbox(session, http_prefix, follower_domain, debug, - signing_priv_key_pem) + _has_shared_inbox(curr_session, curr_http_prefix, follower_domain, + debug, signing_priv_key_pem) if debug: if with_shared_inbox: print(follower_domain + ' has shared inbox') @@ -3305,7 +3321,7 @@ def send_to_followers(session, base_dir: str, if to_domain.endswith('.onion'): from_domain = onion_domain from_http_prefix = 'http' - elif i2p_domain: + if i2p_domain: if to_domain.endswith('.i2p'): from_domain = i2p_domain from_http_prefix = 'http' @@ -3333,7 +3349,7 @@ def send_to_followers(session, base_dir: str, nickname + '@' + domain + ' to ' + to_nickname + '@' + to_domain) - send_signed_json(post_json_object, session, base_dir, + send_signed_json(post_json_object, curr_session, base_dir, nickname, from_domain, port, to_nickname, to_domain, to_port, cc_list, from_http_prefix, True, @@ -3362,7 +3378,7 @@ def send_to_followers(session, base_dir: str, nickname + '@' + domain + ' to ' + to_nickname + '@' + to_domain) - send_signed_json(post_json_object, session, base_dir, + send_signed_json(post_json_object, curr_session, base_dir, nickname, from_domain, port, to_nickname, to_domain, to_port, cc_list, from_http_prefix, True, @@ -3383,9 +3399,8 @@ def send_to_followers(session, base_dir: str, print('Sending post to followers ends ' + str(sending_mins) + ' mins') -def send_to_followers_thread(session, base_dir: str, - nickname: str, - domain: str, +def send_to_followers_thread(session, session_onion, session_i2p, + base_dir: str, nickname: str, domain: str, onion_domain: str, i2p_domain: str, port: int, http_prefix: str, federation_list: 
[], send_threads: [], post_log: [], @@ -3399,8 +3414,8 @@ def send_to_followers_thread(session, base_dir: str, """ send_thread = \ thread_with_trace(target=send_to_followers, - args=(session, base_dir, - nickname, domain, + args=(session, session_onion, session_i2p, + base_dir, nickname, domain, onion_domain, i2p_domain, port, http_prefix, federation_list, send_threads, post_log, From ca24fa326513c4076ac4b7524954f0639283d5fa Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Fri, 11 Mar 2022 22:59:15 +0000 Subject: [PATCH 13/31] Send follow requests using the appropriate session --- daemon.py | 23 ++++++++++++++++++++--- follow.py | 7 ++++--- tests.py | 12 ++++++++---- 3 files changed, 32 insertions(+), 10 deletions(-) diff --git a/daemon.py b/daemon.py index cd28df4da..51b94cdf2 100644 --- a/daemon.py +++ b/daemon.py @@ -3275,14 +3275,31 @@ class PubServer(BaseHTTPRequestHandler): follower_nickname + ' to ' + following_actor) if not self.server.signing_priv_key_pem: print('Sending follow request with no signing key') + + curr_domain = domain + curr_port = port + curr_http_prefix = http_prefix + if onion_domain: + if following_domain.endswith('.onion'): + curr_session = self.server.session_onion + curr_domain = onion_domain + curr_port = 80 + curr_http_prefix = 'http' + if i2p_domain: + if following_domain.endswith('.i2p'): + curr_session = self.server.session_i2p + curr_domain = i2p_domain + curr_port = 80 + curr_http_prefix = 'http' + send_follow_request(curr_session, base_dir, follower_nickname, - domain, port, - http_prefix, + domain, curr_domain, curr_port, + curr_http_prefix, following_nickname, following_domain, following_actor, - following_port, http_prefix, + following_port, curr_http_prefix, False, self.server.federation_list, self.server.send_threads, self.server.postLog, diff --git a/follow.py b/follow.py index 0d632184f..3349e3b0b 100644 --- a/follow.py +++ b/follow.py @@ -852,7 +852,8 @@ def followed_account_rejects(session, base_dir: str, http_prefix: str, def send_follow_request(session, base_dir: str, - nickname: str, domain: str, port: int, + nickname: str, domain: str, + sender_domain: str, sender_port: int, http_prefix: str, follow_nickname: str, follow_domain: str, followedActor: str, @@ -870,7 +871,7 @@ def send_follow_request(session, base_dir: str, print('You are not permitted to follow the domain ' + follow_domain) return None - full_domain = get_full_domain(domain, port) + full_domain = get_full_domain(sender_domain, sender_port) follow_actor = local_actor_url(http_prefix, nickname, full_domain) request_domain = get_full_domain(follow_domain, followPort) @@ -934,7 +935,7 @@ def send_follow_request(session, base_dir: str, follow_handle, debug) send_signed_json(new_follow_json, session, base_dir, - nickname, domain, port, + nickname, sender_domain, sender_port, follow_nickname, follow_domain, followPort, 'https://www.w3.org/ns/activitystreams#Public', http_prefix, True, client_to_server, diff --git a/tests.py b/tests.py index f4121d76a..d5bed8eb3 100644 --- a/tests.py +++ b/tests.py @@ -1590,7 +1590,8 @@ def test_follow_between_servers(base_dir: str) -> None: signing_priv_key_pem = None send_result = \ send_follow_request(session_alice, alice_dir, - 'alice', alice_domain, alice_port, http_prefix, + 'alice', alice_domain, + alice_domain, alice_port, http_prefix, 'bob', bob_domain, bob_actor, bob_port, http_prefix, client_to_server, federation_list, @@ -1810,7 +1811,8 @@ def test_shared_items_federation(base_dir: str) -> None: bob_actor = http_prefix + '://' + 
bob_address + '/users/bob' send_result = \ send_follow_request(session_alice, alice_dir, - 'alice', alice_domain, alice_port, http_prefix, + 'alice', alice_domain, + alice_domain, alice_port, http_prefix, 'bob', bob_domain, bob_actor, bob_port, http_prefix, client_to_server, federation_list, @@ -2261,7 +2263,8 @@ def test_group_follow(base_dir: str) -> None: signing_priv_key_pem = None send_result = \ send_follow_request(session_alice, alice_dir, - 'alice', alice_domain, alice_port, http_prefix, + 'alice', alice_domain, + alice_domain, alice_port, http_prefix, 'testgroup', testgroup_domain, testgroup_actor, testgroupPort, http_prefix, client_to_server, federation_list, @@ -2338,7 +2341,8 @@ def test_group_follow(base_dir: str) -> None: signing_priv_key_pem = None send_result = \ send_follow_request(session_bob, bob_dir, - 'bob', bob_domain, bob_port, http_prefix, + 'bob', bob_domain, + bob_domain, bob_port, http_prefix, 'testgroup', testgroup_domain, testgroup_actor, testgroupPort, http_prefix, client_to_server, federation_list, From bf43d9cb342d0ac537cfb096c3caaeba7c3bd534 Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Sat, 12 Mar 2022 10:41:36 +0000 Subject: [PATCH 14/31] Follow accept supports sending to onion and i2p handles --- daemon.py | 4 ++++ epicyon.py | 17 +++++++++++++++-- inbox.py | 37 ++++++++++++++++++++++++++++--------- manualapprove.py | 40 +++++++++++++++++++++++++++++++--------- 4 files changed, 78 insertions(+), 20 deletions(-) diff --git a/daemon.py b/daemon.py index 51b94cdf2..beebe147c 100644 --- a/daemon.py +++ b/daemon.py @@ -8423,6 +8423,10 @@ class PubServer(BaseHTTPRequestHandler): signing_priv_key_pem = \ self.server.signing_priv_key_pem manual_approve_follow_request_thread(curr_session, + self.server.session_onion, + self.server.session_i2p, + self.server.onion_domain, + self.server.i2p_domain, base_dir, http_prefix, follower_nickname, domain, port, diff --git a/epicyon.py b/epicyon.py index 87bcbbaf6..e5c4db318 100644 --- a/epicyon.py +++ b/epicyon.py @@ -1262,6 +1262,8 @@ if args.approve: if '@' not in args.approve: print('syntax: --approve nick@domain') sys.exit() + session_onion = None + session_i2p = None session = create_session(proxy_type) send_threads = [] postLog = [] @@ -1272,8 +1274,19 @@ if args.approve: signing_priv_key_pem = None if args.secure_mode: signing_priv_key_pem = get_instance_actor_key(base_dir, domain) - manual_approve_follow_request(session, base_dir, - http_prefix, + onion_domain = get_config_param(base_dir, 'onionDomain') + if args.onionDomain: + onion_domain = args.onionDomain + if onion_domain: + session_onion = create_session('tor') + i2p_domain = get_config_param(base_dir, 'i2pDomain') + if args.i2pDomain: + i2p_domain = args.i2pDomain + if i2p_domain: + session_i2p = create_session('i2p') + manual_approve_follow_request(session, session_onion, session_i2p, + onion_domain, i2p_domain, + base_dir, http_prefix, args.nickname, domain, port, args.approve, federation_list, diff --git a/inbox.py b/inbox.py index 3d0bbadbb..1f83888e7 100644 --- a/inbox.py +++ b/inbox.py @@ -3926,7 +3926,8 @@ def _check_json_signature(base_dir: str, queue_json: {}) -> (bool, bool): return has_json_signature, jwebsig_type -def _receive_follow_request(session, base_dir: str, http_prefix: str, +def _receive_follow_request(session, session_onion, session_i2p, + base_dir: str, http_prefix: str, port: int, send_threads: [], post_log: [], cached_webfingers: {}, person_cache: {}, message_json: {}, federation_list: [], @@ -4012,8 +4013,23 @@ def 
_receive_follow_request(session, base_dir: str, http_prefix: str, approve_handle = nickname + '@' + domain_full + curr_session = session + curr_http_prefix = http_prefix + curr_domain = domain + curr_port = from_port + if onion_domain and domain_to_follow.endswith('.onion'): + curr_session = session_onion + curr_http_prefix = 'http' + curr_domain = onion_domain + curr_port = 80 + elif i2p_domain and domain_to_follow.endswith('.i2p'): + curr_session = session_i2p + curr_http_prefix = 'http' + curr_domain = i2p_domain + curr_port = 80 + # is the actor sending the request valid? - if not valid_sending_actor(session, base_dir, + if not valid_sending_actor(curr_session, base_dir, nickname_to_follow, domain_to_follow, person_cache, message_json, signing_priv_key_pem, debug, unit_test): @@ -4050,9 +4066,11 @@ def _receive_follow_request(session, base_dir: str, http_prefix: str, # Getting their public key has the same result if debug: print('Obtaining the following actor: ' + message_json['actor']) - if not get_person_pub_key(base_dir, session, message_json['actor'], + if not get_person_pub_key(base_dir, curr_session, + message_json['actor'], person_cache, debug, project_version, - http_prefix, domain_to_follow, onion_domain, + curr_http_prefix, + domain_to_follow, onion_domain, i2p_domain, signing_priv_key_pem): if debug: print('Unable to obtain following actor: ' + @@ -4088,9 +4106,10 @@ def _receive_follow_request(session, base_dir: str, http_prefix: str, if debug: print('Obtaining the following actor: ' + message_json['actor']) - if not get_person_pub_key(base_dir, session, message_json['actor'], + if not get_person_pub_key(base_dir, curr_session, + message_json['actor'], person_cache, debug, project_version, - http_prefix, domain_to_follow, + curr_http_prefix, domain_to_follow, onion_domain, i2p_domain, signing_priv_key_pem): if debug: @@ -4134,9 +4153,9 @@ def _receive_follow_request(session, base_dir: str, http_prefix: str, print('EX: unable to write ' + followers_filename) print('Beginning follow accept') - return followed_account_accepts(session, base_dir, http_prefix, + return followed_account_accepts(curr_session, base_dir, curr_http_prefix, nickname_to_follow, domain_to_follow, port, - nickname, domain, from_port, + nickname, curr_domain, curr_port, message_json['actor'], federation_list, message_json, send_threads, post_log, cached_webfingers, person_cache, @@ -4504,7 +4523,7 @@ def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int, if debug: print('DEBUG: checking for follow requests') - if _receive_follow_request(curr_session, + if _receive_follow_request(curr_session, session_onion, session_i2p, base_dir, http_prefix, port, send_threads, post_log, cached_webfingers, diff --git a/manualapprove.py b/manualapprove.py index 6e0cd9ca8..b5fc1f5fc 100644 --- a/manualapprove.py +++ b/manualapprove.py @@ -120,8 +120,9 @@ def _approve_follower_handle(account_dir: str, approve_handle: str) -> None: print('EX: unable to write ' + approved_filename) -def manual_approve_follow_request(session, base_dir: str, - http_prefix: str, +def manual_approve_follow_request(session, session_onion, session_i2p, + onion_domain: str, i2p_domain: str, + base_dir: str, http_prefix: str, nickname: str, domain: str, port: int, approve_handle: str, federation_list: [], @@ -206,12 +207,31 @@ def manual_approve_follow_request(session, base_dir: str, get_port_from_domain(approve_domain) approve_domain = \ remove_domain_port(approve_domain) + + curr_domain = domain + curr_port = port + curr_session = 
session + curr_http_prefix = http_prefix + if onion_domain and \ + approve_domain.endswith('.onion'): + curr_domain = onion_domain + curr_port = 80 + curr_session = session_onion + curr_http_prefix = 'http' + elif (i2p_domain and + approve_domain.endswith('.i2p')): + curr_domain = i2p_domain + curr_port = 80 + curr_session = session_i2p + curr_http_prefix = 'http' + print('Manual follow accept: Sending Accept for ' + handle + ' follow request from ' + approve_nickname + '@' + approve_domain) - followed_account_accepts(session, base_dir, - http_prefix, - nickname, domain, port, + followed_account_accepts(curr_session, base_dir, + curr_http_prefix, + nickname, + curr_domain, curr_port, approve_nickname, approve_domain, approve_port, @@ -281,8 +301,9 @@ def manual_approve_follow_request(session, base_dir: str, approve_follows_filename + '.new') -def manual_approve_follow_request_thread(session, base_dir: str, - http_prefix: str, +def manual_approve_follow_request_thread(session, session_onion, session_i2p, + onion_domain: str, i2p_domain: str, + base_dir: str, http_prefix: str, nickname: str, domain: str, port: int, approve_handle: str, federation_list: [], @@ -297,8 +318,9 @@ def manual_approve_follow_request_thread(session, base_dir: str, """ thr = \ thread_with_trace(target=manual_approve_follow_request, - args=(session, base_dir, - http_prefix, + args=(session, session_onion, session_i2p, + onion_domain, i2p_domain, + base_dir, http_prefix, nickname, domain, port, approve_handle, federation_list, From 02444ecdefc69bf3cbff98ec3ae8e9587b621e95 Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Sat, 12 Mar 2022 11:04:37 +0000 Subject: [PATCH 15/31] Follow reject supports sending to onion or i2p domains --- daemon.py | 4 ++++ epicyon.py | 25 +++++++++++++++++++------ manualapprove.py | 19 ++++++++++++------- 3 files changed, 35 insertions(+), 13 deletions(-) diff --git a/daemon.py b/daemon.py index beebe147c..140a14c71 100644 --- a/daemon.py +++ b/daemon.py @@ -8587,6 +8587,10 @@ class PubServer(BaseHTTPRequestHandler): get_full_domain(handle_domain, handle_port) if '@' in following_handle: manual_deny_follow_request_thread(curr_session, + self.server.session_onion, + self.server.session_i2p, + self.server.onion_domain, + self.server.i2p_domain, base_dir, http_prefix, follower_nickname, domain, port, diff --git a/epicyon.py b/epicyon.py index e5c4db318..43af99c75 100644 --- a/epicyon.py +++ b/epicyon.py @@ -1275,13 +1275,13 @@ if args.approve: if args.secure_mode: signing_priv_key_pem = get_instance_actor_key(base_dir, domain) onion_domain = get_config_param(base_dir, 'onionDomain') - if args.onionDomain: - onion_domain = args.onionDomain + if args.onion: + onion_domain = args.onion if onion_domain: session_onion = create_session('tor') i2p_domain = get_config_param(base_dir, 'i2pDomain') - if args.i2pDomain: - i2p_domain = args.i2pDomain + if args.i2p_domain: + i2p_domain = args.i2p_domain if i2p_domain: session_i2p = create_session('i2p') manual_approve_follow_request(session, session_onion, session_i2p, @@ -1303,6 +1303,8 @@ if args.deny: if '@' not in args.deny: print('syntax: --deny nick@domain') sys.exit() + session_onion = None + session_i2p = None session = create_session(proxy_type) send_threads = [] postLog = [] @@ -1313,8 +1315,19 @@ if args.deny: signing_priv_key_pem = None if args.secure_mode: signing_priv_key_pem = get_instance_actor_key(base_dir, domain) - manual_deny_follow_request(session, base_dir, - http_prefix, + onion_domain = get_config_param(base_dir, 'onionDomain') + if 
args.onion: + onion_domain = args.onion + if onion_domain: + session_onion = create_session('tor') + i2p_domain = get_config_param(base_dir, 'i2pDomain') + if args.i2p_domain: + i2p_domain = args.i2p_domain + if i2p_domain: + session_i2p = create_session('i2p') + manual_deny_follow_request(session, session_onion, session_i2p, + onion_domain, i2p_domain, + base_dir, http_prefix, args.nickname, domain, port, args.deny, federation_list, diff --git a/manualapprove.py b/manualapprove.py index b5fc1f5fc..df7c7708b 100644 --- a/manualapprove.py +++ b/manualapprove.py @@ -19,8 +19,9 @@ from utils import acct_dir from threads import thread_with_trace -def manual_deny_follow_request(session, base_dir: str, - http_prefix: str, +def manual_deny_follow_request(session, session_onion, session_i2p, + onion_domain: str, i2p_domain: str, + base_dir: str, http_prefix: str, nickname: str, domain: str, port: int, deny_handle: str, federation_list: [], @@ -59,7 +60,9 @@ def manual_deny_follow_request(session, base_dir: str, if ':' in deny_domain: deny_port = get_port_from_domain(deny_domain) deny_domain = remove_domain_port(deny_domain) - followed_account_rejects(session, base_dir, http_prefix, + followed_account_rejects(session, session_onion, session_i2p, + onion_domain, i2p_domain, + base_dir, http_prefix, nickname, domain, port, deny_nickname, deny_domain, deny_port, federation_list, @@ -71,8 +74,9 @@ def manual_deny_follow_request(session, base_dir: str, print('Follow request from ' + deny_handle + ' was denied.') -def manual_deny_follow_request_thread(session, base_dir: str, - http_prefix: str, +def manual_deny_follow_request_thread(session, session_onion, session_i2p, + onion_domain: str, i2p_domain: str, + base_dir: str, http_prefix: str, nickname: str, domain: str, port: int, deny_handle: str, federation_list: [], @@ -86,8 +90,9 @@ def manual_deny_follow_request_thread(session, base_dir: str, """ thr = \ thread_with_trace(target=manual_deny_follow_request, - args=(session, base_dir, - http_prefix, + args=(session, session_onion, session_i2p, + onion_domain, i2p_domain, + base_dir, http_prefix, nickname, domain, port, deny_handle, federation_list, From 8e6327d127de7bfbbc2863bc4f566d28eca2aae5 Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Sat, 12 Mar 2022 11:44:07 +0000 Subject: [PATCH 16/31] More mitm headers --- daemon.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/daemon.py b/daemon.py index 140a14c71..1ab471da6 100644 --- a/daemon.py +++ b/daemon.py @@ -437,6 +437,16 @@ class PubServer(BaseHTTPRequestHandler): if self.headers.get(header_name): if interloper in self.headers[header_name]: return True + # The presence if these headers on their own indicates a MiTM + mitm_headers = ( + 'CF-Connecting-IP', 'CF-RAY', 'CF-IPCountry', 'CF-Visitor', + 'CDN-Loop', 'CF-Worker' + ) + for header_name in mitm_headers: + if self.headers.get(header_name): + return True + if self.headers.get(header_name.lower()): + return True return False def _get_instance_url(self, calling_domain: str) -> str: From ec9ae968503bd2a2ecf986615eb0b97248216f30 Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Sat, 12 Mar 2022 13:24:36 +0000 Subject: [PATCH 17/31] Can reply to unlisted posts with unlisted scope --- webapp_create_post.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/webapp_create_post.py b/webapp_create_post.py index 7157adebd..47e432273 100644 --- a/webapp_create_post.py +++ b/webapp_create_post.py @@ -300,8 +300,6 @@ def html_new_post(css_cache: {}, media_instance: bool, translate: {}, 
new_post_path = new_post_path.split('?')[0] if new_post_path.endswith('/newpost'): path = path.replace('/newpost', '/newfollowers') - elif new_post_path.endswith('/newunlisted'): - path = path.replace('/newunlisted', '/newfollowers') show_public_on_dropdown = False else: new_post_text = \ From d77d397bcadbb7c82d0afcddd490ffe610651a64 Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Sat, 12 Mar 2022 13:28:32 +0000 Subject: [PATCH 18/31] Unlisted available on dropdown when replying to unlisted post --- webapp_create_post.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/webapp_create_post.py b/webapp_create_post.py index 47e432273..b764f262d 100644 --- a/webapp_create_post.py +++ b/webapp_create_post.py @@ -122,12 +122,12 @@ def _html_new_post_drop_down(scope_icon: str, scope_description: str, 'icons/scope_blog.png"/>' + \ translate['Blog'] + '
' + \ translate['Publicly visible post'] + '\n' - drop_down_content += \ - '
  • ' + \ - translate['Unlisted'] + '
    ' + \ - translate['Not on public timeline'] + '
  • \n' + drop_down_content += \ + '
  • ' + \ + translate['Unlisted'] + '
    ' + \ + translate['Not on public timeline'] + '
  • \n' drop_down_content += \ '
  • ' + \ From 998fa21b138212e648e9f832b5180b013ddae893 Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Sat, 12 Mar 2022 14:09:36 +0000 Subject: [PATCH 19/31] Handle replying to unlisted posts --- daemon.py | 20 ++++++++++++++++++++ utils.py | 26 ++++++++++++++++++++++++++ webapp_create_post.py | 2 +- webapp_post.py | 18 +++++++++++++++--- 4 files changed, 62 insertions(+), 4 deletions(-) diff --git a/daemon.py b/daemon.py index 1ab471da6..7d032b70a 100644 --- a/daemon.py +++ b/daemon.py @@ -16799,6 +16799,26 @@ class PubServer(BaseHTTPRequestHandler): if self.server.debug: print('DEBUG: replyto path ' + self.path) + # unlisted reply + if '?replyunlisted=' in self.path: + in_reply_to_url = self.path.split('?replyunlisted=')[1] + if '?' in in_reply_to_url: + mentions_list = in_reply_to_url.split('?') + for m in mentions_list: + if m.startswith('mention='): + reply_handle = m.replace('mention=', '') + if reply_handle not in reply_to_list: + reply_to_list.append(reply_handle) + if m.startswith('page='): + reply_page_str = m.replace('page=', '') + if reply_page_str.isdigit(): + reply_page_number = int(reply_page_str) + in_reply_to_url = mentions_list[0] + self.path = \ + self.path.split('?replyunlisted=')[0] + '/newunlisted' + if self.server.debug: + print('DEBUG: replyunlisted path ' + self.path) + # reply to followers if '?replyfollowers=' in self.path: in_reply_to_url = self.path.split('?replyfollowers=')[1] diff --git a/utils.py b/utils.py index 7a866dde2..7f46917b8 100644 --- a/utils.py +++ b/utils.py @@ -2023,6 +2023,32 @@ def is_public_post(post_json_object: {}) -> bool: return False +def is_unlisted_post(post_json_object: {}) -> bool: + """Returns true if the given post is unlisted + """ + if not post_json_object.get('type'): + return False + if post_json_object['type'] != 'Create': + return False + if not has_object_dict(post_json_object): + return False + if not post_json_object['object'].get('to'): + return False + if not post_json_object['object'].get('cc'): + return False + has_followers = False + for recipient in post_json_object['object']['to']: + if recipient.endswith('/followers'): + has_followers = True + break + if not has_followers: + return False + for recipient in post_json_object['object']['cc']: + if recipient.endswith('#Public'): + return True + return False + + def copytree(src: str, dst: str, symlinks: str = False, ignore: bool = None): """Copy a directory """ diff --git a/webapp_create_post.py b/webapp_create_post.py index b764f262d..4dff64b43 100644 --- a/webapp_create_post.py +++ b/webapp_create_post.py @@ -692,7 +692,7 @@ def html_new_post(css_cache: {}, media_instance: bool, translate: {}, if inReplyTo: dropdown_new_post_suffix += '?replyto=' + inReplyTo dropdown_new_blog_suffix += '?replyto=' + inReplyTo - dropdown_unlisted_suffix += '?replyto=' + inReplyTo + dropdown_unlisted_suffix += '?replyunlisted=' + inReplyTo dropdown_followers_suffix += '?replyfollowers=' + inReplyTo if reply_is_chat: dropdown_dm_suffix += '?replychat=' + inReplyTo diff --git a/webapp_post.py b/webapp_post.py index 59f40e1b8..5100eeb09 100644 --- a/webapp_post.py +++ b/webapp_post.py @@ -57,6 +57,7 @@ from utils import get_nickname_from_actor from utils import get_domain_from_actor from utils import acct_dir from utils import local_actor_url +from utils import is_unlisted_post from content import limit_repeated_words from content import replace_emoji_from_tags from content import html_replace_quote_marks @@ -389,7 +390,7 @@ def _get_avatar_image_html(showAvatarOptions: bool, def 
_get_reply_icon_html(base_dir: str, nickname: str, domain: str, - is_public_repeat: bool, + is_public_reply: bool, is_unlisted_reply: bool, show_icons: bool, comments_enabled: bool, post_json_object: {}, page_number_param: str, translate: {}, system_language: str, @@ -439,13 +440,20 @@ def _get_reply_icon_html(base_dir: str, nickname: str, domain: str, conversation_str = '' if conversation_id: conversation_str = '?conversationId=' + conversation_id - if is_public_repeat: + if is_public_reply: reply_str += \ ' \n' + elif is_unlisted_reply: + reply_str += \ + ' \n' else: if is_dm(post_json_object): reply_type = 'replydm' @@ -1698,10 +1706,14 @@ def individual_post_as_html(signing_priv_key_pem: str, conversation_id = post_json_object['object']['conversation'] public_reply = False + unlisted_reply = False if is_public_post(post_json_object): public_reply = True + if is_unlisted_post(post_json_object): + public_reply = False + unlisted_reply = True reply_str = _get_reply_icon_html(base_dir, nickname, domain, - public_reply, + public_reply, unlisted_reply, show_icons, comments_enabled, post_json_object, page_number_param, translate, system_language, From 8f8a3098fd064f37e06b440b0720b94cbcaaff93 Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Sat, 12 Mar 2022 14:22:18 +0000 Subject: [PATCH 20/31] Get users path for delete --- daemon.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/daemon.py b/daemon.py index 7d032b70a..bbeb8ee57 100644 --- a/daemon.py +++ b/daemon.py @@ -9821,6 +9821,9 @@ class PubServer(BaseHTTPRequestHandler): if '?' in timeline_str: timeline_str = timeline_str.split('?')[0] users_path = path.split('?delete=')[0] + if not users_path.startswith('/users/'): + if '/users/' in users_path: + users_path = '/users/' + users_path.split('/users/')[1] actor = \ http_prefix + '://' + domain_full + users_path if self.server.allow_deletion or \ From 02b0df7bc73fc9dc04a9cf31603081e9840106dd Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Sat, 12 Mar 2022 14:24:36 +0000 Subject: [PATCH 21/31] Users path for delete --- daemon.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/daemon.py b/daemon.py index bbeb8ee57..7d032b70a 100644 --- a/daemon.py +++ b/daemon.py @@ -9821,9 +9821,6 @@ class PubServer(BaseHTTPRequestHandler): if '?' 
in timeline_str: timeline_str = timeline_str.split('?')[0] users_path = path.split('?delete=')[0] - if not users_path.startswith('/users/'): - if '/users/' in users_path: - users_path = '/users/' + users_path.split('/users/')[1] actor = \ http_prefix + '://' + domain_full + users_path if self.server.allow_deletion or \ From ec3b9cacf8af603988521b300c1727298015537a Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Sat, 12 Mar 2022 14:29:26 +0000 Subject: [PATCH 22/31] Url for deletion --- daemon.py | 1 + 1 file changed, 1 insertion(+) diff --git a/daemon.py b/daemon.py index 7d032b70a..ca6e41af3 100644 --- a/daemon.py +++ b/daemon.py @@ -9864,6 +9864,7 @@ class PubServer(BaseHTTPRequestHandler): self._404() return + delete_url = delete_url.replace('/', '#') delete_str = \ html_confirm_delete(self.server.css_cache, self.server.recent_posts_cache, From 8c328849d4595dd5c2296570162736fba1d1a562 Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Sat, 12 Mar 2022 14:50:47 +0000 Subject: [PATCH 23/31] Debug --- daemon.py | 1 - utils.py | 2 ++ 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/daemon.py b/daemon.py index ca6e41af3..7d032b70a 100644 --- a/daemon.py +++ b/daemon.py @@ -9864,7 +9864,6 @@ class PubServer(BaseHTTPRequestHandler): self._404() return - delete_url = delete_url.replace('/', '#') delete_str = \ html_confirm_delete(self.server.css_cache, self.server.recent_posts_cache, diff --git a/utils.py b/utils.py index 7f46917b8..f592d05ac 100644 --- a/utils.py +++ b/utils.py @@ -1383,6 +1383,8 @@ def locate_post(base_dir: str, nickname: str, domain: str, account_dir = acct_dir(base_dir, nickname, domain) + '/' for box_name in boxes: post_filename = account_dir + box_name + '/' + post_url + if box_name == 'outbox': + print('locate_post filename: ' + post_filename) if os.path.isfile(post_filename): return post_filename From d06c69e656cb2dd9283a83245b2e56508e49160a Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Sat, 12 Mar 2022 15:23:15 +0000 Subject: [PATCH 24/31] Debug --- daemon.py | 1 + utils.py | 2 -- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/daemon.py b/daemon.py index 7d032b70a..2f18fac7a 100644 --- a/daemon.py +++ b/daemon.py @@ -4337,6 +4337,7 @@ class PubServer(BaseHTTPRequestHandler): remove_post_confirm_params.split('messageId=')[1] if '&' in remove_message_id: remove_message_id = remove_message_id.split('&')[0] + print('remove_message_id: ' + remove_message_id) if 'pageNumber=' in remove_post_confirm_params: page_number_str = \ remove_post_confirm_params.split('pageNumber=')[1] diff --git a/utils.py b/utils.py index f592d05ac..7f46917b8 100644 --- a/utils.py +++ b/utils.py @@ -1383,8 +1383,6 @@ def locate_post(base_dir: str, nickname: str, domain: str, account_dir = acct_dir(base_dir, nickname, domain) + '/' for box_name in boxes: post_filename = account_dir + box_name + '/' + post_url - if box_name == 'outbox': - print('locate_post filename: ' + post_filename) if os.path.isfile(post_filename): return post_filename From 2d474486a88a73c43a95b66e5b5c238c50ef3124 Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Sat, 12 Mar 2022 15:24:01 +0000 Subject: [PATCH 25/31] Debug --- epicyon.py | 1 + 1 file changed, 1 insertion(+) diff --git a/epicyon.py b/epicyon.py index 43af99c75..37580ccf1 100644 --- a/epicyon.py +++ b/epicyon.py @@ -3407,6 +3407,7 @@ if args.defaultCurrency: print('Default currency set to ' + args.defaultCurrency) if __name__ == "__main__": + print('allowdeletion: ' + str(allowdeletion)) run_daemon(crawlers_allowed, args.dyslexic_font, 
content_license_url, From ae7bfd78fb2647e2caa926f5476bdbf917f1b643 Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Sat, 12 Mar 2022 15:25:57 +0000 Subject: [PATCH 26/31] Debug --- epicyon.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/epicyon.py b/epicyon.py index 37580ccf1..57a62f784 100644 --- a/epicyon.py +++ b/epicyon.py @@ -3407,7 +3407,7 @@ if args.defaultCurrency: print('Default currency set to ' + args.defaultCurrency) if __name__ == "__main__": - print('allowdeletion: ' + str(allowdeletion)) + print('allowdeletion: ' + str(args.allowdeletion)) run_daemon(crawlers_allowed, args.dyslexic_font, content_license_url, From b9ea66bf5a36d26d6786fa301225d3c59b39acd2 Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Sat, 12 Mar 2022 17:19:09 +0000 Subject: [PATCH 27/31] Inbox arrival debug --- daemon.py | 40 +++++++++++++++++++++++++++++----------- 1 file changed, 29 insertions(+), 11 deletions(-) diff --git a/daemon.py b/daemon.py index 2f18fac7a..4586bb172 100644 --- a/daemon.py +++ b/daemon.py @@ -1582,14 +1582,16 @@ class PubServer(BaseHTTPRequestHandler): """ if self.server.restart_inbox_queue_in_progress: self._503() - print('Message arrived but currently restarting inbox queue') + print('INBOX: ' + + 'message arrived but currently restarting inbox queue') self.server.postreq_busy = False return 2 # check that the incoming message has a fully recognized # linked data context if not has_valid_context(message_json): - print('Message arriving at inbox queue has no valid context') + print('INBOX: ' + + 'message arriving at inbox queue has no valid context') self._400() self.server.postreq_busy = False return 3 @@ -1597,13 +1599,15 @@ class PubServer(BaseHTTPRequestHandler): # check for blocked domains so that they can be rejected early message_domain = None if not has_actor(message_json, self.server.debug): - print('Message arriving at inbox queue has no actor') + print('INBOX: message arriving at inbox queue has no actor') self._400() self.server.postreq_busy = False return 3 # actor should be a string if not isinstance(message_json['actor'], str): + print('INBOX: ' + + 'actor should be a string ' + str(message_json['actor'])) self._400() self.server.postreq_busy = False return 3 @@ -1614,6 +1618,9 @@ class PubServer(BaseHTTPRequestHandler): if not message_json.get(check_field): continue if not isinstance(message_json[check_field], str): + print('INBOX: ' + + 'id, type and published fields should be strings ' + + check_field + ' ' + str(message_json[check_field])) self._400() self.server.postreq_busy = False return 3 @@ -1624,6 +1631,8 @@ class PubServer(BaseHTTPRequestHandler): if not message_json.get(check_field): continue if not isinstance(message_json[check_field], list): + print('INBOX: To and Cc fields should be strings ' + + check_field + ' ' + str(message_json[check_field])) self._400() self.server.postreq_busy = False return 3 @@ -1637,6 +1646,9 @@ class PubServer(BaseHTTPRequestHandler): if not message_json['object'].get(check_field): continue if not isinstance(message_json['object'][check_field], str): + print('INBOX: ' + + check_field + ' should be a string ' + + str(message_json[check_field])) self._400() self.server.postreq_busy = False return 3 @@ -1646,6 +1658,9 @@ class PubServer(BaseHTTPRequestHandler): if not message_json['object'].get(check_field): continue if not isinstance(message_json['object'][check_field], list): + print('INBOX: ' + + check_field + ' should be a list ' + + str(message_json[check_field])) self._400() self.server.postreq_busy 
= False return 3 @@ -1653,7 +1668,7 @@ class PubServer(BaseHTTPRequestHandler): # actor should look like a url if '://' not in message_json['actor'] or \ '.' not in message_json['actor']: - print('POST actor does not look like a url ' + + print('INBOX: POST actor does not look like a url ' + message_json['actor']) self._400() self.server.postreq_busy = False @@ -1664,7 +1679,7 @@ class PubServer(BaseHTTPRequestHandler): local_network_pattern_list = get_local_network_addresses() for local_network_pattern in local_network_pattern_list: if local_network_pattern in message_json['actor']: - print('POST actor contains local network address ' + + print('INBOX: POST actor contains local network address ' + message_json['actor']) self._400() self.server.postreq_busy = False @@ -1681,7 +1696,7 @@ class PubServer(BaseHTTPRequestHandler): if is_blocked_domain(self.server.base_dir, message_domain, self.server.blocked_cache): - print('POST from blocked domain ' + message_domain) + print('INBOX: POST from blocked domain ' + message_domain) self._400() self.server.postreq_busy = False return 3 @@ -1689,10 +1704,11 @@ class PubServer(BaseHTTPRequestHandler): # if the inbox queue is full then return a busy code if len(self.server.inbox_queue) >= self.server.max_queue_length: if message_domain: - print('Queue: Inbox queue is full. Incoming post from ' + + print('INBOX: Queue: ' + + 'Inbox queue is full. Incoming post from ' + message_json['actor']) else: - print('Queue: Inbox queue is full') + print('INBOX: Queue: Inbox queue is full') self._503() clear_queue_items(self.server.base_dir, self.server.inbox_queue) if not self.server.restart_inbox_queue_in_progress: @@ -1735,7 +1751,7 @@ class PubServer(BaseHTTPRequestHandler): message_bytes_decoded = message_bytes.decode('utf-8') if contains_invalid_local_links(message_bytes_decoded): - print('WARN: post contains invalid local links ' + + print('INBOX: post contains invalid local links ' + str(original_message_json)) return 5 @@ -4358,8 +4374,10 @@ class PubServer(BaseHTTPRequestHandler): if '/statuses/' in remove_message_id: remove_post_actor = remove_message_id.split('/statuses/')[0] if origin_path_str in remove_post_actor: - toList = ['https://www.w3.org/ns/activitystreams#Public', - remove_post_actor] + toList = [ + 'https://www.w3.org/ns/activitystreams#Public', + remove_post_actor + ] delete_json = { "@context": "https://www.w3.org/ns/activitystreams", 'actor': remove_post_actor, From 3f1e5f38fb0da9f8ebdb373dec04fd2b3baa4998 Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Sat, 12 Mar 2022 17:25:25 +0000 Subject: [PATCH 28/31] Inbox debug --- daemon.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/daemon.py b/daemon.py index 4586bb172..7e569912b 100644 --- a/daemon.py +++ b/daemon.py @@ -1577,9 +1577,11 @@ class PubServer(BaseHTTPRequestHandler): return True def _update_inbox_queue(self, nickname: str, message_json: {}, - message_bytes: str) -> int: + message_bytes: str, debug: bool) -> int: """Update the inbox queue """ + if debug: + if self.server.restart_inbox_queue_in_progress: self._503() print('INBOX: ' + @@ -19869,27 +19871,28 @@ class PubServer(BaseHTTPRequestHandler): self.server.debug) if self.server.debug: - print('DEBUG: POST saving to inbox queue') + print('INBOX: POST saving to inbox queue') if users_in_path: path_users_section = self.path.split('/users/')[1] if '/' not in path_users_section: if self.server.debug: - print('DEBUG: This is not a users endpoint') + print('INBOX: This is not a users 
endpoint') else: self.post_to_nickname = path_users_section.split('/')[0] if self.post_to_nickname: queue_status = \ self._update_inbox_queue(self.post_to_nickname, - message_json, message_bytes) + message_json, message_bytes, + self.server.debug) if queue_status >= 0 and queue_status <= 3: self.server.postreq_busy = False return if self.server.debug: - print('_update_inbox_queue exited ' + + print('INBOX: _update_inbox_queue exited ' + 'without doing anything') else: if self.server.debug: - print('self.post_to_nickname is None') + print('INBOX: self.post_to_nickname is None') self.send_response(403) self.end_headers() self.server.postreq_busy = False @@ -19897,10 +19900,11 @@ class PubServer(BaseHTTPRequestHandler): else: if self.path == '/sharedInbox' or self.path == '/inbox': if self.server.debug: - print('DEBUG: POST to shared inbox') + print('INBOX: POST to shared inbox') queue_status = \ self._update_inbox_queue('inbox', message_json, - message_bytes) + message_bytes, + self.server.debug) if queue_status >= 0 and queue_status <= 3: self.server.postreq_busy = False return From fe798e8d444067e8f9481cf89abab2afcec57282 Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Sat, 12 Mar 2022 17:33:22 +0000 Subject: [PATCH 29/31] Inbox debug --- daemon.py | 28 +++++++++++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/daemon.py b/daemon.py index 7e569912b..bebd09446 100644 --- a/daemon.py +++ b/daemon.py @@ -1581,7 +1581,7 @@ class PubServer(BaseHTTPRequestHandler): """Update the inbox queue """ if debug: - + print('INBOX: checking inbox queue restart') if self.server.restart_inbox_queue_in_progress: self._503() print('INBOX: ' + @@ -1591,6 +1591,8 @@ class PubServer(BaseHTTPRequestHandler): # check that the incoming message has a fully recognized # linked data context + if debug: + print('INBOX: checking valid context') if not has_valid_context(message_json): print('INBOX: ' + 'message arriving at inbox queue has no valid context') @@ -1599,6 +1601,8 @@ class PubServer(BaseHTTPRequestHandler): return 3 # check for blocked domains so that they can be rejected early + if debug: + print('INBOX: checking for actor') message_domain = None if not has_actor(message_json, self.server.debug): print('INBOX: message arriving at inbox queue has no actor') @@ -1607,6 +1611,8 @@ class PubServer(BaseHTTPRequestHandler): return 3 # actor should be a string + if debug: + print('INBOX: checking that actor is string') if not isinstance(message_json['actor'], str): print('INBOX: ' + 'actor should be a string ' + str(message_json['actor'])) @@ -1615,6 +1621,8 @@ class PubServer(BaseHTTPRequestHandler): return 3 # check that some additional fields are strings + if debug: + print('INBOX: checking fields 1') string_fields = ('id', 'type', 'published') for check_field in string_fields: if not message_json.get(check_field): @@ -1628,6 +1636,8 @@ class PubServer(BaseHTTPRequestHandler): return 3 # check that to/cc fields are lists + if debug: + print('INBOX: checking to and cc fields') list_fields = ('to', 'cc') for check_field in list_fields: if not message_json.get(check_field): @@ -1640,6 +1650,8 @@ class PubServer(BaseHTTPRequestHandler): return 3 if has_object_dict(message_json): + if debug: + print('INBOX: checking object fields') string_fields = ( 'id', 'actor', 'type', 'content', 'published', 'summary', 'url', 'attributedTo' @@ -1655,6 +1667,8 @@ class PubServer(BaseHTTPRequestHandler): self.server.postreq_busy = False return 3 # check that some fields are lists + if debug: + 
print('INBOX: checking object to and cc fields') list_fields = ('to', 'cc', 'attachment') for check_field in list_fields: if not message_json['object'].get(check_field): @@ -1668,6 +1682,8 @@ class PubServer(BaseHTTPRequestHandler): return 3 # actor should look like a url + if debug: + print('INBOX: checking that actor looks like a url') if '://' not in message_json['actor'] or \ '.' not in message_json['actor']: print('INBOX: POST actor does not look like a url ' + @@ -1677,6 +1693,8 @@ class PubServer(BaseHTTPRequestHandler): return 3 # sent by an actor on a local network address? + if debug: + print('INBOX: checking for local network access') if not self.server.allow_local_network_access: local_network_pattern_list = get_local_network_addresses() for local_network_pattern in local_network_pattern_list: @@ -1696,6 +1714,8 @@ class PubServer(BaseHTTPRequestHandler): self.server.blocked_cache_last_updated, self.server.blocked_cache_update_secs) + if debug: + print('INBOX: checking for blocked domain ' + message_domain) if is_blocked_domain(self.server.base_dir, message_domain, self.server.blocked_cache): print('INBOX: POST from blocked domain ' + message_domain) @@ -1704,6 +1724,8 @@ class PubServer(BaseHTTPRequestHandler): return 3 # if the inbox queue is full then return a busy code + if debug: + print('INBOX: checking for full queue') if len(self.server.inbox_queue) >= self.server.max_queue_length: if message_domain: print('INBOX: Queue: ' + @@ -1752,6 +1774,8 @@ class PubServer(BaseHTTPRequestHandler): # save the json for later queue processing message_bytes_decoded = message_bytes.decode('utf-8') + if debug: + print('INBOX: checking for invalid links') if contains_invalid_local_links(message_bytes_decoded): print('INBOX: post contains invalid local links ' + str(original_message_json)) @@ -1765,6 +1789,8 @@ class PubServer(BaseHTTPRequestHandler): mitm = self._detect_mitm() + if debug: + print('INBOX: saving post to queue') queue_filename = \ save_post_to_inbox_queue(self.server.base_dir, self.server.http_prefix, From 075d4e070efd8821335ac3f493352ccc9fb1360b Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Sat, 12 Mar 2022 17:38:44 +0000 Subject: [PATCH 30/31] Remove post debug --- daemon.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/daemon.py b/daemon.py index bebd09446..666ee59cb 100644 --- a/daemon.py +++ b/daemon.py @@ -4401,6 +4401,8 @@ class PubServer(BaseHTTPRequestHandler): month_str = month_str.split('&')[0] if '/statuses/' in remove_message_id: remove_post_actor = remove_message_id.split('/statuses/')[0] + print('origin_path_str: ' + origin_path_str) + print('remove_post_actor: ' + remove_post_actor) if origin_path_str in remove_post_actor: toList = [ 'https://www.w3.org/ns/activitystreams#Public', From eebc60170d5ec9724624ce560f24e3a22f8c4c57 Mon Sep 17 00:00:00 2001 From: Bob Mottram Date: Sat, 12 Mar 2022 18:28:07 +0000 Subject: [PATCH 31/31] Fix missing outbox parameters --- daemon.py | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/daemon.py b/daemon.py index 666ee59cb..14f70c58a 100644 --- a/daemon.py +++ b/daemon.py @@ -1555,7 +1555,8 @@ class PubServer(BaseHTTPRequestHandler): pass return index - def _post_to_outbox_thread(self, message_json: {}) -> bool: + def _post_to_outbox_thread(self, message_json: {}, + curr_session, proxy_type: str) -> bool: """Creates a thread to send a post """ account_outbox_thread_name = self.post_to_nickname @@ -1570,7 +1571,8 @@ class PubServer(BaseHTTPRequestHandler): 
self.server.outboxThread[account_outbox_thread_name][index] = \ thread_with_trace(target=self._post_to_outbox, args=(message_json.copy(), - self.server.project_version, None), + self.server.project_version, None, + curr_session, proxy_type), daemon=True) print('Starting outbox thread') self.server.outboxThread[account_outbox_thread_name][index].start() @@ -3176,7 +3178,8 @@ class PubServer(BaseHTTPRequestHandler): base_dir: str, http_prefix: str, domain: str, domain_full: str, port: int, onion_domain: str, i2p_domain: str, - debug: bool) -> None: + debug: bool, + curr_session, proxy_type: str) -> None: """Confirm to unfollow """ users_path = path.split('/unfollowconfirm')[0] @@ -3251,7 +3254,8 @@ class PubServer(BaseHTTPRequestHandler): self.server.domain, following_nickname, following_domain_full, self.server.debug, group_account) - self._post_to_outbox_thread(unfollow_json) + self._post_to_outbox_thread(unfollow_json, + curr_session, proxy_type) if calling_domain.endswith('.onion') and onion_domain: origin_path_str = 'http://' + onion_domain + users_path @@ -4343,7 +4347,8 @@ class PubServer(BaseHTTPRequestHandler): base_dir: str, http_prefix: str, domain: str, domain_full: str, onion_domain: str, i2p_domain: str, - debug: bool) -> None: + debug: bool, + curr_session, proxy_type: str) -> None: """Endpoint for removing posts after confirmation """ page_number = 1 @@ -4428,7 +4433,8 @@ class PubServer(BaseHTTPRequestHandler): domain, year_int, month_int, remove_message_id) - self._post_to_outbox_thread(delete_json) + self._post_to_outbox_thread(delete_json, + curr_session, proxy_type) if calling_domain.endswith('.onion') and onion_domain: origin_path_str = 'http://' + onion_domain + users_path elif (calling_domain.endswith('.i2p') and i2p_domain): @@ -19374,7 +19380,8 @@ class PubServer(BaseHTTPRequestHandler): self.server.domain_full, self.server.onion_domain, self.server.i2p_domain, - self.server.debug) + self.server.debug, + curr_session, proxy_type) self.server.postreq_busy = False return @@ -19414,7 +19421,8 @@ class PubServer(BaseHTTPRequestHandler): self.server.port, self.server.onion_domain, self.server.i2p_domain, - self.server.debug) + self.server.debug, + curr_session, proxy_type) self.server.postreq_busy = False return
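Taken together, these patches replace the single server-wide session with a per-request choice of session and proxy type, keyed on the domain being contacted. A minimal sketch of that selection step, assuming the three session objects are passed in explicitly; the helper name select_session is illustrative and not part of the codebase:

def select_session(dest_domain: str,
                   session, session_onion, session_i2p,
                   proxy_type: str):
    """Pick (session, proxy_type) for the destination domain.

    Mirrors the suffix checks added throughout these patches: .onion
    destinations use the Tor session, .i2p destinations use the i2p
    session, anything else keeps the clearnet session and the default
    proxy type.
    """
    if session_onion and dest_domain.endswith('.onion'):
        return session_onion, 'tor'
    if session_i2p and dest_domain.endswith('.i2p'):
        return session_i2p, 'i2p'
    return session, proxy_type

This is the shape of the curr_session / curr_proxy_type plumbing in run_inbox_queue, send_to_followers and _send_to_named_addresses, where the domain comes from the actor or follower handle being delivered to.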
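run_inbox_queue also regenerates each of those sessions on a six hour (21600 second) timer, keeping a last-update time per session (session_last_update, session_last_update_onion, session_last_update_i2p) alongside the session object itself. A hedged sketch of that pattern; create_session is passed in so the snippet stays self-contained, and refresh_session is an illustrative name:

import time

SESSION_MAX_AGE_SEC = 21600  # six hours, the interval used in the patch


def refresh_session(session, last_update: int, proxy_type: str,
                    create_session):
    """Recreate a session when it is missing or older than six hours.

    Returns (session, last_update). On failure the previous values are
    handed back unchanged so the caller can skip the current queue item
    and retry on the next pass, as the patch does with 'continue'.
    """
    curr_time = int(time.time())
    if session and curr_time - last_update <= SESSION_MAX_AGE_SEC:
        return session, last_update
    new_session = create_session(proxy_type)
    if new_session:
        return new_session, curr_time
    print('WARN: unable to recreate ' + str(proxy_type) + ' session')
    return session, last_update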
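When the peer being followed, accepted or denied lives on an overlay network, the patches also swap the sender's own identity: the activity is sent from the instance's onion or i2p address, over plain http on port 80. A small sketch of that substitution under the same domain-suffix assumption; sender_identity is not a function in the codebase:

def sender_identity(dest_domain: str,
                    domain: str, port: int, http_prefix: str,
                    onion_domain: str = None,
                    i2p_domain: str = None):
    """Return (from_domain, from_port, from_http_prefix) for a peer.

    Overlay destinations are answered from the matching overlay address,
    which in these patches always speaks plain http on port 80.
    """
    if onion_domain and dest_domain.endswith('.onion'):
        return onion_domain, 80, 'http'
    if i2p_domain and dest_domain.endswith('.i2p'):
        return i2p_domain, 80, 'http'
    return domain, port, http_prefix

send_follow_request, followed_account_accepts and manual_approve_follow_request then receive the substituted domain, port and prefix rather than the clearnet ones.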
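The "More mitm headers" change treats the mere presence of certain CDN headers, such as Cloudflare's CF-Connecting-IP, as evidence that the connection has been intercepted. The same heuristic sketched over a plain dict of request headers; the header tuple is the one added in the patch, while the function name is illustrative:

MITM_HEADER_NAMES = (
    'CF-Connecting-IP', 'CF-RAY', 'CF-IPCountry', 'CF-Visitor',
    'CDN-Loop', 'CF-Worker'
)


def headers_indicate_mitm(headers: dict) -> bool:
    """True if any known interception header is present, checked in both
    the documented capitalisation and all lower case."""
    for header_name in MITM_HEADER_NAMES:
        if headers.get(header_name) or headers.get(header_name.lower()):
            return True
    return False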
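The unlisted reply patches distinguish unlisted posts from public ones purely by addressing: an unlisted post puts the author's followers collection in 'to' and demotes #Public to 'cc', while a public post carries #Public in 'to'. A short illustration of that rule with made-up example URLs; is_unlisted_addressing stands in for the new is_unlisted_post helper, which additionally checks the activity type and the object fields:

PUBLIC_COLLECTION = 'https://www.w3.org/ns/activitystreams#Public'


def is_unlisted_addressing(to_list: list, cc_list: list) -> bool:
    """Unlisted: followers collection in 'to', #Public only in 'cc'."""
    has_followers = any(r.endswith('/followers') for r in to_list)
    public_in_cc = any(r.endswith('#Public') for r in cc_list)
    return has_followers and public_in_cc


# delivered like a followers-only post but still publicly
# dereferenceable, so it stays off public timelines
assert is_unlisted_addressing(
    ['https://instance.example/users/alice/followers'],
    [PUBLIC_COLLECTION])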