epicyon/inbox.py

3782 lines
160 KiB
Python
Raw Normal View History

2020-04-03 16:27:34 +00:00
__filename__ = "inbox.py"
__author__ = "Bob Mottram"
__license__ = "AGPL3+"
2024-01-21 19:01:20 +00:00
__version__ = "1.5.0"
2020-04-03 16:27:34 +00:00
__maintainer__ = "Bob Mottram"
2021-09-10 16:14:50 +00:00
__email__ = "bob@libreserver.org"
2020-04-03 16:27:34 +00:00
__status__ = "Production"
2021-06-15 15:08:12 +00:00
__module_group__ = "Timeline"
2019-06-28 21:59:54 +00:00
import json
import os
2019-06-29 10:08:59 +00:00
import datetime
2019-07-04 12:23:53 +00:00
import time
import random
from shutil import copyfile
2021-12-29 21:55:09 +00:00
from linked_data_sig import verify_json_signature
from flags import is_system_account
from flags import is_blog_post
from flags import is_recent_post
from flags import is_reply
from flags import is_group_account
from flags import has_group_type
from flags import is_quote_toot
from flags import url_permitted
2024-06-21 20:59:12 +00:00
from utils import harmless_markup
2024-04-20 09:59:56 +00:00
from utils import quote_toots_allowed
2024-02-06 18:35:04 +00:00
from utils import lines_in_file
2023-11-20 22:27:58 +00:00
from utils import date_epoch
from utils import date_utcnow
2023-08-14 18:46:27 +00:00
from utils import contains_statuses
2023-08-03 17:31:47 +00:00
from utils import get_actor_from_post_id
2022-12-18 15:29:54 +00:00
from utils import acct_handle_dir
2022-06-10 11:43:33 +00:00
from utils import text_in_file
from utils import get_media_descriptions_from_post
2022-06-02 17:47:56 +00:00
from utils import get_summary_from_post
2022-02-25 19:12:40 +00:00
from utils import get_account_timezone
2021-12-27 18:28:26 +00:00
from utils import domain_permitted
2021-12-28 10:25:50 +00:00
from utils import get_reply_interval_hours
2021-12-28 12:15:46 +00:00
from utils import can_reply_to
2021-12-26 11:29:40 +00:00
from utils import get_base_content_from_post
2021-12-26 12:02:29 +00:00
from utils import acct_dir
2021-12-26 18:17:37 +00:00
from utils import remove_domain_port
2021-12-26 18:14:21 +00:00
from utils import get_port_from_domain
2021-12-26 10:57:03 +00:00
from utils import has_object_dict
2021-12-26 19:09:04 +00:00
from utils import dm_allowed_from_domain
2021-12-26 14:08:58 +00:00
from utils import get_config_param
2021-12-26 12:19:00 +00:00
from utils import has_users_path
2021-12-26 12:45:03 +00:00
from utils import get_full_domain
2021-12-27 11:20:57 +00:00
from utils import remove_id_ending
2021-12-27 18:00:51 +00:00
from utils import create_inbox_queue_dir
2021-12-27 17:42:35 +00:00
from utils import get_status_number
2021-12-27 19:05:25 +00:00
from utils import get_domain_from_actor
2021-12-27 22:19:18 +00:00
from utils import get_nickname_from_actor
2021-12-26 20:36:08 +00:00
from utils import locate_post
2021-12-28 14:55:45 +00:00
from utils import delete_post
2021-12-26 15:13:34 +00:00
from utils import load_json
2021-12-26 14:47:21 +00:00
from utils import save_json
2021-12-26 10:19:59 +00:00
from utils import local_actor_url
from utils import get_attributed_to
from utils import get_reply_to
2024-01-09 16:59:23 +00:00
from utils import get_actor_from_post
2024-05-12 12:35:26 +00:00
from utils import data_dir
2024-08-31 22:05:31 +00:00
from utils import is_dm
from utils import has_actor
2021-12-29 21:55:09 +00:00
from httpsig import get_digest_algorithm_from_headers
from httpsig import verify_post_headers
2021-12-28 16:56:57 +00:00
from session import create_session
2021-12-28 20:32:11 +00:00
from follow import is_following_actor
2021-12-29 21:55:09 +00:00
from follow import get_followers_of_actor
from follow import is_follower_of_person
from follow import followed_account_accepts
from follow import store_follow_request
from follow import no_of_follow_requests
from follow import get_no_of_followers
from follow import follow_approval_required
2019-07-04 14:36:29 +00:00
from pprint import pprint
2024-08-31 09:31:08 +00:00
from cache import cache_svg_images
2021-12-29 21:55:09 +00:00
from cache import get_person_pub_key
from acceptreject import receive_accept_reject
from blocking import is_blocked
2023-04-29 10:14:21 +00:00
from blocking import is_blocked_nickname
2021-12-28 21:55:38 +00:00
from blocking import is_blocked_domain
2021-12-25 18:38:19 +00:00
from blocking import broch_modeLapses
2021-12-29 21:55:09 +00:00
from filters import is_filtered
from httpsig import message_content_digest
from posts import outbox_message_create_wrap
from posts import convert_post_content_to_html
2021-12-29 21:55:09 +00:00
from posts import edited_post_filename
2021-12-28 18:13:52 +00:00
from posts import save_post_to_box
2021-12-29 21:55:09 +00:00
from posts import is_create_inside_announce
2021-12-28 19:33:29 +00:00
from posts import create_direct_message_post
2021-12-29 21:55:09 +00:00
from posts import is_muted_conv
from posts import is_image_media
from posts import send_signed_json
from posts import send_to_followers_thread
2024-08-31 22:05:31 +00:00
from posts import post_allow_comments
from posts import valid_post_content
2021-12-29 21:55:09 +00:00
from webapp_post import individual_post_as_html
from question import is_vote
2021-12-28 21:36:27 +00:00
from media import replace_you_tube
from media import replace_twitter
2021-12-29 21:55:09 +00:00
from git import receive_git_patch
from followingCalendar import receiving_calendar_events
from happening import save_event_post
from context import has_valid_context
from speaker import update_speaker
from announce import create_announce
from notifyOnPost import notify_when_person_posts
from conversation import update_conversation
from webapp_hashtagswarm import store_hash_tags
2021-12-29 21:55:09 +00:00
from person import valid_sending_actor
2022-04-23 18:26:54 +00:00
from fitnessFunctions import fitness_performance
from content import reject_twitter_summary
2022-07-05 14:40:26 +00:00
from content import load_dogwhistles
2022-07-28 09:59:18 +00:00
from threads import begin_thread
from reading import store_book_events
2024-08-31 22:05:31 +00:00
from inbox_receive import inbox_update_index
from inbox_receive import receive_edit_to_post
from inbox_receive import receive_like
from inbox_receive import receive_reaction
from inbox_receive import receive_zot_reaction
from inbox_receive import receive_bookmark
from inbox_receive import receive_announce
from inbox_receive import receive_delete
from inbox_receive import receive_question_vote
from inbox_receive import receive_move_activity
from inbox_receive import receive_update_activity
from inbox_receive_undo import receive_undo_like
from inbox_receive_undo import receive_undo_reaction
from inbox_receive_undo import receive_undo_bookmark
from inbox_receive_undo import receive_undo_announce
from inbox_receive_undo import receive_undo
2021-12-29 21:55:09 +00:00
def _store_last_post_id(base_dir: str, nickname: str, domain: str,
                        post_json_object: {}) -> None:
    """Stores the id of the last post made by an actor

    When a new post arrives this allows it to be compared against the last
    to see if it is an edited post.
    It would be great if edited posts contained a back reference id to the
    source but we don't live in that ideal world.
    """
    actor = post_id = None
    if has_object_dict(post_json_object):
        if post_json_object['object'].get('attributedTo'):
            actor_str = \
                get_attributed_to(post_json_object['object']['attributedTo'])
            # guard against posts whose object has no id, which would
            # previously have raised KeyError here
            if actor_str and post_json_object['object'].get('id'):
                actor = actor_str
                post_id = remove_id_ending(post_json_object['object']['id'])
    if not actor:
        # fall back to the top level actor/id of the activity
        actor = get_actor_from_post(post_json_object)
        if post_json_object.get('id'):
            post_id = remove_id_ending(post_json_object['id'])
    if not actor or not post_id:
        # without both an actor and a post id there is nothing to store
        return
    account_dir = acct_dir(base_dir, nickname, domain)
    lastpost_dir = account_dir + '/lastpost'
    if not os.path.isdir(lastpost_dir):
        # only create lastpost if the account directory itself exists
        if os.path.isdir(account_dir):
            os.mkdir(lastpost_dir)
    # one file per actor, with the actor url flattened into a filename
    actor_filename = lastpost_dir + '/' + actor.replace('/', '#')
    try:
        with open(actor_filename, 'w+', encoding='utf-8') as fp_actor:
            fp_actor.write(post_id)
    except OSError:
        print('EX: Unable to write last post id to ' + actor_filename)
2021-12-29 21:55:09 +00:00
def _inbox_store_post_to_html_cache(recent_posts_cache: {},
                                    max_recent_posts: int,
                                    translate: {},
                                    base_dir: str, http_prefix: str,
                                    session, cached_webfingers: {},
                                    person_cache: {},
                                    nickname: str, domain: str, port: int,
                                    post_json_object: {},
                                    allow_deletion: bool, boxname: str,
                                    show_published_date_only: bool,
                                    peertube_instances: [],
                                    allow_local_network_access: bool,
                                    theme_name: str, system_language: str,
                                    max_like_count: int,
                                    signing_priv_key_pem: str,
                                    cw_lists: {},
                                    lists_enabled: str,
                                    timezone: str,
                                    mitm: bool,
                                    bold_reading: bool,
                                    dogwhistles: {},
                                    min_images_for_accounts: [],
                                    buy_sites: {},
                                    auto_cw_cache: {}) -> None:
    """Converts the json post into html and stores it in a cache
    This enables the post to be quickly displayed later
    """
    # page_number of -999 indicates no particular timeline page
    page_number = -999
    avatar_url = None
    # anything which is not the outbox is treated as the inbox
    if boxname != 'outbox':
        boxname = 'inbox'

    not_dm = not is_dm(post_json_object)
    # instance-wide replacement domains for youtube/twitter links
    yt_replace_domain = get_config_param(base_dir, 'youtubedomain')
    twitter_replacement_domain = get_config_param(base_dir, 'twitterdomain')
    # has this account opted in to minimizing all images?
    minimize_all_images = False
    if nickname in min_images_for_accounts:
        minimize_all_images = True
    # NOTE(review): the run of positional booleans below
    # (True, allow_deletion, ... not_dm, True, True, False, True, False)
    # must match the parameter order of individual_post_as_html exactly;
    # their meanings cannot be confirmed from this file alone
    individual_post_as_html(signing_priv_key_pem,
                            True, recent_posts_cache, max_recent_posts,
                            translate, page_number,
                            base_dir, session, cached_webfingers,
                            person_cache,
                            nickname, domain, port, post_json_object,
                            avatar_url, True, allow_deletion,
                            http_prefix, __version__, boxname,
                            yt_replace_domain, twitter_replacement_domain,
                            show_published_date_only,
                            peertube_instances, allow_local_network_access,
                            theme_name, system_language, max_like_count,
                            not_dm, True, True, False, True, False,
                            cw_lists, lists_enabled, timezone, mitm,
                            bold_reading, dogwhistles, minimize_all_images,
                            None, buy_sites, auto_cw_cache)
2021-12-29 21:55:09 +00:00
def valid_inbox(base_dir: str, nickname: str, domain: str) -> bool:
    """Checks whether files were correctly saved to the inbox
    Returns False if a queue item ended up in the inbox directory
    """
    inbox_path = \
        acct_dir(base_dir, nickname, remove_domain_port(domain)) + '/inbox'
    if not os.path.isdir(inbox_path):
        # no inbox directory yet, so nothing can be wrong
        return True
    for subdir, _, fnames in os.walk(inbox_path):
        for curr_fname in fnames:
            curr_filename = os.path.join(subdir, curr_fname)
            if not os.path.isfile(curr_filename):
                print('filename: ' + curr_filename)
                return False
            # queue items contain a postNickname field and
            # should never appear inside the inbox itself
            if text_in_file('postNickname', curr_filename):
                print('queue file incorrectly saved to ' + curr_filename)
                return False
        # only inspect the top level of the inbox
        break
    return True
2019-07-18 11:35:48 +00:00
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def valid_inbox_filenames(base_dir: str, nickname: str, domain: str,
                          expected_domain: str, expected_port: int) -> bool:
    """Used by unit tests to check that the port number gets appended to
    domain names within saved post filenames
    """
    inbox_dir = \
        acct_dir(base_dir, nickname, remove_domain_port(domain)) + '/inbox'
    if not os.path.isdir(inbox_dir):
        print('Not an inbox directory: ' + inbox_dir)
        return True
    # the domain:port string expected within each saved filename
    expected_str = expected_domain + ':' + str(expected_port)
    expected_found = False
    no_of_files = 0
    for subdir, _, fnames in os.walk(inbox_dir):
        for curr_fname in fnames:
            curr_filename = os.path.join(subdir, curr_fname)
            no_of_files += 1
            if not os.path.isfile(curr_filename):
                print('filename: ' + curr_filename)
                return False
            if expected_str in curr_filename:
                expected_found = True
        # only inspect the top level of the inbox
        break
    if no_of_files == 0:
        # an empty inbox is considered valid
        return True
    if expected_found:
        return True
    # list what is actually present, to aid test debugging
    print('Expected file was not found: ' + expected_str)
    for subdir, _, fnames in os.walk(inbox_dir):
        for curr_fname in fnames:
            print(os.path.join(subdir, curr_fname))
        break
    return False
2020-04-03 16:27:34 +00:00
2021-12-28 20:32:11 +00:00
def inbox_message_has_params(message_json: {}) -> bool:
    """Checks whether an incoming message contains expected parameters

    Requires actor, type and object fields, with actor and type being
    strings and object being a dict or a string. Most activity types
    must also have a 'to' field.
    """
    expected_params = ['actor', 'type', 'object']
    for param in expected_params:
        if not message_json.get(param):
            # print('inbox_message_has_params: ' +
            #       param + ' ' + str(message_json))
            return False
    # actor should be a string
    actor_url = get_actor_from_post(message_json)
    if not actor_url:
        # str() guards against actor_url being None, which would
        # otherwise raise TypeError during the concatenation
        print('WARN: actor should be a string, but is actually: ' +
              str(actor_url))
        pprint(message_json)
        return False
    # type should be a string
    if not isinstance(message_json['type'], str):
        print('WARN: type from ' + actor_url +
              ' should be a string, but is actually: ' +
              str(message_json['type']))
        return False
    # object should be a dict or a string
    if not has_object_dict(message_json):
        if not isinstance(message_json['object'], str):
            print('WARN: object from ' + actor_url +
                  ' should be a dict or string, but is actually: ' +
                  str(message_json['object']))
            return False
    if not message_json.get('to'):
        # some activity types are permitted to omit the 'to' field
        allowed_without_to_param = ['Like', 'EmojiReact',
                                    'Follow', 'Join', 'Request',
                                    'Accept', 'Capability', 'Undo',
                                    'Move']
        if message_json['type'] not in allowed_without_to_param:
            return False
    return True
2020-04-03 16:27:34 +00:00
2021-12-28 20:32:11 +00:00
def inbox_permitted_message(domain: str, message_json: {},
                            federation_list: []) -> bool:
    """ check that we are receiving from a permitted domain
    """
    if not has_actor(message_json, False):
        return False

    actor = get_actor_from_post(message_json)
    # always allow the local domain
    if domain in actor:
        return True

    if not url_permitted(actor, federation_list):
        return False

    always_allowed_types = (
        'Follow', 'Join', 'Like', 'EmojiReact', 'Delete', 'Announce', 'Move'
    )
    if message_json['type'] in always_allowed_types:
        return True
    if not has_object_dict(message_json):
        return True
    # if this is a reply then the replied-to post must also be
    # on a permitted domain
    reply_id = get_reply_to(message_json['object'])
    if reply_id:
        if not isinstance(reply_id, str):
            return False
        if not url_permitted(reply_id, federation_list):
            return False
    return True
2019-06-29 10:08:59 +00:00
2020-04-03 16:27:34 +00:00
def _deny_non_follower(base_dir: str, nickname: str, domain: str,
reply_nickname: str, reply_domain: str,
sending_actor: str):
"""Returns true if replying to an account which is not a follower
or mutual.
This only applies if 'Only replies from followers' or
'Only replies from mutuals' is selected on the edit profile screen
"""
# Is this a reply to something written from this account?
if reply_nickname != nickname or reply_domain != domain:
return False
# has this account specified to only receive replies from followers?
account_dir = acct_dir(base_dir, nickname, domain)
if not os.path.isfile(account_dir + '/.repliesFromFollowersOnly'):
if not os.path.isfile(account_dir + '/.repliesFromMutualsOnly'):
return False
# is the sending actor a follower?
follower_nickname = get_nickname_from_actor(sending_actor)
follower_domain, _ = get_domain_from_actor(sending_actor)
if not is_follower_of_person(base_dir, nickname, domain,
follower_nickname, follower_domain):
return True
2024-02-06 13:07:32 +00:00
if os.path.isfile(account_dir + '/.repliesFromMutualsOnly'):
if not is_following_actor(base_dir, nickname, domain,
sending_actor):
return True
return False
2021-12-28 20:32:11 +00:00
def save_post_to_inbox_queue(base_dir: str, http_prefix: str,
                             nickname: str, domain: str,
                             post_json_object: {},
                             original_post_json_object: {},
                             message_bytes: str,
                             http_headers: {},
                             post_path: str, debug: bool,
                             blocked_cache: [],
                             block_federated: [],
                             system_language: str,
                             mitm: bool,
                             max_message_bytes: int) -> str:
    """Saves the given json to the inbox queue for the person

    Performs early moderation checks (size limit, sender/reply blocks,
    quote toot policy, content filters) before anything touches disk,
    then writes a queue item file and returns its filename.
    Returns None if the post was rejected or blocked.
    """
    # reject oversized messages before doing any other work
    if len(message_bytes) > max_message_bytes:
        print('REJECT: inbox message too long ' +
              str(len(message_bytes)) + ' bytes')
        return None
    original_domain = domain
    domain = remove_domain_port(domain)

    # block at the ealiest stage possible, which means the data
    # isn't written to file
    post_nickname = None
    post_domain = None
    actor = None
    obj_dict_exists = False

    # who is sending the post? prefer attributedTo, falling back to actor
    sending_actor = None
    if has_object_dict(post_json_object):
        obj_dict_exists = True
        if post_json_object['object'].get('attributedTo'):
            sending_actor = \
                get_attributed_to(post_json_object['object']['attributedTo'])
    else:
        if post_json_object.get('attributedTo'):
            sending_actor = \
                get_attributed_to(post_json_object['attributedTo'])
    if not sending_actor:
        if post_json_object.get('actor'):
            sending_actor = get_actor_from_post(post_json_object)
    # check that the sender is valid
    if sending_actor:
        if not isinstance(sending_actor, str):
            print('REJECT: sending actor is not a string ' +
                  str(sending_actor))
            return None
        actor = sending_actor
        post_nickname = get_nickname_from_actor(sending_actor)
        if not post_nickname:
            print('REJECT: No post Nickname in actor ' + sending_actor)
            return None
        post_domain, post_port = \
            get_domain_from_actor(sending_actor)
        if not post_domain:
            if debug:
                pprint(post_json_object)
            print('REJECT: No post Domain in actor ' + str(sending_actor))
            return None
        # is the sender blocked by this account or instance?
        if is_blocked(base_dir, nickname, domain,
                      post_nickname, post_domain,
                      blocked_cache, block_federated):
            print('BLOCK: post from ' +
                  post_nickname + '@' + post_domain + ' blocked')
            return None
        post_domain = get_full_domain(post_domain, post_port)

    # get the content of the post
    content_str = \
        get_base_content_from_post(post_json_object, system_language)

    if obj_dict_exists:
        # reject quote toots unless the recipient allows them
        # from this sender
        if is_quote_toot(post_json_object, content_str):
            allow_quotes = False
            if sending_actor:
                allow_quotes = \
                    quote_toots_allowed(base_dir, nickname, domain,
                                        post_nickname, post_domain)
            if not allow_quotes:
                if post_json_object.get('id'):
                    print('REJECT: inbox quote toot ' +
                          nickname + '@' + domain + ' ' +
                          str(post_json_object['id']))
                return None

        # is this a reply to a blocked domain or account?
        reply_id = get_reply_to(post_json_object['object'])
        if reply_id:
            if isinstance(reply_id, str):
                in_reply_to = reply_id
                reply_domain, _ = \
                    get_domain_from_actor(in_reply_to)
                if reply_domain:
                    if is_blocked_domain(base_dir, reply_domain,
                                         blocked_cache, block_federated):
                        print('BLOCK: post contains reply from ' +
                              str(actor) +
                              ' to a blocked domain: ' + reply_domain)
                        return None
                reply_nickname = \
                    get_nickname_from_actor(in_reply_to)
                if reply_nickname and reply_domain:
                    # NOTE(review): reply_domain is passed where a
                    # nickname might be expected — confirm the
                    # is_blocked_nickname signature
                    if is_blocked_nickname(base_dir, reply_domain,
                                           blocked_cache):
                        print('BLOCK: post contains reply from ' +
                              str(actor) +
                              ' to a blocked nickname: ' +
                              reply_nickname + '@' + reply_domain)
                        return None
                    if is_blocked(base_dir, nickname, domain,
                                  reply_nickname, reply_domain,
                                  blocked_cache, block_federated):
                        print('BLOCK: post contains reply from ' +
                              str(actor) +
                              ' to a blocked account: ' +
                              reply_nickname + '@' + reply_domain)
                        return None
                    # enforce 'replies from followers/mutuals only'
                    if _deny_non_follower(base_dir, nickname, domain,
                                          reply_nickname, reply_domain,
                                          actor):
                        print('REJECT: post contains reply from ' +
                              str(actor) +
                              ' who is not a follower of ' +
                              nickname + '@' + domain)
                        return None

        # filter on the content of the post
        if content_str:
            summary_str = \
                get_summary_from_post(post_json_object,
                                      system_language, [])
            media_descriptions = \
                get_media_descriptions_from_post(post_json_object)
            # filters apply to summary, content and media descriptions
            content_all = \
                summary_str + ' ' + content_str + ' ' + media_descriptions
            if is_filtered(base_dir, nickname, domain, content_all,
                           system_language):
                if post_json_object.get('id'):
                    print('REJECT: post was filtered out due to content ' +
                          str(post_json_object['id']))
                return None
            if reject_twitter_summary(base_dir, nickname, domain,
                                      summary_str):
                if post_json_object.get('id'):
                    print('REJECT: post was filtered out due to ' +
                          'twitter summary ' + str(post_json_object['id']))
                return None

    original_post_id = None
    if post_json_object.get('id'):
        if not isinstance(post_json_object['id'], str):
            print('REJECT: post id is not a string ' +
                  str(post_json_object['id']))
            return None
        original_post_id = remove_id_ending(post_json_object['id'])

    curr_time = date_utcnow()

    # use the post's own id if it has one, otherwise make one up
    post_id = None
    if post_json_object.get('id'):
        post_id = remove_id_ending(post_json_object['id'])
        published = curr_time.strftime("%Y-%m-%dT%H:%M:%SZ")
    if not post_id:
        status_number, published = get_status_number()
        if actor:
            post_id = actor + '/statuses/' + status_number
        else:
            post_id = \
                local_actor_url(http_prefix, nickname, original_domain) + \
                '/statuses/' + status_number

    # NOTE: don't change post_json_object['id'] before signature check

    inbox_queue_dir = create_inbox_queue_dir(nickname, domain, base_dir)

    handle = nickname + '@' + domain
    # where the post will eventually be stored once processed
    destination = data_dir(base_dir) + '/' + \
        handle + '/inbox/' + post_id.replace('/', '#') + '.json'
    # where the queue item is stored while awaiting processing
    filename = inbox_queue_dir + '/' + post_id.replace('/', '#') + '.json'

    shared_inbox_item = False
    if nickname == 'inbox':
        # shared inbox items are queued under the instance domain
        nickname = original_domain
        shared_inbox_item = True

    # digest of the raw message bytes, used later for signature checks
    digest_start_time = time.time()
    digest_algorithm = get_digest_algorithm_from_headers(http_headers)
    digest = message_content_digest(message_bytes, digest_algorithm)
    time_diff_str = str(int((time.time() - digest_start_time) * 1000))
    if debug:
        while len(time_diff_str) < 6:
            time_diff_str = '0' + time_diff_str
        print('DIGEST|' + time_diff_str + '|' + filename)

    new_queue_item = {
        "originalId": original_post_id,
        "id": post_id,
        "actor": actor,
        "nickname": nickname,
        "domain": domain,
        "postNickname": post_nickname,
        "postDomain": post_domain,
        "sharedInbox": shared_inbox_item,
        "published": published,
        "httpHeaders": http_headers,
        "path": post_path,
        "post": post_json_object,
        "original": original_post_json_object,
        "digest": digest,
        "filename": filename,
        "destination": destination,
        "mitm": mitm
    }

    if debug:
        print('Inbox queue item created')
    save_json(new_queue_item, filename)
    return filename
2019-07-04 12:23:53 +00:00
2020-04-03 16:27:34 +00:00
2024-02-06 13:17:51 +00:00
def _inbox_post_recipients_add(base_dir: str, to_list: [],
2022-01-02 19:57:50 +00:00
recipients_dict: {},
domain_match: str, domain: str,
2024-02-06 13:17:51 +00:00
debug: bool,
onion_domain: str, i2p_domain: str) -> bool:
2022-05-30 12:09:18 +00:00
"""Given a list of post recipients (to_list) from 'to' or 'cc' parameters
2022-01-02 19:57:50 +00:00
populate a recipients_dict with the handle for each
2019-07-08 22:12:24 +00:00
"""
2022-01-02 19:57:50 +00:00
follower_recipients = False
2022-05-30 12:09:18 +00:00
for recipient in to_list:
2019-09-03 19:53:22 +00:00
if not recipient:
continue
# if the recipient is an onion or i2p address then
# is it an account on a clearnet instance?
# If so then change the onion/i2p to the account domain
if onion_domain:
if onion_domain + '/' in recipient:
recipient = recipient.replace(onion_domain, domain)
if i2p_domain:
if i2p_domain + '/' in recipient:
recipient = recipient.replace(i2p_domain, domain)
# is this a to an account on this instance?
2022-01-02 19:57:50 +00:00
if domain_match in recipient:
# get the handle for the account on this instance
2022-01-02 19:57:50 +00:00
nickname = recipient.split(domain_match)[1]
2021-06-22 12:42:52 +00:00
handle = nickname + '@' + domain
2022-12-18 15:29:54 +00:00
handle_dir = acct_handle_dir(base_dir, handle)
if os.path.isdir(handle_dir):
2022-01-02 19:57:50 +00:00
recipients_dict[handle] = None
2019-07-11 12:29:31 +00:00
else:
if debug:
2024-05-12 12:35:26 +00:00
print('DEBUG: ' + data_dir(base_dir) + '/' +
2020-04-03 16:27:34 +00:00
handle + ' does not exist')
2019-07-11 12:29:31 +00:00
else:
if debug:
2023-02-09 20:40:42 +00:00
if recipient.endswith('#Public') or \
recipient == 'as:Public' or \
recipient == 'Public':
2022-03-11 17:21:31 +00:00
print('DEBUG: #Public recipient is too non-specific. ' +
recipient + ' ' + domain_match)
else:
print('DEBUG: ' + recipient + ' is not local to ' +
domain_match)
2022-05-30 12:09:18 +00:00
print(str(to_list))
2019-07-08 22:12:24 +00:00
if recipient.endswith('followers'):
2019-07-11 12:29:31 +00:00
if debug:
print('DEBUG: followers detected as post recipients')
2022-01-02 19:57:50 +00:00
follower_recipients = True
return follower_recipients, recipients_dict
2019-07-08 22:12:24 +00:00
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def _recipients_as_list(field_value) -> []:
    """Normalises a 'to' or 'cc' field, which may be either a single
    string or a list, into a list
    """
    if isinstance(field_value, list):
        return field_value
    return [field_value]


def _inbox_post_recipients(base_dir: str, post_json_object: {},
                           domain: str, port: int,
                           debug: bool,
                           onion_domain: str, i2p_domain: str) -> ([], []):
    """Returns dictionaries containing the recipients of the given post
    The shared dictionary contains followers
    """
    recipients_dict = {}
    recipients_dict_followers = {}

    if not post_json_object.get('actor'):
        if debug:
            pprint(post_json_object)
            print('WARNING: inbox post has no actor')
        return recipients_dict, recipients_dict_followers

    domain = remove_domain_port(domain)
    domain_base = domain
    domain = get_full_domain(domain, port)
    # substring used to recognise accounts on this instance
    domain_match = '/' + domain + '/users/'

    actor = get_actor_from_post(post_json_object)
    # first get any specific people which the post is addressed to

    follower_recipients = False
    if has_object_dict(post_json_object):
        if post_json_object['object'].get('to'):
            recipients_list = \
                _recipients_as_list(post_json_object['object']['to'])
            if debug:
                print('DEBUG: resolving "to"')
            includes_followers, recipients_dict = \
                _inbox_post_recipients_add(base_dir,
                                           recipients_list,
                                           recipients_dict,
                                           domain_match, domain_base,
                                           debug,
                                           onion_domain, i2p_domain)
            if includes_followers:
                follower_recipients = True
        else:
            if debug:
                print('DEBUG: inbox post has no "to"')

        if post_json_object['object'].get('cc'):
            recipients_list = \
                _recipients_as_list(post_json_object['object']['cc'])
            includes_followers, recipients_dict = \
                _inbox_post_recipients_add(base_dir,
                                           recipients_list,
                                           recipients_dict,
                                           domain_match, domain_base,
                                           debug,
                                           onion_domain, i2p_domain)
            if includes_followers:
                follower_recipients = True
        else:
            if debug:
                print('DEBUG: inbox post has no cc')
    else:
        if debug and post_json_object.get('object'):
            if isinstance(post_json_object['object'], str):
                if contains_statuses(post_json_object['object']):
                    print('DEBUG: inbox item is a link to a post')
                else:
                    if '/users/' in post_json_object['object']:
                        print('DEBUG: inbox item is a link to an actor')

    # also examine the top level 'to' and 'cc' of the activity itself
    if post_json_object.get('to'):
        recipients_list = _recipients_as_list(post_json_object['to'])
        includes_followers, recipients_dict = \
            _inbox_post_recipients_add(base_dir,
                                       recipients_list,
                                       recipients_dict,
                                       domain_match, domain_base,
                                       debug,
                                       onion_domain, i2p_domain)
        if includes_followers:
            follower_recipients = True

    if post_json_object.get('cc'):
        recipients_list = _recipients_as_list(post_json_object['cc'])
        includes_followers, recipients_dict = \
            _inbox_post_recipients_add(base_dir,
                                       recipients_list,
                                       recipients_dict,
                                       domain_match, domain_base,
                                       debug,
                                       onion_domain, i2p_domain)
        if includes_followers:
            follower_recipients = True

    if not follower_recipients:
        if debug:
            print('DEBUG: no followers were resolved')
        return recipients_dict, recipients_dict_followers

    # now resolve the followers
    recipients_dict_followers = \
        get_followers_of_actor(base_dir, actor, debug)

    return recipients_dict, recipients_dict_followers
2019-07-08 22:12:24 +00:00
2024-01-28 15:17:55 +00:00
def update_edited_post(base_dir: str,
                       nickname: str, domain: str,
                       message_json: {},
                       edited_published: str,
                       edited_postid: str,
                       recent_posts_cache: {},
                       box_name: str,
                       max_mentions: int, max_emoji: int,
                       allow_local_network_access: bool,
                       debug: bool,
                       system_language: str, http_prefix: str,
                       domain_full: str, person_cache: {},
                       signing_priv_key_pem: str,
                       max_recent_posts: int, translate: {},
                       session, cached_webfingers: {}, port: int,
                       allow_deletion: bool,
                       yt_replace_domain: str,
                       twitter_replacement_domain: str,
                       show_published_date_only: bool,
                       peertube_instances: [],
                       theme_name: str, max_like_count: int,
                       cw_lists: {}, dogwhistles: {},
                       min_images_for_accounts: [],
                       max_hashtags: int,
                       buy_sites: {},
                       auto_cw_cache: {},
                       onion_domain: str,
                       i2p_domain: str) -> None:
    """ When an edited post is created this assigns
    a published and updated date to it, and uses
    the previous id

    The message is converted in place into an Update activity:
    'published' is rewound to the original publication date,
    'updated' is set to the edit date, and the ids/urls are set
    back to the original post id. The result is then applied via
    receive_edit_to_post and the post id is prepended to the
    box_name timeline index if not already present.
    """
    # the post's current 'published' value is the time of the edit,
    # so it becomes the 'updated' timestamp
    edited_updated = \
        message_json['object']['published']
    if edited_published:
        # restore the original publication date on both the
        # activity wrapper and the object
        message_json['published'] = \
            edited_published
        message_json['object']['published'] = \
            edited_published
    # reuse the previous post id so the edit replaces the original
    message_json['id'] = \
        edited_postid + '/activity'
    message_json['object']['id'] = \
        edited_postid
    message_json['object']['url'] = \
        edited_postid
    message_json['updated'] = \
        edited_updated
    message_json['object']['updated'] = \
        edited_updated
    message_json['type'] = 'Update'
    # shallow copy, so receive_edit_to_post sees a separate wrapper
    message_json2 = message_json.copy()
    receive_edit_to_post(recent_posts_cache,
                         message_json2,
                         base_dir,
                         nickname, domain,
                         max_mentions, max_emoji,
                         allow_local_network_access,
                         debug,
                         system_language, http_prefix,
                         domain_full, person_cache,
                         signing_priv_key_pem,
                         max_recent_posts,
                         translate,
                         session,
                         cached_webfingers,
                         port,
                         allow_deletion,
                         yt_replace_domain,
                         twitter_replacement_domain,
                         show_published_date_only,
                         peertube_instances,
                         theme_name, max_like_count,
                         cw_lists, dogwhistles,
                         min_images_for_accounts,
                         max_hashtags, buy_sites,
                         auto_cw_cache,
                         onion_domain, i2p_domain)

    # update the index
    id_str = edited_postid.split('/')[-1]
    index_filename = \
        acct_dir(base_dir, nickname, domain) + '/' + box_name + '.index'
    if not text_in_file(id_str, index_filename):
        try:
            # prepend the id so the edited post appears at the
            # top of the timeline
            with open(index_filename, 'r+',
                      encoding='utf-8') as fp_index:
                content = fp_index.read()
                if id_str + '\n' not in content:
                    fp_index.seek(0, 0)
                    fp_index.write(id_str + '\n' + content)
        except OSError as ex:
            print('WARN: Failed to write index after edit ' +
                  index_filename + ' ' + str(ex))


def populate_replies(base_dir: str, http_prefix: str, domain: str,
                     message_json: {}, max_replies: int, debug: bool) -> bool:
    """Updates the list of replies for a post on this domain if
    a reply to it arrives

    Returns True when the reply id was recorded (or was already
    recorded) in the .replies file alongside the original post.
    """
    if not message_json.get('id'):
        return False
    if not has_object_dict(message_json):
        return False
    reply_to = get_reply_to(message_json['object'])
    if not reply_to:
        return False
    if not message_json['object'].get('to'):
        return False
    if not isinstance(reply_to, str):
        return False
    if debug:
        print('DEBUG: post contains a reply')

    # is this a reply to a post on this domain?
    local_prefix = http_prefix + '://' + domain + '/'
    if not reply_to.startswith(local_prefix):
        if debug:
            print('DEBUG: post is a reply to another not on this domain')
            print(reply_to)
            print('Expected: ' + http_prefix + '://' + domain + '/')
        return False

    reply_to_nickname = get_nickname_from_actor(reply_to)
    if not reply_to_nickname:
        print('DEBUG: no nickname found for ' + reply_to)
        return False
    reply_to_domain, _ = get_domain_from_actor(reply_to)
    if not reply_to_domain:
        if debug:
            print('DEBUG: no domain found for ' + reply_to)
        return False

    post_filename = locate_post(base_dir, reply_to_nickname,
                                reply_to_domain, reply_to)
    if not post_filename:
        if debug:
            print('DEBUG: post may have expired - ' + reply_to)
        return False
    if not post_allow_comments(post_filename):
        if debug:
            print('DEBUG: post does not allow comments - ' + reply_to)
        return False

    # populate a text file containing the ids of replies
    replies_filename = post_filename.replace('.json', '.replies')
    message_id = remove_id_ending(message_json['id'])

    if not os.path.isfile(replies_filename):
        # first reply: create the file
        try:
            with open(replies_filename, 'w+',
                      encoding='utf-8') as fp_replies:
                fp_replies.write(message_id + '\n')
        except OSError:
            print('EX: populate_replies unable to write ' +
                  replies_filename)
        return True

    # existing file: enforce the replies limit, then append
    # the id if it is not already recorded
    if lines_in_file(replies_filename) > max_replies:
        return False
    if not text_in_file(message_id, replies_filename):
        try:
            with open(replies_filename, 'a+',
                      encoding='utf-8') as fp_replies:
                fp_replies.write(message_id + '\n')
        except OSError:
            print('EX: populate_replies unable to append ' +
                  replies_filename)
    return True


def _obtain_avatar_for_reply_post(session, base_dir: str, http_prefix: str,
                                  domain: str, onion_domain: str,
                                  i2p_domain: str,
                                  person_cache: {},
                                  post_json_object: {}, debug: bool,
                                  signing_priv_key_pem: str) -> None:
    """Tries to obtain the actor for the person being replied to
    so that their avatar can later be shown

    NOTE(review): fetching the public key appears to populate
    person_cache with the actor as a side effect — confirm in
    get_person_pub_key.
    """
    if not has_object_dict(post_json_object):
        return

    reply_id = get_reply_to(post_json_object['object'])
    if not reply_id:
        return
    # validate the type before doing any string operations on it
    # (previously this was checked only after copying the value,
    # with a redundant second truthiness check)
    if not isinstance(reply_id, str):
        return

    lookup_actor = reply_id
    if not has_users_path(lookup_actor):
        return

    # a reply id usually points at a status; reduce it to the actor url
    if contains_statuses(lookup_actor):
        lookup_actor = get_actor_from_post_id(lookup_actor)

    if debug:
        print('DEBUG: Obtaining actor for reply post ' + lookup_actor)

    # retry a few times, sleeping between failed attempts
    for tries in range(6):
        pub_key = \
            get_person_pub_key(base_dir, session, lookup_actor,
                               person_cache, debug,
                               __version__, http_prefix,
                               domain, onion_domain, i2p_domain,
                               signing_priv_key_pem)
        if pub_key:
            # a dict result carries an http error code rather than a key
            if not isinstance(pub_key, dict):
                if debug:
                    print('DEBUG: public key obtained for reply: ' +
                          lookup_actor)
            else:
                if debug:
                    print('DEBUG: http error code for public key ' +
                          'obtained for reply: ' + lookup_actor + ' ' +
                          str(pub_key))
            break

        if debug:
            print('DEBUG: Retry ' + str(tries + 1) +
                  ' obtaining actor for ' + lookup_actor)
        time.sleep(5)


def _dm_notify(base_dir: str, handle: str, url: str) -> None:
    """Creates a notification that a new DM has arrived
    """
    account_dir = acct_handle_dir(base_dir, handle)
    if not os.path.isdir(account_dir):
        return
    dm_file = account_dir + '/.newDM'
    if os.path.isfile(dm_file):
        # a notification is already pending
        return
    try:
        with open(dm_file, 'w+', encoding='utf-8') as fp_dm:
            fp_dm.write(url)
    except OSError:
        print('EX: _dm_notify unable to write ' + dm_file)


def _notify_post_arrival(base_dir: str, handle: str, url: str) -> None:
    """Creates a notification that a new post has arrived.
    This is for followed accounts with the notify checkbox enabled
    on the person options screen
    """
    account_dir = acct_handle_dir(base_dir, handle)
    if not os.path.isdir(account_dir):
        return
    notify_file = account_dir + '/.newNotifiedPost'
    if os.path.isfile(notify_file):
        # check that the same notification is not repeatedly sent
        try:
            with open(notify_file, 'r', encoding='utf-8') as fp_notify:
                if url in fp_notify.read():
                    return
        except OSError:
            print('EX: _notify_post_arrival unable to read ' + notify_file)
    try:
        with open(notify_file, 'w+', encoding='utf-8') as fp_notify:
            fp_notify.write(url)
    except OSError:
        print('EX: _notify_post_arrival unable to write ' + notify_file)


def _reply_notify(base_dir: str, handle: str, url: str) -> None:
    """Creates a notification that a new reply has arrived
    """
    account_dir = acct_handle_dir(base_dir, handle)
    if not os.path.isdir(account_dir):
        return
    reply_file = account_dir + '/.newReply'
    if os.path.isfile(reply_file):
        # a notification is already pending
        return
    try:
        with open(reply_file, 'w+', encoding='utf-8') as fp_reply:
            fp_reply.write(url)
    except OSError:
        print('EX: _reply_notify unable to write ' + reply_file)


def _git_patch_notify(base_dir: str, handle: str, subject: str,
                      from_nickname: str, from_domain: str) -> None:
    """Creates a notification that a new git patch has arrived
    """
    account_dir = acct_handle_dir(base_dir, handle)
    if not os.path.isdir(account_dir):
        return
    patch_file = account_dir + '/.newPatch'
    subject = subject.replace('[PATCH]', '').strip()
    # handle of the sender of the patch
    sender_handle = '@' + from_nickname + '@' + from_domain
    try:
        with open(patch_file, 'w+', encoding='utf-8') as fp_patch:
            fp_patch.write('git ' + sender_handle + ' ' + subject)
    except OSError:
        print('EX: _git_patch_notify unable to write ' + patch_file)


def _group_handle(base_dir: str, handle: str) -> bool:
    """Is the given account handle a group?
    """
    actor_file = acct_handle_dir(base_dir, handle) + '.json'
    if not os.path.isfile(actor_file):
        return False
    actor_json = load_json(actor_file)
    if not actor_json:
        return False
    # a missing 'type' field simply means this is not a group
    return actor_json.get('type') == 'Group'


def _send_to_group_members(server, session, session_onion, session_i2p,
                           base_dir: str, handle: str, port: int,
                           post_json_object: {},
                           http_prefix: str, federation_list: [],
                           send_threads: [], post_log: [],
                           cached_webfingers: {},
                           person_cache: {}, debug: bool,
                           curr_domain: str,
                           onion_domain: str, i2p_domain: str,
                           signing_priv_key_pem: str,
                           sites_unavailable: [],
                           system_language: str) -> None:
    """When a post arrives for a group send it out to the group members

    The incoming post is saved to the group's outbox, wrapped in an
    Announce activity from the group actor, and the announce is sent
    to the group's followers.
    """
    if debug:
        print('\n\n=========================================================')
        print(handle + ' sending to group members')

    # read any shared items federation domains from the config
    shared_item_federation_tokens = {}
    shared_items_federated_domains = []
    shared_items_federated_domains_str = \
        get_config_param(base_dir, 'shared_items_federated_domains')
    if shared_items_federated_domains_str:
        si_federated_domains_list = \
            shared_items_federated_domains_str.split(',')
        for shared_federated_domain in si_federated_domains_list:
            domain_str = shared_federated_domain.strip()
            shared_items_federated_domains.append(domain_str)

    # nothing to do if the group has no followers
    followers_file = acct_handle_dir(base_dir, handle) + '/followers.txt'
    if not os.path.isfile(followers_file):
        return
    if not post_json_object.get('to'):
        return
    if not post_json_object.get('object'):
        return
    if not has_object_dict(post_json_object):
        return
    # group handles carry a leading '!' which is stripped here
    nickname = handle.split('@')[0].replace('!', '')
    domain = handle.split('@')[1]
    domain_full = get_full_domain(domain, port)
    group_actor = local_actor_url(http_prefix, nickname, domain_full)
    # only forward posts which are addressed to the group actor
    if isinstance(post_json_object['to'], list):
        if group_actor not in post_json_object['to']:
            return
    else:
        if group_actor != post_json_object['to']:
            return
    cc_str = ''
    nickname = handle.split('@')[0].replace('!', '')
    # save to the group outbox so that replies will be to the group
    # rather than the original sender
    save_post_to_box(base_dir, http_prefix, None,
                     nickname, domain, post_json_object, 'outbox')

    post_id = remove_id_ending(post_json_object['object']['id'])
    if debug:
        print('Group announce: ' + post_id)
    # wrap the post in an announce from the group, addressed to
    # the group's followers
    announce_json = \
        create_announce(session, base_dir, federation_list,
                        nickname, domain, port,
                        group_actor + '/followers', cc_str,
                        http_prefix, post_id, False, False,
                        send_threads, post_log,
                        person_cache, cached_webfingers,
                        debug, __version__, signing_priv_key_pem,
                        curr_domain, onion_domain, i2p_domain,
                        sites_unavailable, system_language)

    # deliver the announce to each follower of the group
    send_to_followers_thread(server, session, session_onion, session_i2p,
                             base_dir, nickname, domain,
                             onion_domain, i2p_domain, port,
                             http_prefix, federation_list,
                             send_threads, post_log,
                             cached_webfingers, person_cache,
                             announce_json, debug, __version__,
                             shared_items_federated_domains,
                             shared_item_federation_tokens,
                             signing_priv_key_pem,
                             sites_unavailable, system_language)


def _inbox_update_calendar_from_tag(base_dir: str, handle: str,
post_json_object: {}) -> None:
2019-10-11 12:31:06 +00:00
"""Detects whether the tag list on a post contains calendar events
and if so saves the post id to a file in the calendar directory
for the account
"""
2021-12-25 22:09:19 +00:00
if not post_json_object.get('actor'):
return
2021-12-26 10:57:03 +00:00
if not has_object_dict(post_json_object):
2019-10-11 12:31:06 +00:00
return
2021-12-25 22:09:19 +00:00
if not post_json_object['object'].get('tag'):
2019-10-11 12:31:06 +00:00
return
2021-12-25 22:09:19 +00:00
if not isinstance(post_json_object['object']['tag'], list):
2019-10-11 12:31:06 +00:00
return
2024-01-09 16:59:23 +00:00
actor = get_actor_from_post(post_json_object)
2022-01-02 19:57:50 +00:00
actor_nickname = get_nickname_from_actor(actor)
if not actor_nickname:
return
2022-01-02 19:57:50 +00:00
actor_domain, _ = get_domain_from_actor(actor)
2023-01-15 14:33:18 +00:00
if not actor_domain:
return
2022-01-02 19:57:50 +00:00
handle_nickname = handle.split('@')[0]
handle_domain = handle.split('@')[1]
2021-12-29 21:55:09 +00:00
if not receiving_calendar_events(base_dir,
2022-01-02 19:57:50 +00:00
handle_nickname, handle_domain,
actor_nickname, actor_domain):
return
2020-08-13 09:37:11 +00:00
2021-12-27 11:20:57 +00:00
post_id = remove_id_ending(post_json_object['id']).replace('/', '#')
2020-08-13 11:58:05 +00:00
2020-08-13 09:37:11 +00:00
# look for events within the tags list
2022-01-02 19:57:50 +00:00
for tag_dict in post_json_object['object']['tag']:
if not tag_dict.get('type'):
2020-07-11 22:36:52 +00:00
continue
2022-01-02 19:57:50 +00:00
if tag_dict['type'] != 'Event':
2019-10-11 12:31:06 +00:00
continue
2022-01-02 19:57:50 +00:00
if not tag_dict.get('startTime'):
2019-10-11 12:31:06 +00:00
continue
2022-01-02 19:57:50 +00:00
save_event_post(base_dir, handle, post_id, tag_dict)
2020-04-03 16:27:34 +00:00
2024-01-09 12:08:23 +00:00
def _inbox_update_calendar_from_event(base_dir: str, handle: str,
post_json_object: {}) -> None:
"""Detects whether the post contains calendar events
and if so saves the post id to a file in the calendar directory
for the account
This is for Friendica-style calendar events
"""
if not post_json_object.get('actor'):
return
if not has_object_dict(post_json_object):
return
if post_json_object['object']['type'] != 'Event':
return
if not post_json_object['object'].get('startTime'):
return
if not isinstance(post_json_object['object']['startTime'], str):
return
2024-01-09 16:59:23 +00:00
actor = get_actor_from_post(post_json_object)
2024-01-09 12:08:23 +00:00
actor_nickname = get_nickname_from_actor(actor)
if not actor_nickname:
return
actor_domain, _ = get_domain_from_actor(actor)
if not actor_domain:
return
handle_nickname = handle.split('@')[0]
handle_domain = handle.split('@')[1]
if not receiving_calendar_events(base_dir,
handle_nickname, handle_domain,
actor_nickname, actor_domain):
return
post_id = remove_id_ending(post_json_object['id']).replace('/', '#')
save_event_post(base_dir, handle, post_id, post_json_object['object'])
2021-12-29 21:55:09 +00:00
def _update_last_seen(base_dir: str, handle: str, actor: str) -> None:
"""Updates the time when the given handle last saw the given actor
2020-12-13 11:28:23 +00:00
This can later be used to indicate if accounts are dormant/abandoned/moved
"""
if '@' not in handle:
return
nickname = handle.split('@')[0]
domain = handle.split('@')[1]
2021-12-26 18:17:37 +00:00
domain = remove_domain_port(domain)
2022-01-02 19:57:50 +00:00
account_path = acct_dir(base_dir, nickname, domain)
if not os.path.isdir(account_path):
return
2021-12-28 20:32:11 +00:00
if not is_following_actor(base_dir, nickname, domain, actor):
return
2022-01-02 19:57:50 +00:00
last_seen_path = account_path + '/lastseen'
if not os.path.isdir(last_seen_path):
os.mkdir(last_seen_path)
last_seen_filename = \
last_seen_path + '/' + actor.replace('/', '#') + '.txt'
2023-11-20 22:27:58 +00:00
curr_time = date_utcnow()
days_since_epoch = (curr_time - date_epoch()).days
# has the value changed?
2022-01-02 19:57:50 +00:00
if os.path.isfile(last_seen_filename):
try:
with open(last_seen_filename, 'r',
2024-07-14 13:01:46 +00:00
encoding='utf-8') as fp_last_seen:
days_since_epoch_file = fp_last_seen.read()
if int(days_since_epoch_file) == days_since_epoch:
# value hasn't changed, so we can save writing
# anything to file
return
except OSError:
print('EX: _update_last_seen unable to read ' + last_seen_filename)
2021-11-25 21:18:53 +00:00
try:
2022-06-09 14:46:30 +00:00
with open(last_seen_filename, 'w+',
2024-07-14 13:01:46 +00:00
encoding='utf-8') as fp_last_seen:
fp_last_seen.write(str(days_since_epoch))
2021-11-25 21:18:53 +00:00
except OSError:
2024-07-02 22:16:13 +00:00
print('EX: _update_last_seen unable to write ' + last_seen_filename)
2022-06-14 11:42:45 +00:00
def _bounce_dm(sender_post_id: str, session, http_prefix: str,
               base_dir: str, nickname: str, domain: str, port: int,
               sending_handle: str, federation_list: [],
               send_threads: [], post_log: [],
               cached_webfingers: {}, person_cache: {},
               translate: {}, debug: bool,
               last_bounce_message: [], system_language: str,
               signing_priv_key_pem: str,
               dm_license_url: str,
               languages_understood: [],
               bounce_is_chat: bool,
               curr_domain: str, onion_domain: str, i2p_domain: str,
               sites_unavailable: []) -> bool:
    """Sends a bounce message back to the sending handle
    if a DM has been rejected

    Returns True when a bounce DM was created and sent, False when
    rate-limited or when the bounce message could not be created.
    last_bounce_message is a single-element list holding the epoch
    time of the last bounce, mutated here as shared state.
    """
    print(nickname + '@' + domain +
          ' cannot receive DM from ' + sending_handle +
          ' because they do not follow them')
    # Don't send out bounce messages too frequently.
    # Otherwise an adversary could try to DoS your instance
    # by continuously sending DMs to you
    curr_time = int(time.time())
    if curr_time - last_bounce_message[0] < 60:
        return False
    # record the last time that a bounce was generated
    last_bounce_message[0] = curr_time

    sender_nickname = sending_handle.split('@')[0]
    # a leading '!' marks a group account handle
    group_account = False
    if sending_handle.startswith('!'):
        sending_handle = sending_handle[1:]
        group_account = True
    sender_domain = sending_handle.split('@')[1]
    sender_port = port
    if ':' in sender_domain:
        sender_port = get_port_from_domain(sender_domain)
        sender_domain = remove_domain_port(sender_domain)
    # create the bounce DM
    subject = None
    content = translate['DM bounce']
    save_to_file = False
    client_to_server = False
    comments_enabled = False
    attach_image_filename = None
    media_type = None
    image_description = ''
    video_transcript = None
    city = 'London, England'
    # the bounce is a reply to the rejected DM
    in_reply_to = remove_id_ending(sender_post_id)
    in_reply_to_atom_uri = None
    schedule_post = False
    event_date = None
    event_time = None
    event_end_time = None
    location = None
    conversation_id = None
    convthread_id = None
    low_bandwidth = False
    buy_url = ''
    chat_url = ''
    auto_cw_cache = {}
    post_json_object = \
        create_direct_message_post(base_dir, nickname, domain, port,
                                   http_prefix, content,
                                   save_to_file, client_to_server,
                                   comments_enabled,
                                   attach_image_filename, media_type,
                                   image_description, video_transcript, city,
                                   in_reply_to, in_reply_to_atom_uri,
                                   subject, debug, schedule_post,
                                   event_date, event_time, event_end_time,
                                   location, system_language, conversation_id,
                                   convthread_id,
                                   low_bandwidth, dm_license_url,
                                   dm_license_url, '',
                                   languages_understood, bounce_is_chat,
                                   translate, buy_url, chat_url,
                                   auto_cw_cache, session)
    if not post_json_object:
        print('WARN: unable to create bounce message to ' + sending_handle)
        return False
    extra_headers = {}
    # bounce DM goes back to the sender
    print('Sending bounce DM to ' + sending_handle)
    send_signed_json(post_json_object, session, base_dir,
                     nickname, domain, port,
                     sender_nickname, sender_domain, sender_port,
                     http_prefix, False, federation_list,
                     send_threads, post_log, cached_webfingers,
                     person_cache, debug, __version__, None, group_account,
                     signing_priv_key_pem, 7238634,
                     curr_domain, onion_domain, i2p_domain,
                     extra_headers, sites_unavailable, system_language)
    return True


def _is_valid_dm(base_dir: str, nickname: str, domain: str, port: int,
                 post_json_object: {}, update_index_list: [],
                 session, http_prefix: str,
                 federation_list: [],
                 send_threads: [], post_log: [],
                 cached_webfingers: {},
                 person_cache: {},
                 translate: {}, debug: bool,
                 last_bounce_message: [],
                 handle: str, system_language: str,
                 signing_priv_key_pem: str,
                 dm_license_url: str,
                 languages_understood: [],
                 curr_domain: str, onion_domain: str, i2p_domain: str,
                 sites_unavailable: []) -> bool:
    """Is the given message a valid DM?

    Returns True when the DM should be accepted into the inbox.
    May mutate post_json_object (vote handling) and append 'dm'
    to update_index_list when the dm timeline index needs updating.
    """
    if nickname == 'inbox':
        # going to the shared inbox
        return True
    # check for the flag file which indicates to
    # only receive DMs from people you are following
    follow_dms_filename = acct_dir(base_dir, nickname, domain) + '/.followDMs'
    if not os.path.isfile(follow_dms_filename):
        # flag file is absent, so any DM is accepted
        # dm index will be updated
        update_index_list.append('dm')
        act_url = local_actor_url(http_prefix, nickname, domain)
        _dm_notify(base_dir, handle, act_url + '/dm')
        return True

    # get the file containing following handles
    following_filename = \
        acct_dir(base_dir, nickname, domain) + '/following.txt'
    # who is sending a DM?
    if not post_json_object.get('actor'):
        return False
    sending_actor = get_actor_from_post(post_json_object)
    sending_actor_nickname = \
        get_nickname_from_actor(sending_actor)
    if not sending_actor_nickname:
        return False
    sending_actor_domain, _ = \
        get_domain_from_actor(sending_actor)
    if not sending_actor_domain:
        return False
    # Is this DM to yourself? eg. a reminder
    sending_to_self = False
    if sending_actor_nickname == nickname and \
       sending_actor_domain == domain:
        sending_to_self = True
    # check that the following file exists
    if not sending_to_self:
        if not os.path.isfile(following_filename):
            print('No following.txt file exists for ' +
                  nickname + '@' + domain +
                  ' so not accepting DM from ' +
                  sending_actor_nickname + '@' +
                  sending_actor_domain)
            return False
    # Not sending to yourself
    if not sending_to_self:
        # is this a vote on a question?
        if is_vote(base_dir, nickname, domain,
                   post_json_object, debug):
            # make the content the same as the vote answer
            post_json_object['object']['content'] = \
                post_json_object['object']['name']
            # remove any other content
            if post_json_object['object'].get("contentMap"):
                del post_json_object['object']['contentMap']
            # remove any summary / cw
            post_json_object['object']['summary'] = None
            if post_json_object['object'].get("summaryMap"):
                del post_json_object['object']['summaryMap']
            return True

        # get the handle of the DM sender
        send_h = sending_actor_nickname + '@' + sending_actor_domain
        # check the follow
        if not is_following_actor(base_dir, nickname, domain, send_h):
            # DMs may always be allowed from some domains
            if not dm_allowed_from_domain(base_dir,
                                          nickname, domain,
                                          sending_actor_domain):
                # send back a bounce DM
                if post_json_object.get('id') and \
                   post_json_object.get('object'):
                    obj_has_dict = has_object_dict(post_json_object)
                    # don't send bounces back to
                    # replies to bounce messages
                    obj = post_json_object['object']
                    if obj_has_dict and \
                       not get_reply_to(obj):
                        bounced_id = \
                            remove_id_ending(post_json_object['id'])
                        # ChatMessage bounces are flagged so the
                        # bounce is also sent as a chat message
                        bounce_chat = False
                        if obj.get('type'):
                            if obj['type'] == 'ChatMessage':
                                bounce_chat = True
                        _bounce_dm(bounced_id,
                                   session, http_prefix,
                                   base_dir,
                                   nickname, domain,
                                   port, send_h,
                                   federation_list,
                                   send_threads, post_log,
                                   cached_webfingers,
                                   person_cache,
                                   translate, debug,
                                   last_bounce_message,
                                   system_language,
                                   signing_priv_key_pem,
                                   dm_license_url,
                                   languages_understood,
                                   bounce_chat,
                                   curr_domain,
                                   onion_domain, i2p_domain,
                                   sites_unavailable)
                return False
    # dm index will be updated
    update_index_list.append('dm')
    act_url = local_actor_url(http_prefix, nickname, domain)
    _dm_notify(base_dir, handle, act_url + '/dm')
    return True


def _create_reply_notification_file(base_dir: str, nickname: str, domain: str,
                                    handle: str, debug: bool, post_is_dm: bool,
                                    post_json_object: {}, actor: str,
                                    update_index_list: [], http_prefix: str,
                                    default_reply_interval_hrs: int) -> bool:
    """Generates a file indicating that a new reply has arrived
    The file can then be used by other systems to create a notification
    xmpp, matrix, email, etc

    Returns True when the reply targets a muted conversation
    (callers can then avoid notifying). Appends 'tlreplies' to
    update_index_list for any non-DM reply to this account.
    """
    is_reply_to_muted_post = False
    if post_is_dm:
        return is_reply_to_muted_post
    if not is_reply(post_json_object, actor):
        return is_reply_to_muted_post
    if nickname == 'inbox':
        # shared inbox has no notifications
        return is_reply_to_muted_post
    # replies index will be updated
    update_index_list.append('tlreplies')

    # Due to lack of AP specification maintenance, a conversation can also be
    # referred to as a thread or (confusingly) "context"
    conversation_id = None
    if post_json_object['object'].get('conversation'):
        conversation_id = post_json_object['object']['conversation']
    elif post_json_object['object'].get('context'):
        conversation_id = post_json_object['object']['context']
    elif post_json_object['object'].get('thread'):
        conversation_id = post_json_object['object']['thread']

    in_reply_to = get_reply_to(post_json_object['object'])
    if not in_reply_to:
        return is_reply_to_muted_post
    if not isinstance(in_reply_to, str):
        return is_reply_to_muted_post
    if not is_muted_conv(base_dir, nickname, domain, in_reply_to,
                         conversation_id):
        # check if the reply is within the allowed time period
        # after publication
        reply_interval_hours = \
            get_reply_interval_hours(base_dir, nickname, domain,
                                     default_reply_interval_hrs)
        if can_reply_to(base_dir, nickname, domain, in_reply_to,
                        reply_interval_hours):
            act_url = local_actor_url(http_prefix, nickname, domain)
            _reply_notify(base_dir, handle, act_url + '/tlreplies')
        else:
            if debug:
                print('Reply to ' + in_reply_to + ' is outside of the ' +
                      'permitted interval of ' + str(reply_interval_hours) +
                      ' hours')
            return False
    else:
        is_reply_to_muted_post = True
    return is_reply_to_muted_post


def _low_frequency_post_notification(base_dir: str, http_prefix: str,
                                     nickname: str, domain: str,
                                     port: int, handle: str,
                                     post_is_dm: bool, json_obj: {}) -> None:
    """Should we notify that a post from this person has arrived?
    This is for cases where the notify checkbox is enabled on the
    person options screen. DMs are excluded because they already
    produce their own notification.
    """
    if post_is_dm:
        return
    if not json_obj:
        return
    if not json_obj.get('attributedTo'):
        return
    if not json_obj.get('id'):
        return
    attributed_to = get_attributed_to(json_obj['attributedTo'])
    if not attributed_to:
        return
    # work out who sent the post
    from_nickname = get_nickname_from_actor(attributed_to)
    if not from_nickname:
        return
    from_domain, from_port = get_domain_from_actor(attributed_to)
    if not from_domain:
        return
    from_domain_full = get_full_domain(from_domain, from_port)
    # only notify if this account opted in to notifications
    # for the sending account
    if notify_when_person_posts(base_dir, nickname, domain,
                                from_nickname, from_domain_full):
        post_id = remove_id_ending(json_obj['id'])
        dom_full = get_full_domain(domain, port)
        post_link = \
            local_actor_url(http_prefix, nickname, dom_full) + \
            '?notifypost=' + post_id.replace('/', '-')
        _notify_post_arrival(base_dir, handle, post_link)


def _check_for_git_patches(base_dir: str, nickname: str, domain: str,
                           handle: str, json_obj: {}) -> int:
    """check for incoming git patches
    Returns 0 if the post is not a git patch,
    1 if a patch was received and stored,
    2 if the post looked like a patch but was not accepted
    """
    if not json_obj:
        return 0
    if not json_obj.get('content'):
        return 0
    if not json_obj.get('summary'):
        return 0
    if not json_obj.get('attributedTo'):
        return 0
    attributed_to = get_attributed_to(json_obj['attributedTo'])
    if not attributed_to:
        return 0
    # identify the sending account
    from_nickname = get_nickname_from_actor(attributed_to)
    if not from_nickname:
        return 0
    from_domain, from_port = get_domain_from_actor(attributed_to)
    if not from_domain:
        return 0
    from_domain_full = get_full_domain(from_domain, from_port)
    if receive_git_patch(base_dir, nickname, domain,
                         json_obj['type'], json_obj['summary'],
                         json_obj['content'],
                         from_nickname, from_domain_full):
        _git_patch_notify(base_dir, handle, json_obj['summary'],
                          from_nickname, from_domain_full)
        return 1
    # looked like a patch (summary convention) but was rejected
    if '[PATCH]' in json_obj['content']:
        print('WARN: git patch not accepted - ' + json_obj['summary'])
        return 2
    return 0


def _has_former_representations(post_json_object: {}) -> bool:
"""Does the given post contain a list of previous edits?
"""
post_obj = post_json_object['object']
if not isinstance(post_obj, dict):
return False
if not post_obj.get('id'):
return False
if not post_obj.get('formerRepresentations'):
return False
if not isinstance(post_obj['formerRepresentations'], dict):
return False
if not post_obj['formerRepresentations'].get('orderedItems'):
return False
if not isinstance(post_obj['formerRepresentations']['orderedItems'],
list):
return False
return True
def _former_representations_to_edits(base_dir: str,
                                     nickname: str, domain: str,
                                     post_json_object: {},
                                     max_mentions: int,
                                     max_emoji: int,
                                     allow_local_network_access: bool,
                                     debug: bool,
                                     system_language: str,
                                     http_prefix: str,
                                     domain_full: str, person_cache: {},
                                     max_hashtags: int,
                                     port: int,
                                     onion_domain: str,
                                     i2p_domain: str) -> bool:
    """ Some instances use formerRepresentations to store
    previous edits. Each entry is validated and appended to the
    local .edits history file for the post, keyed by its
    published date. Returns True if the post carried a
    formerRepresentations list and was located locally.
    """
    if not _has_former_representations(post_json_object):
        return False
    post_obj = post_json_object['object']
    prev_edits_list = post_obj['formerRepresentations']['orderedItems']
    post_id = remove_id_ending(post_obj['id'])
    post_filename = \
        locate_post(base_dir, nickname, domain, post_id, False)
    if not post_filename:
        return False
    post_history_filename = post_filename.replace('.json', '.edits')
    post_history_json = {}
    if os.path.isfile(post_history_filename):
        post_history_json = load_json(post_history_filename)
        # guard against an unreadable/corrupt edits file
        if not post_history_json:
            post_history_json = {}
    # check each former post and add it to the edits file if needed
    posts_added = False
    for prev_post_json in prev_edits_list:
        prev_post_obj = prev_post_json
        if has_object_dict(prev_post_json):
            prev_post_obj = prev_post_json['object']
        # get the published date for the previous post
        if not prev_post_obj.get('published'):
            continue
        published_str = prev_post_obj['published']
        # was the previous post already logged?
        if post_history_json.get(published_str):
            continue
        # add Create to the previous post if needed
        if not has_object_dict(prev_post_json):
            prev_post_id = None
            if prev_post_json.get('id'):
                prev_post_id = prev_post_json['id']
            # NOTE: the wrapped Create returned here is what gets stored,
            # so the return value must be captured
            prev_post_json = \
                outbox_message_create_wrap(http_prefix,
                                           nickname, domain, port,
                                           prev_post_json)
            if prev_post_id:
                # preserve the original id rather than the
                # newly-generated one
                prev_post_json['id'] = prev_post_id
                prev_post_json['object']['id'] = prev_post_id
                prev_post_json['object']['url'] = prev_post_id
                prev_post_json['object']['atomUri'] = prev_post_id
        # validate the previous post
        harmless_markup(prev_post_json)
        if not valid_post_content(base_dir, nickname, domain,
                                  prev_post_json,
                                  max_mentions, max_emoji,
                                  allow_local_network_access, debug,
                                  system_language, http_prefix,
                                  domain_full, person_cache,
                                  max_hashtags, onion_domain, i2p_domain):
            continue
        post_history_json[published_str] = prev_post_json
        posts_added = True
    if posts_added:
        save_json(post_history_json, post_history_filename)
        print('formerRepresentations updated for ' + post_filename)
    return True


def _inbox_after_initial(server, inbox_start_time,
                         recent_posts_cache: {}, max_recent_posts: int,
                         session, session_onion, session_i2p,
                         key_id: str, handle: str, message_json: {},
                         base_dir: str, http_prefix: str, send_threads: [],
                         post_log: [], cached_webfingers: {}, person_cache: {},
                         domain: str, onion_domain: str, i2p_domain: str,
                         port: int, federation_list: [], debug: bool,
                         queue_filename: str, destination_filename: str,
                         max_replies: int, allow_deletion: bool,
                         max_mentions: int, max_emoji: int, translate: {},
                         unit_test: bool,
                         yt_replace_domain: str,
                         twitter_replacement_domain: str,
                         show_published_date_only: bool,
                         allow_local_network_access: bool,
                         peertube_instances: [],
                         last_bounce_message: [],
                         theme_name: str, system_language: str,
                         max_like_count: int,
                         signing_priv_key_pem: str,
                         default_reply_interval_hrs: int,
                         cw_lists: {}, lists_enabled: str,
                         dm_license_url: str,
                         languages_understood: [],
                         mitm: bool, bold_reading: bool,
                         dogwhistles: {},
                         max_hashtags: int, buy_sites: {},
                         sites_unavailable: []) -> bool:
    """ Anything which needs to be done after initial checks have passed.
    Dispatches likes/reactions/bookmarks/announces/deletes, then validates
    and stores ordinary posts, updating timeline indexes, caches and
    notifications. Returns True when the post was stored.
    """
    # if this is a clearnet instance then replace any onion/i2p
    # domains with the account domain
    if onion_domain or i2p_domain:
        message_str = json.dumps(message_json, ensure_ascii=False)
        if onion_domain:
            if onion_domain in message_str:
                message_str = message_str.replace(onion_domain, domain)
                try:
                    message_json = json.loads(message_str)
                except json.decoder.JSONDecodeError as ex:
                    print('EX: json decode error ' + str(ex) +
                          ' from _inbox_after_initial onion ' +
                          str(message_str))
                    inbox_start_time = time.time()
                    return False
        if i2p_domain:
            if i2p_domain in message_str:
                message_str = message_str.replace(i2p_domain, domain)
                try:
                    message_json = json.loads(message_str)
                except json.decoder.JSONDecodeError as ex:
                    print('EX: json decode error ' + str(ex) +
                          ' from _inbox_after_initial i2p ' +
                          str(message_str))
                    inbox_start_time = time.time()
                    return False

    # the actor is the key id without any fragment
    actor = key_id
    if '#' in actor:
        actor = key_id.split('#')[0]

    _update_last_seen(base_dir, handle, actor)

    post_is_dm = False
    is_group = _group_handle(base_dir, handle)
    fitness_performance(inbox_start_time, server.fitness,
                        'INBOX', '_group_handle',
                        debug)
    inbox_start_time = time.time()

    handle_name = handle.split('@')[0]

    if receive_like(recent_posts_cache,
                    session, handle,
                    base_dir, http_prefix,
                    domain, port,
                    onion_domain, i2p_domain,
                    cached_webfingers,
                    person_cache,
                    message_json,
                    debug, signing_priv_key_pem,
                    max_recent_posts, translate,
                    allow_deletion,
                    yt_replace_domain,
                    twitter_replacement_domain,
                    peertube_instances,
                    allow_local_network_access,
                    theme_name, system_language,
                    max_like_count, cw_lists, lists_enabled,
                    bold_reading, dogwhistles,
                    server.min_images_for_accounts,
                    buy_sites, server.auto_cw_cache):
        if debug:
            print('DEBUG: Like accepted from ' + actor)
        fitness_performance(inbox_start_time, server.fitness,
                            'INBOX', '_receive_like',
                            debug)
        inbox_start_time = time.time()
        return False

    if receive_undo_like(recent_posts_cache,
                         session, handle,
                         base_dir, http_prefix,
                         domain, port,
                         cached_webfingers,
                         person_cache,
                         message_json,
                         debug, signing_priv_key_pem,
                         max_recent_posts, translate,
                         allow_deletion,
                         yt_replace_domain,
                         twitter_replacement_domain,
                         peertube_instances,
                         allow_local_network_access,
                         theme_name, system_language,
                         max_like_count, cw_lists, lists_enabled,
                         bold_reading, dogwhistles,
                         server.min_images_for_accounts,
                         buy_sites, server.auto_cw_cache):
        if debug:
            print('DEBUG: Undo like accepted from ' + actor)
        fitness_performance(inbox_start_time, server.fitness,
                            'INBOX', '_receive_undo_like',
                            debug)
        inbox_start_time = time.time()
        return False

    if receive_reaction(recent_posts_cache,
                        session, handle,
                        base_dir, http_prefix,
                        domain, port,
                        onion_domain,
                        cached_webfingers,
                        person_cache,
                        message_json,
                        debug, signing_priv_key_pem,
                        max_recent_posts, translate,
                        allow_deletion,
                        yt_replace_domain,
                        twitter_replacement_domain,
                        peertube_instances,
                        allow_local_network_access,
                        theme_name, system_language,
                        max_like_count, cw_lists, lists_enabled,
                        bold_reading, dogwhistles,
                        server.min_images_for_accounts,
                        buy_sites, server.auto_cw_cache):
        if debug:
            print('DEBUG: Reaction accepted from ' + actor)
        fitness_performance(inbox_start_time, server.fitness,
                            'INBOX', '_receive_reaction',
                            debug)
        inbox_start_time = time.time()
        return False

    if receive_zot_reaction(recent_posts_cache,
                            session, handle,
                            base_dir, http_prefix,
                            domain, port,
                            onion_domain,
                            cached_webfingers,
                            person_cache,
                            message_json,
                            debug, signing_priv_key_pem,
                            max_recent_posts, translate,
                            allow_deletion,
                            yt_replace_domain,
                            twitter_replacement_domain,
                            peertube_instances,
                            allow_local_network_access,
                            theme_name, system_language,
                            max_like_count, cw_lists, lists_enabled,
                            bold_reading, dogwhistles,
                            server.min_images_for_accounts,
                            buy_sites, server.auto_cw_cache):
        if debug:
            print('DEBUG: Zot reaction accepted from ' + actor)
        fitness_performance(inbox_start_time, server.fitness,
                            'INBOX', '_receive_zot_reaction',
                            debug)
        inbox_start_time = time.time()
        return False

    if receive_undo_reaction(recent_posts_cache,
                             session, handle,
                             base_dir, http_prefix,
                             domain, port,
                             cached_webfingers,
                             person_cache,
                             message_json,
                             debug, signing_priv_key_pem,
                             max_recent_posts, translate,
                             allow_deletion,
                             yt_replace_domain,
                             twitter_replacement_domain,
                             peertube_instances,
                             allow_local_network_access,
                             theme_name, system_language,
                             max_like_count, cw_lists, lists_enabled,
                             bold_reading, dogwhistles,
                             server.min_images_for_accounts,
                             buy_sites, server.auto_cw_cache):
        if debug:
            print('DEBUG: Undo reaction accepted from ' + actor)
        fitness_performance(inbox_start_time, server.fitness,
                            'INBOX', '_receive_undo_reaction',
                            debug)
        inbox_start_time = time.time()
        return False

    if receive_bookmark(recent_posts_cache,
                        session, handle,
                        base_dir, http_prefix,
                        domain, port,
                        cached_webfingers,
                        person_cache,
                        message_json,
                        debug, signing_priv_key_pem,
                        max_recent_posts, translate,
                        allow_deletion,
                        yt_replace_domain,
                        twitter_replacement_domain,
                        peertube_instances,
                        allow_local_network_access,
                        theme_name, system_language,
                        max_like_count, cw_lists, lists_enabled,
                        bold_reading, dogwhistles,
                        server.min_images_for_accounts,
                        server.buy_sites,
                        server.auto_cw_cache):
        if debug:
            print('DEBUG: Bookmark accepted from ' + actor)
        fitness_performance(inbox_start_time, server.fitness,
                            'INBOX', '_receive_bookmark',
                            debug)
        inbox_start_time = time.time()
        return False

    if receive_undo_bookmark(recent_posts_cache,
                             session, handle,
                             base_dir, http_prefix,
                             domain, port,
                             cached_webfingers,
                             person_cache,
                             message_json,
                             debug, signing_priv_key_pem,
                             max_recent_posts, translate,
                             allow_deletion,
                             yt_replace_domain,
                             twitter_replacement_domain,
                             peertube_instances,
                             allow_local_network_access,
                             theme_name, system_language,
                             max_like_count, cw_lists, lists_enabled,
                             bold_reading, dogwhistles,
                             server.min_images_for_accounts,
                             server.buy_sites,
                             server.auto_cw_cache):
        if debug:
            print('DEBUG: Undo bookmark accepted from ' + actor)
        fitness_performance(inbox_start_time, server.fitness,
                            'INBOX', '_receive_undo_bookmark',
                            debug)
        inbox_start_time = time.time()
        return False

    # unwrap a Create which has been wrapped inside an Announce
    if is_create_inside_announce(message_json):
        message_json = message_json['object']
    fitness_performance(inbox_start_time, server.fitness,
                        'INBOX', 'is_create_inside_announce',
                        debug)
    inbox_start_time = time.time()

    # store any bookwyrm type notes
    store_book_events(base_dir,
                      message_json,
                      system_language,
                      languages_understood,
                      translate, debug,
                      server.max_recent_books,
                      server.books_cache,
                      server.max_cached_readers)

    if receive_announce(recent_posts_cache,
                        session, handle,
                        base_dir, http_prefix,
                        domain, onion_domain, i2p_domain, port,
                        cached_webfingers,
                        person_cache,
                        message_json,
                        debug, translate,
                        yt_replace_domain,
                        twitter_replacement_domain,
                        allow_local_network_access,
                        theme_name, system_language,
                        signing_priv_key_pem,
                        max_recent_posts,
                        allow_deletion,
                        peertube_instances,
                        max_like_count, cw_lists, lists_enabled,
                        bold_reading, dogwhistles, mitm,
                        server.min_images_for_accounts,
                        server.buy_sites,
                        languages_understood,
                        server.auto_cw_cache,
                        server.block_federated):
        # NOTE: no return here - an announce continues to be
        # processed as a post below
        if debug:
            print('DEBUG: Announce accepted from ' + actor)
        fitness_performance(inbox_start_time, server.fitness,
                            'INBOX', '_receive_announce',
                            debug)
        inbox_start_time = time.time()

    if receive_undo_announce(recent_posts_cache,
                             handle, base_dir, domain,
                             message_json, debug):
        if debug:
            print('DEBUG: Undo announce accepted from ' + actor)
        fitness_performance(inbox_start_time, server.fitness,
                            'INBOX', '_receive_undo_announce',
                            debug)
        inbox_start_time = time.time()
        return False

    if receive_delete(handle,
                      base_dir, http_prefix,
                      domain, port,
                      message_json,
                      debug, allow_deletion,
                      recent_posts_cache):
        if debug:
            print('DEBUG: Delete accepted from ' + actor)
        fitness_performance(inbox_start_time, server.fitness,
                            'INBOX', '_receive_delete',
                            debug)
        inbox_start_time = time.time()
        return False

    if debug:
        print('DEBUG: initial checks passed')
        print('copy queue file from ' + queue_filename +
              ' to ' + destination_filename)

    # don't overwrite an existing stored post
    if os.path.isfile(destination_filename):
        return True

    if message_json.get('postNickname'):
        post_json_object = message_json['post']
    else:
        post_json_object = message_json

    nickname = handle.split('@')[0]
    if is_vote(base_dir, nickname, domain, post_json_object, debug):
        receive_question_vote(server, base_dir, nickname, domain,
                              http_prefix, handle, debug,
                              post_json_object, recent_posts_cache,
                              session, session_onion, session_i2p,
                              onion_domain, i2p_domain, port,
                              federation_list, send_threads, post_log,
                              cached_webfingers, person_cache,
                              signing_priv_key_pem,
                              max_recent_posts, translate,
                              allow_deletion,
                              yt_replace_domain,
                              twitter_replacement_domain,
                              peertube_instances,
                              allow_local_network_access,
                              theme_name, system_language,
                              max_like_count,
                              cw_lists, lists_enabled,
                              bold_reading, dogwhistles,
                              server.min_images_for_accounts,
                              server.buy_sites,
                              server.sites_unavailable,
                              server.auto_cw_cache)
        fitness_performance(inbox_start_time, server.fitness,
                            'INBOX', '_receive_question_vote',
                            debug)
        inbox_start_time = time.time()

    json_obj = None
    domain_full = get_full_domain(domain, port)
    convert_post_content_to_html(post_json_object)
    # neutralise anything harmful
    harmless_markup(post_json_object)
    if valid_post_content(base_dir, nickname, domain,
                          post_json_object, max_mentions, max_emoji,
                          allow_local_network_access, debug,
                          system_language, http_prefix,
                          domain_full, person_cache,
                          max_hashtags, onion_domain, i2p_domain):
        fitness_performance(inbox_start_time, server.fitness,
                            'INBOX', 'valid_post_content',
                            debug)
        inbox_start_time = time.time()
        # is the sending actor valid?
        if not valid_sending_actor(session, base_dir, nickname, domain,
                                   person_cache, post_json_object,
                                   signing_priv_key_pem, debug, unit_test,
                                   system_language):
            if debug:
                print('Inbox sending actor is not valid ' +
                      str(post_json_object))
            fitness_performance(inbox_start_time, server.fitness,
                                'INBOX', 'not_valid_sending_actor',
                                debug)
            inbox_start_time = time.time()
            return False
        fitness_performance(inbox_start_time, server.fitness,
                            'INBOX', 'valid_sending_actor',
                            debug)
        inbox_start_time = time.time()

        if post_json_object.get('object'):
            json_obj = post_json_object['object']
            if not isinstance(json_obj, dict):
                json_obj = None
        else:
            json_obj = post_json_object

        if _check_for_git_patches(base_dir, nickname, domain,
                                  handle, json_obj) == 2:
            fitness_performance(inbox_start_time, server.fitness,
                                'INBOX', '_check_for_git_patches',
                                debug)
            inbox_start_time = time.time()
            return False

        # replace YouTube links, so they get less tracking data
        replace_you_tube(post_json_object, yt_replace_domain,
                         system_language)
        fitness_performance(inbox_start_time, server.fitness,
                            'INBOX', 'replace_you_tube',
                            debug)
        inbox_start_time = time.time()
        # replace twitter link domains, so that you can view twitter posts
        # without having an account
        replace_twitter(post_json_object, twitter_replacement_domain,
                        system_language)
        fitness_performance(inbox_start_time, server.fitness,
                            'INBOX', 'replace_you_twitter',
                            debug)
        inbox_start_time = time.time()

        # list of indexes to be updated
        update_index_list = ['inbox']
        populate_replies(base_dir, http_prefix, domain, post_json_object,
                         max_replies, debug)
        fitness_performance(inbox_start_time, server.fitness,
                            'INBOX', 'populate_replies',
                            debug)
        inbox_start_time = time.time()

        is_reply_to_muted_post = False

        if not is_group:
            # create a DM notification file if needed
            post_is_dm = is_dm(post_json_object)
            if post_is_dm:
                if not _is_valid_dm(base_dir, nickname, domain, port,
                                    post_json_object, update_index_list,
                                    session, http_prefix,
                                    federation_list,
                                    send_threads, post_log,
                                    cached_webfingers,
                                    person_cache,
                                    translate, debug,
                                    last_bounce_message,
                                    handle, system_language,
                                    signing_priv_key_pem,
                                    dm_license_url,
                                    languages_understood,
                                    domain,
                                    onion_domain, i2p_domain,
                                    server.sites_unavailable):
                    if debug:
                        print('Invalid DM ' + str(post_json_object))
                    return False
                fitness_performance(inbox_start_time, server.fitness,
                                    'INBOX', '_is_valid_dm',
                                    debug)
                inbox_start_time = time.time()

            # get the actor being replied to
            actor = local_actor_url(http_prefix, nickname, domain_full)
            fitness_performance(inbox_start_time, server.fitness,
                                'INBOX', 'local_actor_url',
                                debug)
            inbox_start_time = time.time()

            # create a reply notification file if needed
            is_reply_to_muted_post = \
                _create_reply_notification_file(base_dir, nickname, domain,
                                                handle, debug, post_is_dm,
                                                post_json_object, actor,
                                                update_index_list,
                                                http_prefix,
                                                default_reply_interval_hrs)
            fitness_performance(inbox_start_time, server.fitness,
                                'INBOX', '_create_reply_notification_file',
                                debug)
            inbox_start_time = time.time()

            show_vote_posts = True
            show_vote_file = \
                acct_dir(base_dir, nickname, domain) + '/.noVotes'
            if os.path.isfile(show_vote_file):
                show_vote_posts = False

            if is_image_media(session, base_dir, http_prefix,
                              nickname, domain, post_json_object,
                              yt_replace_domain,
                              twitter_replacement_domain,
                              allow_local_network_access,
                              recent_posts_cache, debug, system_language,
                              domain_full, person_cache,
                              signing_priv_key_pem,
                              bold_reading, show_vote_posts,
                              languages_understood):
                # media index will be updated
                update_index_list.append('tlmedia')
            fitness_performance(inbox_start_time, server.fitness,
                                'INBOX', 'is_image_media',
                                debug)
            inbox_start_time = time.time()
            if is_blog_post(post_json_object):
                # blogs index will be updated
                update_index_list.append('tlblogs')

        # get the avatar for a reply/announce
        _obtain_avatar_for_reply_post(session, base_dir,
                                      http_prefix, domain,
                                      onion_domain, i2p_domain,
                                      person_cache, post_json_object,
                                      debug, signing_priv_key_pem)
        fitness_performance(inbox_start_time, server.fitness,
                            'INBOX', '_obtain_avatar_for_reply_post',
                            debug)
        # cache any svg image attachments locally
        # This is so that any scripts can be removed
        cache_svg_images(session, base_dir, http_prefix,
                         domain, domain_full,
                         onion_domain, i2p_domain,
                         post_json_object,
                         federation_list, debug, None)
        inbox_start_time = time.time()

        # save the post to file
        if save_json(post_json_object, destination_filename):
            fitness_performance(inbox_start_time, server.fitness,
                                'INBOX', 'save_json',
                                debug)
            inbox_start_time = time.time()
            if mitm:
                # write a file to indicate that this post was delivered
                # via a third party
                destination_filename_mitm = \
                    destination_filename.replace('.json', '') + '.mitm'
                try:
                    with open(destination_filename_mitm, 'w+',
                              encoding='utf-8') as fp_mitm:
                        fp_mitm.write('\n')
                except OSError:
                    print('EX: _inbox_after_initial unable to write ' +
                          destination_filename_mitm)

            _low_frequency_post_notification(base_dir, http_prefix,
                                             nickname, domain, port,
                                             handle, post_is_dm, json_obj)
            fitness_performance(inbox_start_time, server.fitness,
                                'INBOX',
                                '_low_frequency_post_notification',
                                debug)
            inbox_start_time = time.time()

            # If this is a reply to a muted post then also mute it.
            # This enables you to ignore a threat that's getting boring
            if is_reply_to_muted_post:
                print('MUTE REPLY: ' + destination_filename)
                destination_filename_muted = \
                    destination_filename + '.muted'
                try:
                    with open(destination_filename_muted, 'w+',
                              encoding='utf-8') as fp_mute:
                        fp_mute.write('\n')
                except OSError:
                    print('EX: _inbox_after_initial unable to write 2 ' +
                          destination_filename_muted)

            # is this an edit of a previous post?
            # in Mastodon "delete and redraft"
            # NOTE: this must be done before update_conversation is called
            edited_filename, edited_json = \
                edited_post_filename(base_dir, handle_name, domain,
                                     post_json_object, debug, 300,
                                     system_language)
            fitness_performance(inbox_start_time, server.fitness,
                                'INBOX', 'edited_post_filename',
                                debug)
            inbox_start_time = time.time()

            # handle any previous edits
            if _former_representations_to_edits(base_dir,
                                                nickname, domain,
                                                post_json_object,
                                                max_mentions,
                                                max_emoji,
                                                allow_local_network_access,
                                                debug,
                                                system_language,
                                                http_prefix,
                                                domain_full,
                                                person_cache,
                                                max_hashtags, port,
                                                onion_domain,
                                                i2p_domain):
                # ensure that there is an updated entry
                # for the publication date
                if post_json_object['object'].get('published') and \
                   not post_json_object['object'].get('updated'):
                    post_json_object['object']['updated'] = \
                        post_json_object['object']['published']
                    save_json(post_json_object, destination_filename)

            # If this was an edit then update the edits json file and
            # delete the previous version of the post
            if edited_filename and edited_json:
                prev_edits_filename = \
                    edited_filename.replace('.json', '.edits')
                edits_filename = \
                    destination_filename.replace('.json', '.edits')
                modified = edited_json['object']['published']
                if os.path.isfile(edits_filename):
                    edits_json = load_json(edits_filename)
                    if edits_json:
                        if not edits_json.get(modified):
                            edits_json[modified] = edited_json
                            save_json(edits_json, edits_filename)
                else:
                    if os.path.isfile(prev_edits_filename):
                        if prev_edits_filename != edits_filename:
                            try:
                                copyfile(prev_edits_filename,
                                         edits_filename)
                            except OSError:
                                print('EX: failed to copy edits file')
                        edits_json = load_json(edits_filename)
                        if edits_json:
                            if not edits_json.get(modified):
                                edits_json[modified] = edited_json
                                save_json(edits_json, edits_filename)
                    else:
                        edits_json = {
                            modified: edited_json
                        }
                        save_json(edits_json, edits_filename)
                if edited_filename != destination_filename:
                    delete_post(base_dir, http_prefix,
                                nickname, domain, edited_filename,
                                debug, recent_posts_cache, True)

            # update the indexes for different timelines
            for boxname in update_index_list:
                fitness_performance(inbox_start_time,
                                    server.fitness,
                                    'INBOX', 'box_' + boxname,
                                    debug)
                inbox_start_time = time.time()
                if not inbox_update_index(boxname, base_dir, handle,
                                          destination_filename, debug):
                    fitness_performance(inbox_start_time,
                                        server.fitness,
                                        'INBOX', 'inbox_update_index',
                                        debug)
                    inbox_start_time = time.time()
                    print('ERROR: unable to update ' + boxname + ' index')
                else:
                    if boxname == 'inbox':
                        if is_recent_post(post_json_object, 3):
                            domain_full = get_full_domain(domain, port)
                            update_speaker(base_dir, http_prefix,
                                           nickname, domain, domain_full,
                                           post_json_object, person_cache,
                                           translate, None, theme_name,
                                           system_language, boxname)
                        fitness_performance(inbox_start_time,
                                            server.fitness,
                                            'INBOX', 'update_speaker',
                                            debug)
                        inbox_start_time = time.time()
                    if not unit_test:
                        if debug:
                            print('Saving inbox post as html to cache')
                        html_cache_start_time = time.time()
                        allow_local_net_access = allow_local_network_access
                        show_pub_date_only = show_published_date_only
                        timezone = \
                            get_account_timezone(base_dir, handle_name,
                                                 domain)
                        fitness_performance(inbox_start_time,
                                            server.fitness,
                                            'INBOX',
                                            'get_account_timezone',
                                            debug)
                        inbox_start_time = time.time()
                        min_img_for_accounts = \
                            server.min_images_for_accounts
                        _inbox_store_post_to_html_cache(
                            recent_posts_cache,
                            max_recent_posts,
                            translate, base_dir,
                            http_prefix,
                            session,
                            cached_webfingers,
                            person_cache,
                            handle_name,
                            domain, port,
                            post_json_object,
                            allow_deletion,
                            boxname,
                            show_pub_date_only,
                            peertube_instances,
                            allow_local_net_access,
                            theme_name,
                            system_language,
                            max_like_count,
                            signing_priv_key_pem,
                            cw_lists,
                            lists_enabled,
                            timezone, mitm,
                            bold_reading,
                            dogwhistles,
                            min_img_for_accounts,
                            buy_sites,
                            server.auto_cw_cache)
                        fitness_performance(
                            inbox_start_time,
                            server.fitness,
                            'INBOX',
                            '_inbox_store_post_to_html_cache',
                            debug)
                        inbox_start_time = time.time()
                        if debug:
                            time_diff = \
                                str(int((time.time() -
                                         html_cache_start_time) * 1000))
                            print('Saved ' +
                                  boxname + ' post as html to cache in ' +
                                  time_diff + ' mS')

            update_conversation(base_dir, handle_name, domain,
                                post_json_object)
            fitness_performance(inbox_start_time,
                                server.fitness,
                                'INBOX', 'update_conversation',
                                debug)
            inbox_start_time = time.time()

            # store the id of the last post made by this actor
            _store_last_post_id(base_dir, nickname, domain,
                                post_json_object)
            fitness_performance(inbox_start_time,
                                server.fitness,
                                'INBOX', '_store_last_post_id',
                                debug)
            inbox_start_time = time.time()

            _inbox_update_calendar_from_tag(base_dir, handle,
                                            post_json_object)
            fitness_performance(inbox_start_time,
                                server.fitness,
                                'INBOX',
                                '_inbox_update_calendar_from_tag',
                                debug)
            inbox_start_time = time.time()
            _inbox_update_calendar_from_event(base_dir, handle,
                                              post_json_object)
            fitness_performance(inbox_start_time,
                                server.fitness,
                                'INBOX',
                                '_inbox_update_calendar_from_event',
                                debug)
            inbox_start_time = time.time()

            store_hash_tags(base_dir, handle_name, domain,
                            http_prefix, domain_full,
                            post_json_object, translate, session)
            fitness_performance(inbox_start_time,
                                server.fitness,
                                'INBOX', 'store_hash_tags',
                                debug)
            inbox_start_time = time.time()

            # send the post out to group members
            if is_group:
                _send_to_group_members(server,
                                       session, session_onion,
                                       session_i2p,
                                       base_dir, handle, port,
                                       post_json_object,
                                       http_prefix, federation_list,
                                       send_threads,
                                       post_log, cached_webfingers,
                                       person_cache, debug,
                                       domain, onion_domain, i2p_domain,
                                       signing_priv_key_pem,
                                       sites_unavailable,
                                       system_language)
                fitness_performance(inbox_start_time,
                                    server.fitness,
                                    'INBOX', '_send_to_group_members',
                                    debug)
                inbox_start_time = time.time()
    else:
        if debug:
            print("Inbox post is not valid " + str(post_json_object))
        fitness_performance(inbox_start_time, server.fitness,
                            'INBOX', 'invalid_post',
                            debug)
        inbox_start_time = time.time()

    # if the post wasn't saved
    if not os.path.isfile(destination_filename):
        if debug:
            print("Inbox post was not saved " + destination_filename)
        return False

    fitness_performance(inbox_start_time,
                        server.fitness,
                        'INBOX', 'end_after_initial',
                        debug)
    inbox_start_time = time.time()

    return True


def clear_queue_items(base_dir: str, queue: []) -> None:
    """Clears the queue for each account

    Empties the in-memory queue list and deletes every queued post
    file found under each account's queue directory.
    """
    removed = 0
    queue.clear()
    dir_str = data_dir(base_dir)
    for _, dirs, _ in os.walk(dir_str):
        for account in dirs:
            queue_dir = dir_str + '/' + account + '/queue'
            if not os.path.isdir(queue_dir):
                continue
            for _, _, queuefiles in os.walk(queue_dir):
                for qfile in queuefiles:
                    try:
                        os.remove(os.path.join(queue_dir, qfile))
                    except OSError:
                        print('EX: clear_queue_items unable to delete ' +
                              qfile)
                    else:
                        # only count files which were actually removed
                        removed += 1
                break
        # only the top level of the accounts directory is scanned
        break
    if removed:
        print('Removed ' + str(removed) + ' inbox queue items')
2021-12-29 21:55:09 +00:00
def _restore_queue_items(base_dir: str, queue: []) -> None:
    """Checks the queue for each account and appends filenames

    Repopulates the in-memory queue list from any queue files left
    on disk, e.g. after a server restart.
    """
    queue.clear()
    dir_str = data_dir(base_dir)
    for _, dirs, _ in os.walk(dir_str):
        for account in dirs:
            queue_dir = dir_str + '/' + account + '/queue'
            if not os.path.isdir(queue_dir):
                continue
            for _, _, queuefiles in os.walk(queue_dir):
                # extend in place so the caller's list object is reused
                queue += [os.path.join(queue_dir, qfile)
                          for qfile in queuefiles]
                break
        # only the top level of the accounts directory is scanned
        break
    if queue:
        print('Restored ' + str(len(queue)) + ' inbox queue items')
2019-09-02 21:52:43 +00:00
2020-04-03 16:27:34 +00:00
2021-12-28 20:32:11 +00:00
def run_inbox_queue_watchdog(project_version: str, httpd) -> None:
    """This tries to keep the inbox thread running even if it dies

    Polls the inbox queue thread every 20 seconds and restarts it
    whenever it has died or a restart has been requested via
    httpd.restart_inbox_queue.
    """
    print('THREAD: Starting inbox queue watchdog ' + project_version)
    # keep a pristine clone so the thread can be recreated later
    original_thread = httpd.thrInboxQueue.clone(run_inbox_queue)
    begin_thread(httpd.thrInboxQueue, 'run_inbox_queue_watchdog')
    while True:
        time.sleep(20)
        if httpd.thrInboxQueue.is_alive() and \
           not httpd.restart_inbox_queue:
            continue
        httpd.restart_inbox_queue_in_progress = True
        httpd.thrInboxQueue.kill()
        print('THREAD: restarting inbox queue watchdog')
        httpd.thrInboxQueue = original_thread.clone(run_inbox_queue)
        # any pending items are discarded; they will be restored
        # from disk by the queue thread itself
        httpd.inbox_queue.clear()
        begin_thread(httpd.thrInboxQueue, 'run_inbox_queue_watchdog 2')
        print('Restarting inbox queue...')
        httpd.restart_inbox_queue_in_progress = False
        httpd.restart_inbox_queue = False
2019-09-02 21:52:43 +00:00
2020-04-03 16:27:34 +00:00
2022-01-02 19:57:50 +00:00
def _inbox_quota_exceeded(queue: {}, queue_filename: str,
queue_json: {}, quotas_daily: {}, quotas_per_min: {},
2021-12-29 21:55:09 +00:00
domain_max_posts_per_day: int,
account_max_posts_per_day: int,
debug: bool) -> bool:
"""limit the number of posts which can arrive per domain per day
"""
2022-01-02 19:57:50 +00:00
post_domain = queue_json['postDomain']
if not post_domain:
return False
2021-12-25 21:13:55 +00:00
if domain_max_posts_per_day > 0:
2022-01-02 19:57:50 +00:00
if quotas_daily['domains'].get(post_domain):
if quotas_daily['domains'][post_domain] > \
2021-12-25 21:13:55 +00:00
domain_max_posts_per_day:
print('Queue: Quota per day - Maximum posts for ' +
2022-01-02 19:57:50 +00:00
post_domain + ' reached (' +
2021-12-25 21:13:55 +00:00
str(domain_max_posts_per_day) + ')')
if len(queue) > 0:
try:
2022-01-02 19:57:50 +00:00
os.remove(queue_filename)
2021-11-25 18:42:38 +00:00
except OSError:
2022-11-13 20:16:54 +00:00
print('EX: _inbox_quota_exceeded unable to delete 1 ' +
2022-01-02 19:57:50 +00:00
str(queue_filename))
queue.pop(0)
return True
2022-01-02 19:57:50 +00:00
quotas_daily['domains'][post_domain] += 1
else:
2022-01-02 19:57:50 +00:00
quotas_daily['domains'][post_domain] = 1
2022-01-02 19:57:50 +00:00
if quotas_per_min['domains'].get(post_domain):
domain_max_posts_per_min = \
2021-12-25 21:13:55 +00:00
int(domain_max_posts_per_day / (24 * 60))
2022-01-02 19:57:50 +00:00
if domain_max_posts_per_min < 5:
domain_max_posts_per_min = 5
if quotas_per_min['domains'][post_domain] > \
domain_max_posts_per_min:
print('Queue: Quota per min - Maximum posts for ' +
2022-01-02 19:57:50 +00:00
post_domain + ' reached (' +
str(domain_max_posts_per_min) + ')')
if len(queue) > 0:
try:
2022-01-02 19:57:50 +00:00
os.remove(queue_filename)
2021-11-25 18:42:38 +00:00
except OSError:
2022-11-13 20:16:54 +00:00
print('EX: _inbox_quota_exceeded unable to delete 2 ' +
2022-01-02 19:57:50 +00:00
str(queue_filename))
queue.pop(0)
return True
2022-01-02 19:57:50 +00:00
quotas_per_min['domains'][post_domain] += 1
else:
2022-01-02 19:57:50 +00:00
quotas_per_min['domains'][post_domain] = 1
2021-12-25 21:18:07 +00:00
if account_max_posts_per_day > 0:
2022-01-02 19:57:50 +00:00
post_handle = queue_json['postNickname'] + '@' + post_domain
if quotas_daily['accounts'].get(post_handle):
if quotas_daily['accounts'][post_handle] > \
2021-12-25 21:18:07 +00:00
account_max_posts_per_day:
print('Queue: Quota account posts per day -' +
' Maximum posts for ' +
2022-01-02 19:57:50 +00:00
post_handle + ' reached (' +
2021-12-25 21:18:07 +00:00
str(account_max_posts_per_day) + ')')
if len(queue) > 0:
try:
2022-01-02 19:57:50 +00:00
os.remove(queue_filename)
2021-11-25 18:42:38 +00:00
except OSError:
2022-11-13 20:16:54 +00:00
print('EX: _inbox_quota_exceeded unable to delete 3 ' +
2022-01-02 19:57:50 +00:00
str(queue_filename))
queue.pop(0)
return True
2022-01-02 19:57:50 +00:00
quotas_daily['accounts'][post_handle] += 1
else:
2022-01-02 19:57:50 +00:00
quotas_daily['accounts'][post_handle] = 1
2022-01-02 19:57:50 +00:00
if quotas_per_min['accounts'].get(post_handle):
account_max_posts_per_min = \
2021-12-25 21:18:07 +00:00
int(account_max_posts_per_day / (24 * 60))
2022-05-30 12:09:18 +00:00
account_max_posts_per_min = max(account_max_posts_per_min, 5)
2022-01-02 19:57:50 +00:00
if quotas_per_min['accounts'][post_handle] > \
account_max_posts_per_min:
print('Queue: Quota account posts per min -' +
' Maximum posts for ' +
2022-01-02 19:57:50 +00:00
post_handle + ' reached (' +
str(account_max_posts_per_min) + ')')
if len(queue) > 0:
try:
2022-01-02 19:57:50 +00:00
os.remove(queue_filename)
2021-11-25 18:42:38 +00:00
except OSError:
2022-11-13 20:16:54 +00:00
print('EX: _inbox_quota_exceeded unable to delete 4 ' +
2022-01-02 19:57:50 +00:00
str(queue_filename))
queue.pop(0)
return True
2022-01-02 19:57:50 +00:00
quotas_per_min['accounts'][post_handle] += 1
else:
2022-01-02 19:57:50 +00:00
quotas_per_min['accounts'][post_handle] = 1
if debug:
2021-12-25 21:18:07 +00:00
if account_max_posts_per_day > 0 or domain_max_posts_per_day > 0:
2022-01-02 19:57:50 +00:00
pprint(quotas_daily)
return False
2022-01-02 19:57:50 +00:00
def _check_json_signature(base_dir: str, queue_json: {}) -> (bool, bool):
"""check if a json signature exists on this post
"""
2022-01-02 19:57:50 +00:00
has_json_signature = False
jwebsig_type = None
original_json = queue_json['original']
if not original_json.get('@context') or \
not original_json.get('signature'):
return has_json_signature, jwebsig_type
if not isinstance(original_json['signature'], dict):
return has_json_signature, jwebsig_type
# see https://tools.ietf.org/html/rfc7515
2022-01-02 19:57:50 +00:00
jwebsig = original_json['signature']
# signature exists and is of the expected type
if not jwebsig.get('type') or \
not jwebsig.get('signatureValue'):
2022-01-02 19:57:50 +00:00
return has_json_signature, jwebsig_type
jwebsig_type = jwebsig['type']
if jwebsig_type == 'RsaSignature2017':
if has_valid_context(original_json):
has_json_signature = True
else:
2022-01-02 19:57:50 +00:00
unknown_contexts_file = \
2024-05-12 12:35:26 +00:00
data_dir(base_dir) + '/unknownContexts.txt'
2022-01-02 19:57:50 +00:00
unknown_context = str(original_json['@context'])
2022-01-02 19:57:50 +00:00
print('unrecognized @context: ' + unknown_context)
2022-01-02 19:57:50 +00:00
already_unknown = False
if os.path.isfile(unknown_contexts_file):
if text_in_file(unknown_context, unknown_contexts_file):
2022-01-02 19:57:50 +00:00
already_unknown = True
2022-01-02 19:57:50 +00:00
if not already_unknown:
2021-11-25 21:18:53 +00:00
try:
2022-06-09 14:46:30 +00:00
with open(unknown_contexts_file, 'a+',
2024-07-14 13:01:46 +00:00
encoding='utf-8') as fp_unknown:
fp_unknown.write(unknown_context + '\n')
2021-11-25 21:18:53 +00:00
except OSError:
2024-07-02 22:16:13 +00:00
print('EX: _check_json_signature unable to append ' +
unknown_contexts_file)
else:
2022-01-02 19:57:50 +00:00
print('Unrecognized jsonld signature type: ' + jwebsig_type)
2022-01-02 19:57:50 +00:00
unknown_signatures_file = \
2024-05-12 12:35:26 +00:00
data_dir(base_dir) + '/unknownJsonSignatures.txt'
2022-01-02 19:57:50 +00:00
already_unknown = False
if os.path.isfile(unknown_signatures_file):
if text_in_file(jwebsig_type, unknown_signatures_file):
2022-01-02 19:57:50 +00:00
already_unknown = True
2022-01-02 19:57:50 +00:00
if not already_unknown:
2021-11-25 21:18:53 +00:00
try:
2022-06-10 14:32:48 +00:00
with open(unknown_signatures_file, 'a+',
2024-07-14 13:01:46 +00:00
encoding='utf-8') as fp_unknown:
fp_unknown.write(jwebsig_type + '\n')
2021-11-25 21:18:53 +00:00
except OSError:
2024-07-02 22:16:13 +00:00
print('EX: _check_json_signature unable to append ' +
unknown_signatures_file)
2022-01-02 19:57:50 +00:00
return has_json_signature, jwebsig_type
def _receive_follow_request(session, session_onion, session_i2p,
                            base_dir: str, http_prefix: str,
                            port: int, send_threads: [], post_log: [],
                            cached_webfingers: {}, person_cache: {},
                            message_json: {}, federation_list: [],
                            debug: bool, project_version: str,
                            max_followers: int,
                            this_domain: str, onion_domain: str,
                            i2p_domain: str, signing_priv_key_pem: str,
                            unit_test: bool, system_language: str,
                            followers_sync_cache: {},
                            sites_unavailable: []) -> bool:
    """Receives a follow request within the POST section of HTTPServer

    Validates the sender and the followed account, enforces follow
    quotas, and then either stores the request for manual approval or
    updates the followers file and sends an Accept immediately.
    Returns False only when the activity is not a Follow/Join, so
    that other handlers can process it; True means it was consumed
    here (whether accepted, stored or rejected).
    """
    # only handle Follow (or Pleroma-style Join) activities
    if not message_json['type'].startswith('Follow'):
        if not message_json['type'].startswith('Join'):
            return False
    print('Receiving follow request')
    if not has_actor(message_json, debug):
        return True
    actor_url = get_actor_from_post(message_json)
    if not has_users_path(actor_url):
        if debug:
            print('DEBUG: ' +
                  'users/profile/author/accounts/channel missing from actor')
        return True
    # --- validate the follower's domain ---
    domain, temp_port = get_domain_from_actor(actor_url)
    if not domain:
        if debug:
            print('DEBUG: receive follow request actor without domain ' +
                  actor_url)
        return True
    from_port = port
    domain_full = get_full_domain(domain, temp_port)
    if temp_port:
        from_port = temp_port
    if not domain_permitted(domain, federation_list):
        if debug:
            print('DEBUG: follower from domain not permitted - ' + domain)
        return True
    nickname = get_nickname_from_actor(actor_url)
    if not nickname:
        # single user instance
        nickname = 'dev'
        if debug:
            print('DEBUG: follow request does not contain a ' +
                  'nickname. Assuming single user instance.')
    # ensure a 'to' field exists and is a list, for later processing
    if not message_json.get('to'):
        message_json['to'] = message_json['object']
    if not isinstance(message_json['to'], list):
        message_json['to'] = [message_json['to']]
    # --- validate the followed account (the object of the Follow) ---
    if not has_users_path(message_json['object']):
        if debug:
            print('DEBUG: users/profile/author/channel/accounts ' +
                  'not found within object')
        return True
    domain_to_follow, temp_port = get_domain_from_actor(message_json['object'])
    if not domain_to_follow:
        if debug:
            print('DEBUG: receive follow request no domain found in object ' +
                  message_json['object'])
        return True
    # switch to the local domain rather than its onion or i2p version
    if onion_domain:
        if domain_to_follow.endswith(onion_domain):
            domain_to_follow = this_domain
    if i2p_domain:
        if domain_to_follow.endswith(i2p_domain):
            domain_to_follow = this_domain
    if not domain_permitted(domain_to_follow, federation_list):
        if debug:
            print('DEBUG: follow domain not permitted ' + domain_to_follow)
        return True
    domain_to_follow_full = get_full_domain(domain_to_follow, temp_port)
    nickname_to_follow = get_nickname_from_actor(message_json['object'])
    if not nickname_to_follow:
        if debug:
            print('DEBUG: follow request does not contain a ' +
                  'nickname for the account followed')
        return True
    if is_system_account(nickname_to_follow):
        if debug:
            print('DEBUG: Cannot follow system account - ' +
                  nickname_to_follow)
        return True
    # enforce the maximum number of followers, if configured
    if max_followers > 0:
        if get_no_of_followers(base_dir, nickname_to_follow,
                               domain_to_follow) > max_followers:
            print('WARN: ' + nickname_to_follow +
                  ' has reached their maximum number of followers')
            return True
    handle_to_follow = nickname_to_follow + '@' + domain_to_follow
    if domain_to_follow == domain:
        # local follow: the followed account must exist on this instance
        handle_dir = acct_handle_dir(base_dir, handle_to_follow)
        if not os.path.isdir(handle_dir):
            if debug:
                print('DEBUG: followed account not found - ' +
                      handle_dir)
            return True
    # a repeated follow from an existing follower triggers a re-sent Accept
    is_already_follower = False
    if is_follower_of_person(base_dir,
                             nickname_to_follow, domain_to_follow_full,
                             nickname, domain_full):
        if debug:
            print('DEBUG: ' + nickname + '@' + domain +
                  ' is already a follower of ' +
                  nickname_to_follow + '@' + domain_to_follow)
        is_already_follower = True
    approve_handle = nickname + '@' + domain_full
    # select the session matching the network (clearnet/onion/i2p)
    # over which the reply should be sent
    curr_session = session
    curr_http_prefix = http_prefix
    curr_domain = domain
    curr_port = from_port
    if onion_domain and \
       not curr_domain.endswith('.onion') and \
       domain_to_follow.endswith('.onion'):
        curr_session = session_onion
        curr_http_prefix = 'http'
        curr_domain = onion_domain
        curr_port = 80
        port = 80
        if debug:
            print('Domain switched from ' + domain + ' to ' + curr_domain)
    elif (i2p_domain and
          not curr_domain.endswith('.i2p') and
          domain_to_follow.endswith('.i2p')):
        curr_session = session_i2p
        curr_http_prefix = 'http'
        curr_domain = i2p_domain
        curr_port = 80
        port = 80
        if debug:
            print('Domain switched from ' + domain + ' to ' + curr_domain)
    # is the actor sending the request valid?
    if not valid_sending_actor(curr_session, base_dir,
                               nickname_to_follow, domain_to_follow,
                               person_cache, message_json,
                               signing_priv_key_pem, debug, unit_test,
                               system_language):
        print('REJECT spam follow request ' + approve_handle)
        return True
    # what is the followers policy?
    if not is_already_follower and \
       follow_approval_required(base_dir, nickname_to_follow,
                                domain_to_follow, debug, approve_handle):
        # --- manual approval path: store the request for later review ---
        print('Follow approval is required')
        # rate-limit pending requests, with a lower threshold for
        # anonymous networks
        if domain.endswith('.onion'):
            if no_of_follow_requests(base_dir,
                                     nickname_to_follow, domain_to_follow,
                                     'onion') > 5:
                print('Too many follow requests from onion addresses')
                return True
        elif domain.endswith('.i2p'):
            if no_of_follow_requests(base_dir,
                                     nickname_to_follow, domain_to_follow,
                                     'i2p') > 5:
                print('Too many follow requests from i2p addresses')
                return True
        else:
            if no_of_follow_requests(base_dir,
                                     nickname_to_follow, domain_to_follow,
                                     '') > 10:
                print('Too many follow requests')
                return True
        # Get the actor for the follower and add it to the cache.
        # Getting their public key has the same result
        if debug:
            print('Obtaining the following actor: ' + actor_url)
        pubkey_result = \
            get_person_pub_key(base_dir, curr_session,
                               actor_url,
                               person_cache, debug, project_version,
                               curr_http_prefix,
                               this_domain, onion_domain,
                               i2p_domain, signing_priv_key_pem)
        if not pubkey_result:
            if debug:
                print('Unable to obtain following actor: ' +
                      actor_url)
        elif isinstance(pubkey_result, dict):
            # a dict return indicates an http error code
            if debug:
                print('http error code trying to obtain following actor: ' +
                      actor_url + ' ' + str(pubkey_result))
        group_account = \
            has_group_type(base_dir, actor_url, person_cache)
        if group_account and is_group_account(base_dir, nickname, domain):
            print('Group cannot follow a group')
            return True
        print('Storing follow request for approval')
        return store_follow_request(base_dir,
                                    nickname_to_follow, domain_to_follow, port,
                                    nickname, domain, from_port,
                                    message_json, debug, actor_url,
                                    group_account)
    else:
        # --- automatic accept path ---
        if is_already_follower:
            print(approve_handle + ' is already a follower. ' +
                  'Re-sending Accept.')
        else:
            print('Follow request does not require approval ' +
                  approve_handle)
        # update the followers
        account_to_be_followed = \
            acct_dir(base_dir, nickname_to_follow, domain_to_follow)
        if os.path.isdir(account_to_be_followed):
            followers_filename = account_to_be_followed + '/followers.txt'
            # for actors which don't follow the mastodon
            # /users/ path convention store the full actor
            if '/users/' not in actor_url:
                approve_handle = actor_url
            # Get the actor for the follower and add it to the cache.
            # Getting their public key has the same result
            if debug:
                print('Obtaining the following actor: ' +
                      actor_url)
            pubkey_result = \
                get_person_pub_key(base_dir, curr_session,
                                   actor_url,
                                   person_cache, debug, project_version,
                                   curr_http_prefix, this_domain,
                                   onion_domain, i2p_domain,
                                   signing_priv_key_pem)
            if not pubkey_result:
                if debug:
                    print('Unable to obtain following actor: ' +
                          actor_url)
            elif isinstance(pubkey_result, dict):
                # a dict return indicates an http error code
                if debug:
                    print('http error code trying to obtain ' +
                          'following actor: ' + actor_url +
                          ' ' + str(pubkey_result))
            print('Updating followers file: ' +
                  followers_filename + ' adding ' + approve_handle)
            if os.path.isfile(followers_filename):
                if not text_in_file(approve_handle, followers_filename):
                    group_account = \
                        has_group_type(base_dir,
                                       actor_url, person_cache)
                    if debug:
                        print(approve_handle + ' / ' + actor_url +
                              ' is Group: ' + str(group_account))
                    if group_account and \
                       is_group_account(base_dir, nickname, domain):
                        print('Group cannot follow a group')
                        return True
                    try:
                        # prepend the new follower so that newest
                        # followers appear first; group accounts are
                        # stored with a '!' prefix
                        with open(followers_filename, 'r+',
                                  encoding='utf-8') as fp_followers:
                            content = fp_followers.read()
                            if approve_handle + '\n' not in content:
                                fp_followers.seek(0, 0)
                                if not group_account:
                                    fp_followers.write(approve_handle +
                                                       '\n' + content)
                                else:
                                    fp_followers.write('!' + approve_handle +
                                                       '\n' + content)
                    except OSError as ex:
                        print('WARN: ' +
                              'Failed to write entry to followers file ' +
                              str(ex))
            else:
                try:
                    with open(followers_filename, 'w+',
                              encoding='utf-8') as fp_followers:
                        fp_followers.write(approve_handle + '\n')
                except OSError:
                    print('EX: _receive_follow_request unable to write ' +
                          followers_filename)
        else:
            print('ACCEPT: Follow Accept account directory not found: ' +
                  account_to_be_followed)
    # send the Accept activity back to the follower
    print('Beginning follow accept')
    return followed_account_accepts(curr_session, base_dir, curr_http_prefix,
                                    nickname_to_follow, domain_to_follow, port,
                                    nickname, curr_domain, curr_port,
                                    actor_url, federation_list,
                                    message_json, send_threads, post_log,
                                    cached_webfingers, person_cache,
                                    debug, project_version, True,
                                    signing_priv_key_pem,
                                    this_domain, onion_domain, i2p_domain,
                                    followers_sync_cache, sites_unavailable,
                                    system_language)
2022-03-14 13:45:42 +00:00
def run_inbox_queue(server,
recent_posts_cache: {}, max_recent_posts: int,
2021-12-28 20:32:11 +00:00
project_version: str,
base_dir: str, http_prefix: str,
2022-01-02 19:57:50 +00:00
send_threads: [], post_log: [],
2021-12-28 20:32:11 +00:00
cached_webfingers: {}, person_cache: {}, queue: [],
domain: str,
onion_domain: str, i2p_domain: str,
port: int, proxy_type: str,
federation_list: [], max_replies: int,
domain_max_posts_per_day: int,
account_max_posts_per_day: int,
allow_deletion: bool, debug: bool, max_mentions: int,
max_emoji: int, translate: {}, unit_test: bool,
yt_replace_domain: str,
twitter_replacement_domain: str,
show_published_date_only: bool,
max_followers: int,
allow_local_network_access: bool,
peertube_instances: [],
verify_all_signatures: bool,
theme_name: str, system_language: str,
max_like_count: int, signing_priv_key_pem: str,
default_reply_interval_hrs: int,
2022-11-19 18:03:55 +00:00
cw_lists: {}, max_hashtags: int) -> None:
"""Processes received items and moves them to the appropriate
directories
2019-07-04 12:23:53 +00:00
"""
2022-04-23 18:26:54 +00:00
inbox_start_time = time.time()
print('Starting new session when starting inbox queue')
2022-04-23 18:26:54 +00:00
fitness_performance(inbox_start_time, server.fitness,
'INBOX', 'start', debug)
2022-04-23 21:20:20 +00:00
inbox_start_time = time.time()
2022-04-23 18:26:54 +00:00
2022-01-02 19:57:50 +00:00
curr_session_time = int(time.time())
session_last_update = 0
2021-12-28 16:56:57 +00:00
session = create_session(proxy_type)
if session:
session_last_update = curr_session_time
2023-04-13 10:09:37 +00:00
session_restart_interval_secs = random.randrange(18000, 20000)
# is this is a clearnet instance then optionally start sessions
# for onion and i2p domains
session_onion = None
session_i2p = None
session_last_update_onion = 0
session_last_update_i2p = 0
if proxy_type != 'tor' and onion_domain:
print('Starting onion session when starting inbox queue')
session_onion = create_session('tor')
if session_onion:
session_onion = curr_session_time
if proxy_type != 'i2p' and i2p_domain:
print('Starting i2p session when starting inbox queue')
session_i2p = create_session('i2p')
if session_i2p:
session_i2p = curr_session_time
2022-01-02 19:57:50 +00:00
inbox_handle = 'inbox@' + domain
2019-07-04 12:23:53 +00:00
if debug:
print('DEBUG: Inbox queue running')
2019-07-12 21:09:23 +00:00
# if queue processing was interrupted (eg server crash)
# then this loads any outstanding items back into the queue
2021-12-29 21:55:09 +00:00
_restore_queue_items(base_dir, queue)
2022-04-23 18:26:54 +00:00
fitness_performance(inbox_start_time, server.fitness,
'INBOX', '_restore_queue_items', debug)
2022-04-23 21:20:20 +00:00
inbox_start_time = time.time()
2019-07-15 10:22:19 +00:00
2020-03-25 10:21:25 +00:00
# keep track of numbers of incoming posts per day
2022-01-02 19:57:50 +00:00
quotas_last_update_daily = int(time.time())
quotas_daily = {
2019-07-15 10:22:19 +00:00
'domains': {},
'accounts': {}
}
2022-01-02 19:57:50 +00:00
quotas_last_update_per_min = int(time.time())
quotas_per_min = {
2020-03-25 10:36:37 +00:00
'domains': {},
'accounts': {}
}
2022-01-02 19:57:50 +00:00
heart_beat_ctr = 0
queue_restore_ctr = 0
2019-09-03 08:46:26 +00:00
# time when the last DM bounce message was sent
# This is in a list so that it can be changed by reference
2021-12-29 21:55:09 +00:00
# within _bounce_dm
2022-01-02 19:57:50 +00:00
last_bounce_message = [int(time.time())]
# how long it takes for broch mode to lapse
2022-01-02 19:57:50 +00:00
broch_lapse_days = random.randrange(7, 14)
2022-04-23 18:26:54 +00:00
fitness_performance(inbox_start_time, server.fitness,
'INBOX', 'while_loop_start', debug)
2022-04-23 21:20:20 +00:00
inbox_start_time = time.time()
2019-07-04 12:23:53 +00:00
while True:
2020-04-16 18:25:59 +00:00
time.sleep(1)
2022-04-23 18:41:17 +00:00
inbox_start_time = time.time()
2022-04-23 18:26:54 +00:00
fitness_performance(inbox_start_time, server.fitness,
'INBOX', 'while_loop_itteration', debug)
2022-04-23 21:20:20 +00:00
inbox_start_time = time.time()
2019-09-03 08:46:26 +00:00
# heartbeat to monitor whether the inbox queue is running
2022-01-02 19:57:50 +00:00
heart_beat_ctr += 1
if heart_beat_ctr >= 10:
2021-02-15 22:26:25 +00:00
# turn off broch mode after it has timed out
2022-01-02 19:57:50 +00:00
if broch_modeLapses(base_dir, broch_lapse_days):
broch_lapse_days = random.randrange(7, 14)
2022-04-23 18:26:54 +00:00
fitness_performance(inbox_start_time, server.fitness,
'INBOX', 'broch_modeLapses', debug)
2022-04-23 21:20:20 +00:00
inbox_start_time = time.time()
2020-04-16 10:14:05 +00:00
print('>>> Heartbeat Q:' + str(len(queue)) + ' ' +
2020-04-03 16:27:34 +00:00
'{:%F %T}'.format(datetime.datetime.now()))
2022-01-02 19:57:50 +00:00
heart_beat_ctr = 0
2020-04-03 16:27:34 +00:00
if len(queue) == 0:
2019-09-03 09:11:33 +00:00
# restore any remaining queue items
2022-01-02 19:57:50 +00:00
queue_restore_ctr += 1
if queue_restore_ctr >= 30:
queue_restore_ctr = 0
2021-12-29 21:55:09 +00:00
_restore_queue_items(base_dir, queue)
2022-04-23 18:26:54 +00:00
fitness_performance(inbox_start_time, server.fitness,
'INBOX', 'restore_queue', debug)
2022-04-23 21:20:20 +00:00
inbox_start_time = time.time()
2020-04-16 09:49:57 +00:00
continue
2020-04-16 10:14:05 +00:00
2020-04-16 09:49:57 +00:00
# oldest item first
queue.sort()
2022-01-02 19:57:50 +00:00
queue_filename = queue[0]
if not os.path.isfile(queue_filename):
2020-04-16 09:49:57 +00:00
print("Queue: queue item rejected because it has no file: " +
2022-01-02 19:57:50 +00:00
queue_filename)
2020-04-16 09:49:57 +00:00
if len(queue) > 0:
queue.pop(0)
continue
2019-07-04 12:23:53 +00:00
2021-03-14 19:22:58 +00:00
if debug:
2022-01-02 19:57:50 +00:00
print('Loading queue item ' + queue_filename)
2020-03-22 21:16:02 +00:00
2020-04-16 09:49:57 +00:00
# Load the queue json
2024-06-20 10:47:58 +00:00
queue_json = load_json(queue_filename)
2022-04-23 18:26:54 +00:00
fitness_performance(inbox_start_time, server.fitness,
'INBOX', 'load_queue_json', debug)
2022-04-23 21:20:20 +00:00
inbox_start_time = time.time()
2022-01-02 19:57:50 +00:00
if not queue_json:
2021-12-28 20:32:11 +00:00
print('Queue: run_inbox_queue failed to load inbox queue item ' +
2022-01-02 19:57:50 +00:00
queue_filename)
2020-04-16 09:49:57 +00:00
# Assume that the file is probably corrupt/unreadable
if len(queue) > 0:
queue.pop(0)
# delete the queue file
2022-01-02 19:57:50 +00:00
if os.path.isfile(queue_filename):
2020-04-16 09:49:57 +00:00
try:
2022-01-02 19:57:50 +00:00
os.remove(queue_filename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-28 20:32:11 +00:00
print('EX: run_inbox_queue 1 unable to delete ' +
2022-01-02 19:57:50 +00:00
str(queue_filename))
2020-04-16 09:49:57 +00:00
continue
2020-03-22 21:16:02 +00:00
curr_time = int(time.time())
2020-04-16 09:49:57 +00:00
# clear the daily quotas for maximum numbers of received posts
2022-01-02 19:57:50 +00:00
if curr_time - quotas_last_update_daily > 60 * 60 * 24:
quotas_daily = {
2020-04-16 09:49:57 +00:00
'domains': {},
'accounts': {}
}
2022-01-02 19:57:50 +00:00
quotas_last_update_daily = curr_time
2020-04-16 09:49:57 +00:00
2022-01-02 19:57:50 +00:00
if curr_time - quotas_last_update_per_min > 60:
# clear the per minute quotas for maximum numbers of received posts
2022-01-02 19:57:50 +00:00
quotas_per_min = {
2020-04-16 09:49:57 +00:00
'domains': {},
'accounts': {}
}
# also check if the json signature enforcement has changed
2022-01-02 19:57:50 +00:00
verify_all_sigs = get_config_param(base_dir, "verifyAllSignatures")
if verify_all_sigs is not None:
verify_all_signatures = verify_all_sigs
# change the last time that this was done
2022-01-02 19:57:50 +00:00
quotas_last_update_per_min = curr_time
2020-04-16 09:49:57 +00:00
2022-01-02 19:57:50 +00:00
if _inbox_quota_exceeded(queue, queue_filename,
queue_json, quotas_daily, quotas_per_min,
2021-12-29 21:55:09 +00:00
domain_max_posts_per_day,
account_max_posts_per_day, debug):
continue
2022-04-23 18:26:54 +00:00
fitness_performance(inbox_start_time, server.fitness,
'INBOX', '_inbox_quota_exceeded', debug)
2022-04-23 21:20:20 +00:00
inbox_start_time = time.time()
2019-07-15 10:22:19 +00:00
# recreate the session periodically
2023-04-13 09:41:42 +00:00
time_diff = curr_time - session_last_update
if not session or time_diff > session_restart_interval_secs:
print('Regenerating inbox queue session at 5hr interval')
session = create_session(proxy_type)
if session:
session_last_update = curr_time
else:
print('WARN: inbox session not created')
continue
if onion_domain:
2023-04-13 09:41:42 +00:00
time_diff = curr_time - session_last_update_onion
if not session_onion or time_diff > session_restart_interval_secs:
print('Regenerating inbox queue onion session at 5hr interval')
session_onion = create_session('tor')
if session_onion:
session_last_update_onion = curr_time
else:
print('WARN: inbox onion session not created')
continue
if i2p_domain:
2023-04-13 09:41:42 +00:00
time_diff = curr_time - session_last_update_i2p
if not session_i2p or time_diff > session_restart_interval_secs:
2023-04-13 10:09:37 +00:00
print('Regenerating inbox queue i2p session at 5hr interval')
session_i2p = create_session('i2p')
if session_i2p:
session_last_update_i2p = curr_time
else:
print('WARN: inbox i2p session not created')
continue
2022-04-23 18:26:54 +00:00
fitness_performance(inbox_start_time, server.fitness,
'INBOX', 'recreate_session', debug)
2022-04-23 21:20:20 +00:00
inbox_start_time = time.time()
curr_session = session
if queue_json.get('actor'):
if isinstance(queue_json['actor'], str):
sender_domain, _ = get_domain_from_actor(queue_json['actor'])
2023-01-15 14:33:18 +00:00
if sender_domain:
if sender_domain.endswith('.onion') and \
session_onion and proxy_type != 'tor':
curr_session = session_onion
elif (sender_domain.endswith('.i2p') and
session_i2p and proxy_type != 'i2p'):
curr_session = session_i2p
2022-01-02 19:57:50 +00:00
if debug and queue_json.get('actor'):
print('Obtaining public key for actor ' + queue_json['actor'])
2020-03-22 21:16:02 +00:00
2022-04-23 18:26:54 +00:00
fitness_performance(inbox_start_time, server.fitness,
'INBOX', 'start_get_pubkey', debug)
2022-04-23 21:20:20 +00:00
inbox_start_time = time.time()
2020-04-16 09:49:57 +00:00
# Try a few times to obtain the public key
2022-01-02 19:57:50 +00:00
pub_key = None
key_id = None
2020-04-16 09:49:57 +00:00
for tries in range(8):
2022-01-02 19:57:50 +00:00
key_id = None
signature_params = \
queue_json['httpHeaders']['signature'].split(',')
for signature_item in signature_params:
if signature_item.startswith('keyId='):
if '"' in signature_item:
key_id = signature_item.split('"')[1]
2020-04-16 09:49:57 +00:00
break
2022-01-02 19:57:50 +00:00
if not key_id:
2020-04-16 09:49:57 +00:00
print('Queue: No keyId in signature: ' +
2022-01-02 19:57:50 +00:00
queue_json['httpHeaders']['signature'])
pub_key = None
2020-04-16 09:49:57 +00:00
break
2022-01-02 19:57:50 +00:00
pub_key = \
get_person_pub_key(base_dir, curr_session, key_id,
2021-12-29 21:55:09 +00:00
person_cache, debug,
project_version, http_prefix,
domain, onion_domain, i2p_domain,
signing_priv_key_pem)
2022-04-23 18:26:54 +00:00
fitness_performance(inbox_start_time, server.fitness,
'INBOX', 'get_person_pub_key', debug)
2022-04-23 21:20:20 +00:00
inbox_start_time = time.time()
2022-01-02 19:57:50 +00:00
if pub_key:
if not isinstance(pub_key, dict):
if debug:
print('DEBUG: public key: ' + str(pub_key))
else:
if debug:
print('DEBUG: http code error for public key: ' +
str(pub_key))
2023-08-13 10:58:28 +00:00
pub_key = None
2020-04-16 09:49:57 +00:00
break
2019-07-04 17:31:41 +00:00
2019-08-15 08:36:49 +00:00
if debug:
2020-04-16 09:49:57 +00:00
print('DEBUG: Retry ' + str(tries+1) +
2022-01-02 19:57:50 +00:00
' obtaining public key for ' + key_id)
2020-04-16 18:25:59 +00:00
time.sleep(1)
2019-07-04 12:23:53 +00:00
2022-01-02 19:57:50 +00:00
if not pub_key:
2021-03-14 19:39:00 +00:00
if debug:
2022-01-02 19:57:50 +00:00
print('Queue: public key could not be obtained from ' + key_id)
if os.path.isfile(queue_filename):
try:
2022-01-02 19:57:50 +00:00
os.remove(queue_filename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-28 20:32:11 +00:00
print('EX: run_inbox_queue 2 unable to delete ' +
2022-01-02 19:57:50 +00:00
str(queue_filename))
2020-04-16 09:49:57 +00:00
if len(queue) > 0:
queue.pop(0)
continue
2021-01-03 09:44:33 +00:00
# check the http header signature
2022-04-23 18:26:54 +00:00
fitness_performance(inbox_start_time, server.fitness,
'INBOX', 'begin_check_signature', debug)
2022-04-23 21:20:20 +00:00
inbox_start_time = time.time()
2020-04-16 09:49:57 +00:00
if debug:
2021-01-03 09:44:33 +00:00
print('DEBUG: checking http header signature')
2022-01-02 19:57:50 +00:00
pprint(queue_json['httpHeaders'])
post_str = json.dumps(queue_json['post'])
http_signature_failed = False
if not verify_post_headers(http_prefix, pub_key,
queue_json['httpHeaders'],
queue_json['path'], False,
queue_json['digest'],
post_str, debug):
http_signature_failed = True
2020-04-16 09:49:57 +00:00
print('Queue: Header signature check failed')
2022-01-02 19:57:50 +00:00
pprint(queue_json['httpHeaders'])
else:
if debug:
print('DEBUG: http header signature check success')
2022-04-23 18:26:54 +00:00
fitness_performance(inbox_start_time, server.fitness,
'INBOX', 'verify_post_headers', debug)
2022-04-23 21:20:20 +00:00
inbox_start_time = time.time()
2020-03-22 21:16:02 +00:00
# check if a json signature exists on this post
2022-01-02 19:57:50 +00:00
has_json_signature, jwebsig_type = \
_check_json_signature(base_dir, queue_json)
2022-04-23 18:26:54 +00:00
fitness_performance(inbox_start_time, server.fitness,
'INBOX', '_check_json_signature', debug)
2022-04-23 21:20:20 +00:00
inbox_start_time = time.time()
2021-01-05 10:54:50 +00:00
# strict enforcement of json signatures
2022-01-02 19:57:50 +00:00
if not has_json_signature:
if http_signature_failed:
if jwebsig_type:
2021-02-14 15:45:42 +00:00
print('Queue: Header signature check failed and does ' +
'not have a recognised jsonld signature type ' +
2022-01-02 19:57:50 +00:00
jwebsig_type)
2021-02-14 15:45:42 +00:00
else:
print('Queue: Header signature check failed and ' +
'does not have jsonld signature')
if debug:
2022-01-02 19:57:50 +00:00
pprint(queue_json['httpHeaders'])
2021-12-25 18:40:32 +00:00
if verify_all_signatures:
2022-01-02 19:57:50 +00:00
original_json = queue_json['original']
print('Queue: inbox post does not have a jsonld signature ' +
2022-01-02 19:57:50 +00:00
key_id + ' ' + str(original_json))
2022-01-02 19:57:50 +00:00
if http_signature_failed or verify_all_signatures:
if os.path.isfile(queue_filename):
try:
2022-01-02 19:57:50 +00:00
os.remove(queue_filename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-28 20:32:11 +00:00
print('EX: run_inbox_queue 3 unable to delete ' +
2022-01-02 19:57:50 +00:00
str(queue_filename))
if len(queue) > 0:
queue.pop(0)
continue
else:
2022-01-02 19:57:50 +00:00
if http_signature_failed or verify_all_signatures:
# use the original json message received, not one which
# may have been modified along the way
2022-01-02 19:57:50 +00:00
original_json = queue_json['original']
if not verify_json_signature(original_json, pub_key):
if debug:
print('WARN: jsonld inbox signature check failed ' +
2022-01-02 19:57:50 +00:00
key_id + ' ' + pub_key + ' ' +
str(original_json))
else:
print('WARN: jsonld inbox signature check failed ' +
2022-01-02 19:57:50 +00:00
key_id)
if os.path.isfile(queue_filename):
try:
2022-01-02 19:57:50 +00:00
os.remove(queue_filename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-28 20:32:11 +00:00
print('EX: run_inbox_queue 4 unable to delete ' +
2022-01-02 19:57:50 +00:00
str(queue_filename))
if len(queue) > 0:
queue.pop(0)
2022-04-23 18:26:54 +00:00
fitness_performance(inbox_start_time, server.fitness,
'INBOX', 'not_verify_signature',
debug)
2022-04-23 21:20:20 +00:00
inbox_start_time = time.time()
continue
2022-05-30 12:09:18 +00:00
if http_signature_failed:
print('jsonld inbox signature check success ' +
'via relay ' + key_id)
else:
2022-05-30 12:09:18 +00:00
print('jsonld inbox signature check success ' + key_id)
fitness_performance(inbox_start_time, server.fitness,
'INBOX', 'verify_signature_success',
debug)
inbox_start_time = time.time()
2024-05-12 12:35:26 +00:00
dogwhistles_filename = data_dir(base_dir) + '/dogwhistles.txt'
2022-07-05 14:40:26 +00:00
if not os.path.isfile(dogwhistles_filename):
dogwhistles_filename = base_dir + '/default_dogwhistles.txt'
dogwhistles = load_dogwhistles(dogwhistles_filename)
2020-04-16 09:49:57 +00:00
# set the id to the same as the post filename
# This makes the filename and the id consistent
2022-01-02 19:57:50 +00:00
# if queue_json['post'].get('id'):
# queue_json['post']['id'] = queue_json['id']
2020-04-16 09:49:57 +00:00
2024-08-31 22:05:31 +00:00
if receive_undo(base_dir, queue_json['post'],
debug, domain, onion_domain, i2p_domain):
2022-01-02 19:57:50 +00:00
print('Queue: Undo accepted from ' + key_id)
if os.path.isfile(queue_filename):
try:
2022-01-02 19:57:50 +00:00
os.remove(queue_filename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-28 20:32:11 +00:00
print('EX: run_inbox_queue 5 unable to delete ' +
2022-01-02 19:57:50 +00:00
str(queue_filename))
2020-04-16 09:49:57 +00:00
if len(queue) > 0:
queue.pop(0)
2022-04-23 18:26:54 +00:00
fitness_performance(inbox_start_time, server.fitness,
'INBOX', '_receive_undo',
debug)
2022-04-23 21:20:20 +00:00
inbox_start_time = time.time()
2020-04-16 09:49:57 +00:00
continue
2019-07-17 10:34:00 +00:00
2020-04-16 09:49:57 +00:00
if debug:
print('DEBUG: checking for follow requests')
if _receive_follow_request(curr_session, session_onion, session_i2p,
2021-12-29 21:55:09 +00:00
base_dir, http_prefix, port,
2022-01-02 19:57:50 +00:00
send_threads, post_log,
2021-12-29 21:55:09 +00:00
cached_webfingers,
person_cache,
2022-01-02 19:57:50 +00:00
queue_json['post'],
2021-12-29 21:55:09 +00:00
federation_list,
debug, project_version,
max_followers, domain,
onion_domain, i2p_domain,
2022-09-25 17:26:11 +00:00
signing_priv_key_pem, unit_test,
system_language,
2023-09-15 21:04:31 +00:00
server.followers_sync_cache,
server.sites_unavailable):
2022-01-02 19:57:50 +00:00
if os.path.isfile(queue_filename):
try:
2022-01-02 19:57:50 +00:00
os.remove(queue_filename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-28 20:32:11 +00:00
print('EX: run_inbox_queue 6 unable to delete ' +
2022-01-02 19:57:50 +00:00
str(queue_filename))
2020-04-16 09:49:57 +00:00
if len(queue) > 0:
queue.pop(0)
2022-01-02 19:57:50 +00:00
print('Queue: Follow activity for ' + key_id +
2020-06-28 19:04:43 +00:00
' removed from queue')
2022-04-23 18:26:54 +00:00
fitness_performance(inbox_start_time, server.fitness,
'INBOX', '_receive_follow_request',
debug)
2022-04-23 21:20:20 +00:00
inbox_start_time = time.time()
2020-04-16 09:49:57 +00:00
continue
2022-05-30 12:09:18 +00:00
if debug:
print('DEBUG: No follow requests')
2020-04-16 09:49:57 +00:00
2022-06-09 10:28:03 +00:00
if receive_accept_reject(base_dir, domain, queue_json['post'],
federation_list, debug,
domain, onion_domain, i2p_domain):
2022-01-02 19:57:50 +00:00
print('Queue: Accept/Reject received from ' + key_id)
if os.path.isfile(queue_filename):
try:
2022-01-02 19:57:50 +00:00
os.remove(queue_filename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-28 20:32:11 +00:00
print('EX: run_inbox_queue 7 unable to delete ' +
2022-01-02 19:57:50 +00:00
str(queue_filename))
2020-04-16 09:49:57 +00:00
if len(queue) > 0:
queue.pop(0)
2022-04-23 18:26:54 +00:00
fitness_performance(inbox_start_time, server.fitness,
'INBOX', 'receive_accept_reject',
debug)
2023-09-23 14:42:04 +00:00
inbox_start_time = time.time()
continue
2024-08-31 22:05:31 +00:00
if receive_move_activity(curr_session, base_dir,
http_prefix, domain, port,
cached_webfingers,
person_cache,
queue_json['post'],
queue_json['postNickname'],
debug,
signing_priv_key_pem,
send_threads,
post_log,
federation_list,
onion_domain,
i2p_domain,
server.sites_unavailable,
server.blocked_cache,
server.block_federated,
server.system_language):
2023-09-23 14:42:04 +00:00
if debug:
print('Queue: _receive_move_activity ' + key_id)
if os.path.isfile(queue_filename):
try:
os.remove(queue_filename)
except OSError:
print('EX: run_inbox_queue 8 unable to receive move ' +
str(queue_filename))
if len(queue) > 0:
queue.pop(0)
fitness_performance(inbox_start_time, server.fitness,
'INBOX', '_receive_move_activity',
debug)
2022-04-23 21:20:20 +00:00
inbox_start_time = time.time()
2020-04-16 09:49:57 +00:00
continue
2019-07-06 15:17:21 +00:00
2024-08-31 22:05:31 +00:00
if receive_update_activity(recent_posts_cache, curr_session,
base_dir, http_prefix,
domain, port,
cached_webfingers,
person_cache,
queue_json['post'],
queue_json['postNickname'],
debug,
max_mentions, max_emoji,
allow_local_network_access,
system_language,
signing_priv_key_pem,
max_recent_posts, translate,
allow_deletion,
yt_replace_domain,
twitter_replacement_domain,
show_published_date_only,
peertube_instances,
theme_name, max_like_count,
cw_lists, dogwhistles,
server.min_images_for_accounts,
max_hashtags, server.buy_sites,
server.auto_cw_cache,
onion_domain,
i2p_domain):
2021-03-14 19:46:46 +00:00
if debug:
2022-01-02 19:57:50 +00:00
print('Queue: Update accepted from ' + key_id)
if os.path.isfile(queue_filename):
try:
2022-01-02 19:57:50 +00:00
os.remove(queue_filename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-28 20:32:11 +00:00
print('EX: run_inbox_queue 8 unable to delete ' +
2022-01-02 19:57:50 +00:00
str(queue_filename))
2020-04-16 09:49:57 +00:00
if len(queue) > 0:
queue.pop(0)
2022-04-23 18:26:54 +00:00
fitness_performance(inbox_start_time, server.fitness,
'INBOX', '_receive_update_activity',
debug)
2022-04-23 21:20:20 +00:00
inbox_start_time = time.time()
2020-04-16 09:49:57 +00:00
continue
# get recipients list
2022-01-02 19:57:50 +00:00
recipients_dict, recipients_dict_followers = \
_inbox_post_recipients(base_dir, queue_json['post'],
2024-02-06 13:21:36 +00:00
domain, port, debug,
onion_domain, i2p_domain)
2022-01-02 19:57:50 +00:00
if len(recipients_dict.items()) == 0 and \
len(recipients_dict_followers.items()) == 0:
2021-03-14 19:22:58 +00:00
if debug:
print('Queue: no recipients were resolved ' +
'for post arriving in inbox')
2022-01-02 19:57:50 +00:00
if os.path.isfile(queue_filename):
try:
2022-01-02 19:57:50 +00:00
os.remove(queue_filename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-28 20:32:11 +00:00
print('EX: run_inbox_queue 9 unable to delete ' +
2022-01-02 19:57:50 +00:00
str(queue_filename))
2020-04-16 09:49:57 +00:00
if len(queue) > 0:
queue.pop(0)
continue
2022-04-23 19:26:46 +00:00
fitness_performance(inbox_start_time, server.fitness,
'INBOX', '_post_recipients',
debug)
2022-04-23 21:20:20 +00:00
inbox_start_time = time.time()
2019-07-09 14:20:23 +00:00
2020-04-16 09:49:57 +00:00
# if there are only a small number of followers then
# process them as if they were specifically
# addresses to particular accounts
2022-01-02 19:57:50 +00:00
no_of_follow_items = len(recipients_dict_followers.items())
if no_of_follow_items > 0:
2020-04-16 09:49:57 +00:00
# always deliver to individual inboxes
2022-01-02 19:57:50 +00:00
if no_of_follow_items < 999999:
2020-04-16 09:49:57 +00:00
if debug:
2022-01-02 19:57:50 +00:00
print('DEBUG: moving ' + str(no_of_follow_items) +
2020-04-16 09:49:57 +00:00
' inbox posts addressed to followers')
2022-01-02 19:57:50 +00:00
for handle, post_item in recipients_dict_followers.items():
recipients_dict[handle] = post_item
recipients_dict_followers = {}
# recipients_list = [recipients_dict, recipients_dict_followers]
2020-04-16 09:49:57 +00:00
if debug:
print('*************************************')
print('Resolved recipients list:')
2022-01-02 19:57:50 +00:00
pprint(recipients_dict)
2020-04-16 09:49:57 +00:00
print('Resolved followers list:')
2022-01-02 19:57:50 +00:00
pprint(recipients_dict_followers)
2020-04-16 09:49:57 +00:00
print('*************************************')
# Copy any posts addressed to followers into the shared inbox
# this avoid copying file multiple times to potentially many
# individual inboxes
2022-01-02 19:57:50 +00:00
if len(recipients_dict_followers) > 0:
shared_inbox_post_filename = \
queue_json['destination'].replace(inbox_handle, inbox_handle)
if not os.path.isfile(shared_inbox_post_filename):
save_json(queue_json['post'], shared_inbox_post_filename)
2022-04-23 18:26:54 +00:00
fitness_performance(inbox_start_time, server.fitness,
'INBOX', 'shared_inbox_save',
debug)
2022-04-23 21:20:20 +00:00
inbox_start_time = time.time()
2020-04-16 09:49:57 +00:00
2021-12-30 13:56:38 +00:00
lists_enabled = get_config_param(base_dir, "listsEnabled")
2023-04-28 17:44:35 +00:00
dm_license_url = ''
2022-04-23 19:26:46 +00:00
fitness_performance(inbox_start_time, server.fitness,
'INBOX', 'distribute_post',
debug)
2022-04-23 21:20:20 +00:00
inbox_start_time = time.time()
2022-04-23 19:26:46 +00:00
2020-04-16 09:49:57 +00:00
# for posts addressed to specific accounts
2022-01-02 19:57:50 +00:00
for handle, _ in recipients_dict.items():
2020-04-16 09:49:57 +00:00
destination = \
2022-01-02 19:57:50 +00:00
queue_json['destination'].replace(inbox_handle, handle)
languages_understood = []
mitm = False
if queue_json.get('mitm'):
mitm = True
bold_reading = False
bold_reading_filename = \
2022-12-18 15:29:54 +00:00
acct_handle_dir(base_dir, handle) + '/.boldReading'
if os.path.isfile(bold_reading_filename):
bold_reading = True
2022-04-23 18:26:54 +00:00
_inbox_after_initial(server, inbox_start_time,
2022-03-14 13:45:42 +00:00
recent_posts_cache,
2021-12-29 21:55:09 +00:00
max_recent_posts,
session, session_onion, session_i2p,
key_id, handle,
2022-01-02 19:57:50 +00:00
queue_json['post'],
2021-12-29 21:55:09 +00:00
base_dir, http_prefix,
2022-01-02 19:57:50 +00:00
send_threads, post_log,
2021-12-29 21:55:09 +00:00
cached_webfingers,
2022-06-14 11:42:45 +00:00
person_cache, domain,
2021-12-29 21:55:09 +00:00
onion_domain, i2p_domain,
2022-06-14 11:42:45 +00:00
port, federation_list,
2021-12-29 21:55:09 +00:00
debug,
2022-01-02 19:57:50 +00:00
queue_filename, destination,
2021-12-29 21:55:09 +00:00
max_replies, allow_deletion,
max_mentions, max_emoji,
translate, unit_test,
yt_replace_domain,
twitter_replacement_domain,
show_published_date_only,
allow_local_network_access,
peertube_instances,
2022-01-02 19:57:50 +00:00
last_bounce_message,
2021-12-29 21:55:09 +00:00
theme_name, system_language,
max_like_count,
signing_priv_key_pem,
default_reply_interval_hrs,
cw_lists, lists_enabled,
2023-04-28 17:44:35 +00:00
dm_license_url,
languages_understood, mitm,
2022-11-19 18:03:55 +00:00
bold_reading, dogwhistles,
2023-09-15 21:04:31 +00:00
max_hashtags, server.buy_sites,
server.sites_unavailable)
2022-04-23 18:26:54 +00:00
fitness_performance(inbox_start_time, server.fitness,
2022-04-23 19:55:30 +00:00
'INBOX', 'handle_after_initial',
2022-04-23 18:26:54 +00:00
debug)
2022-04-23 21:20:20 +00:00
inbox_start_time = time.time()
2020-09-27 18:35:35 +00:00
if debug:
2022-01-02 19:57:50 +00:00
pprint(queue_json['post'])
2021-03-14 19:22:58 +00:00
print('Queue: Queue post accepted')
2022-01-02 19:57:50 +00:00
if os.path.isfile(queue_filename):
try:
2022-01-02 19:57:50 +00:00
os.remove(queue_filename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-28 20:32:11 +00:00
print('EX: run_inbox_queue 10 unable to delete ' +
2022-01-02 19:57:50 +00:00
str(queue_filename))
2020-04-16 09:49:57 +00:00
if len(queue) > 0:
queue.pop(0)