2020-04-03 16:27:34 +00:00
|
|
|
__filename__ = "inbox.py"
|
|
|
|
__author__ = "Bob Mottram"
|
|
|
|
__license__ = "AGPL3+"
|
2022-02-03 13:58:20 +00:00
|
|
|
__version__ = "1.3.0"
|
2020-04-03 16:27:34 +00:00
|
|
|
__maintainer__ = "Bob Mottram"
|
2021-09-10 16:14:50 +00:00
|
|
|
__email__ = "bob@libreserver.org"
|
2020-04-03 16:27:34 +00:00
|
|
|
__status__ = "Production"
|
2021-06-15 15:08:12 +00:00
|
|
|
__module_group__ = "Timeline"
|
2019-06-28 21:59:54 +00:00
|
|
|
|
|
|
|
import json
|
|
|
|
import os
|
2019-06-29 10:08:59 +00:00
|
|
|
import datetime
|
2019-07-04 12:23:53 +00:00
|
|
|
import time
|
2021-06-05 13:36:03 +00:00
|
|
|
import random
|
2021-12-29 21:55:09 +00:00
|
|
|
from linked_data_sig import verify_json_signature
|
|
|
|
from languages import understood_post_language
|
|
|
|
from like import update_likes_collection
|
|
|
|
from reaction import update_reaction_collection
|
|
|
|
from reaction import valid_emoji_content
|
2022-02-25 19:12:40 +00:00
|
|
|
from utils import get_account_timezone
|
2021-12-27 18:28:26 +00:00
|
|
|
from utils import domain_permitted
|
2021-12-26 17:33:24 +00:00
|
|
|
from utils import is_group_account
|
2021-12-27 15:41:04 +00:00
|
|
|
from utils import is_system_account
|
2021-12-26 19:15:36 +00:00
|
|
|
from utils import invalid_ciphertext
|
2021-12-27 15:43:22 +00:00
|
|
|
from utils import remove_html
|
2021-12-28 14:01:37 +00:00
|
|
|
from utils import file_last_modified
|
2021-12-26 17:12:07 +00:00
|
|
|
from utils import has_object_string
|
2021-12-26 15:54:46 +00:00
|
|
|
from utils import has_object_string_object
|
2021-12-28 10:25:50 +00:00
|
|
|
from utils import get_reply_interval_hours
|
2021-12-28 12:15:46 +00:00
|
|
|
from utils import can_reply_to
|
2021-12-26 12:24:40 +00:00
|
|
|
from utils import get_user_paths
|
2021-12-26 11:29:40 +00:00
|
|
|
from utils import get_base_content_from_post
|
2021-12-26 12:02:29 +00:00
|
|
|
from utils import acct_dir
|
2021-12-26 18:17:37 +00:00
|
|
|
from utils import remove_domain_port
|
2021-12-26 18:14:21 +00:00
|
|
|
from utils import get_port_from_domain
|
2021-12-26 10:57:03 +00:00
|
|
|
from utils import has_object_dict
|
2021-12-26 19:09:04 +00:00
|
|
|
from utils import dm_allowed_from_domain
|
2021-12-26 20:43:03 +00:00
|
|
|
from utils import is_recent_post
|
2021-12-26 14:08:58 +00:00
|
|
|
from utils import get_config_param
|
2021-12-26 12:19:00 +00:00
|
|
|
from utils import has_users_path
|
2021-12-26 12:31:47 +00:00
|
|
|
from utils import valid_post_date
|
2021-12-26 12:45:03 +00:00
|
|
|
from utils import get_full_domain
|
2021-12-27 11:20:57 +00:00
|
|
|
from utils import remove_id_ending
|
2021-12-27 17:20:01 +00:00
|
|
|
from utils import get_protocol_prefixes
|
2021-12-28 13:49:44 +00:00
|
|
|
from utils import is_blog_post
|
2021-12-27 17:35:58 +00:00
|
|
|
from utils import remove_avatar_from_cache
|
2021-12-28 14:41:10 +00:00
|
|
|
from utils import is_public_post
|
2021-12-26 23:41:34 +00:00
|
|
|
from utils import get_cached_post_filename
|
2021-12-27 11:05:24 +00:00
|
|
|
from utils import remove_post_from_cache
|
2021-12-27 20:47:05 +00:00
|
|
|
from utils import url_permitted
|
2021-12-27 18:00:51 +00:00
|
|
|
from utils import create_inbox_queue_dir
|
2021-12-27 17:42:35 +00:00
|
|
|
from utils import get_status_number
|
2021-12-27 19:05:25 +00:00
|
|
|
from utils import get_domain_from_actor
|
2021-12-27 22:19:18 +00:00
|
|
|
from utils import get_nickname_from_actor
|
2021-12-26 20:36:08 +00:00
|
|
|
from utils import locate_post
|
2021-12-28 14:55:45 +00:00
|
|
|
from utils import delete_post
|
2021-12-28 13:12:10 +00:00
|
|
|
from utils import remove_moderation_post_from_index
|
2021-12-26 15:13:34 +00:00
|
|
|
from utils import load_json
|
2021-12-26 14:47:21 +00:00
|
|
|
from utils import save_json
|
2021-12-27 23:23:07 +00:00
|
|
|
from utils import undo_likes_collection_entry
|
2021-12-27 23:02:50 +00:00
|
|
|
from utils import undo_reaction_collection_entry
|
2021-12-26 17:53:07 +00:00
|
|
|
from utils import has_group_type
|
2021-12-26 10:19:59 +00:00
|
|
|
from utils import local_actor_url
|
2021-12-26 17:12:07 +00:00
|
|
|
from utils import has_object_stringType
|
2022-01-13 15:10:41 +00:00
|
|
|
from utils import valid_hash_tag
|
2021-12-29 21:55:09 +00:00
|
|
|
from categories import get_hashtag_categories
|
|
|
|
from categories import set_hashtag_category
|
|
|
|
from httpsig import get_digest_algorithm_from_headers
|
|
|
|
from httpsig import verify_post_headers
|
2021-12-28 16:56:57 +00:00
|
|
|
from session import create_session
|
2021-12-28 20:32:11 +00:00
|
|
|
from follow import follower_approval_active
|
|
|
|
from follow import is_following_actor
|
2021-12-29 21:55:09 +00:00
|
|
|
from follow import get_followers_of_actor
|
|
|
|
from follow import unfollower_of_account
|
|
|
|
from follow import is_follower_of_person
|
|
|
|
from follow import followed_account_accepts
|
|
|
|
from follow import store_follow_request
|
|
|
|
from follow import no_of_follow_requests
|
|
|
|
from follow import get_no_of_followers
|
|
|
|
from follow import follow_approval_required
|
2019-07-04 14:36:29 +00:00
|
|
|
from pprint import pprint
|
2021-12-29 21:55:09 +00:00
|
|
|
from cache import store_person_in_cache
|
|
|
|
from cache import get_person_pub_key
|
|
|
|
from acceptreject import receive_accept_reject
|
|
|
|
from bookmarks import update_bookmarks_collection
|
|
|
|
from bookmarks import undo_bookmarks_collection_entry
|
|
|
|
from blocking import is_blocked
|
2021-12-28 21:55:38 +00:00
|
|
|
from blocking import is_blocked_domain
|
2021-12-25 18:38:19 +00:00
|
|
|
from blocking import broch_modeLapses
|
2021-12-29 21:55:09 +00:00
|
|
|
from filters import is_filtered
|
2021-12-26 23:41:34 +00:00
|
|
|
from utils import update_announce_collection
|
2021-12-27 10:55:48 +00:00
|
|
|
from utils import undo_announce_collection_entry
|
2021-12-27 21:42:08 +00:00
|
|
|
from utils import dangerous_markup
|
2021-12-26 20:12:18 +00:00
|
|
|
from utils import is_dm
|
2021-12-26 19:36:40 +00:00
|
|
|
from utils import is_reply
|
2021-12-26 17:15:04 +00:00
|
|
|
from utils import has_actor
|
2021-12-29 21:55:09 +00:00
|
|
|
from httpsig import message_content_digest
|
|
|
|
from posts import edited_post_filename
|
2021-12-28 18:13:52 +00:00
|
|
|
from posts import save_post_to_box
|
2021-12-29 21:55:09 +00:00
|
|
|
from posts import is_create_inside_announce
|
2021-12-28 19:33:29 +00:00
|
|
|
from posts import create_direct_message_post
|
2021-12-29 21:55:09 +00:00
|
|
|
from posts import valid_content_warning
|
|
|
|
from posts import download_announce
|
|
|
|
from posts import is_muted_conv
|
|
|
|
from posts import is_image_media
|
|
|
|
from posts import send_signed_json
|
|
|
|
from posts import send_to_followers_thread
|
|
|
|
from webapp_post import individual_post_as_html
|
|
|
|
from question import question_update_votes
|
2021-12-28 21:36:27 +00:00
|
|
|
from media import replace_you_tube
|
|
|
|
from media import replace_twitter
|
2021-12-29 21:55:09 +00:00
|
|
|
from git import is_git_patch
|
|
|
|
from git import receive_git_patch
|
|
|
|
from followingCalendar import receiving_calendar_events
|
|
|
|
from happening import save_event_post
|
|
|
|
from delete import remove_old_hashtags
|
|
|
|
from categories import guess_hashtag_category
|
|
|
|
from context import has_valid_context
|
|
|
|
from speaker import update_speaker
|
|
|
|
from announce import is_self_announce
|
|
|
|
from announce import create_announce
|
|
|
|
from notifyOnPost import notify_when_person_posts
|
|
|
|
from conversation import update_conversation
|
|
|
|
from webapp_hashtagswarm import html_hash_tag_swarm
|
|
|
|
from person import valid_sending_actor
|
|
|
|
|
|
|
|
|
|
|
|
def _store_last_post_id(base_dir: str, nickname: str, domain: str,
|
|
|
|
post_json_object: {}) -> None:
|
2021-10-18 15:20:22 +00:00
|
|
|
"""Stores the id of the last post made by an actor
|
2021-10-18 19:42:31 +00:00
|
|
|
When a new post arrives this allows it to be compared against the last
|
|
|
|
to see if it is an edited post.
|
|
|
|
It would be great if edited posts contained a back reference id to the
|
|
|
|
source but we don't live in that ideal world.
|
2021-10-18 15:20:22 +00:00
|
|
|
"""
|
2021-12-26 19:47:06 +00:00
|
|
|
actor = post_id = None
|
2021-12-26 10:57:03 +00:00
|
|
|
if has_object_dict(post_json_object):
|
2021-12-25 22:09:19 +00:00
|
|
|
if post_json_object['object'].get('attributedTo'):
|
|
|
|
if isinstance(post_json_object['object']['attributedTo'], str):
|
|
|
|
actor = post_json_object['object']['attributedTo']
|
2021-12-27 11:20:57 +00:00
|
|
|
post_id = remove_id_ending(post_json_object['object']['id'])
|
2021-10-18 15:20:22 +00:00
|
|
|
if not actor:
|
2021-12-25 22:09:19 +00:00
|
|
|
actor = post_json_object['actor']
|
2021-12-27 11:20:57 +00:00
|
|
|
post_id = remove_id_ending(post_json_object['id'])
|
2021-10-18 15:20:22 +00:00
|
|
|
if not actor:
|
|
|
|
return
|
2022-01-02 19:57:50 +00:00
|
|
|
lastpost_dir = acct_dir(base_dir, nickname, domain) + '/lastpost'
|
|
|
|
if not os.path.isdir(lastpost_dir):
|
|
|
|
os.mkdir(lastpost_dir)
|
|
|
|
actor_filename = lastpost_dir + '/' + actor.replace('/', '#')
|
2021-10-18 15:20:22 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(actor_filename, 'w+') as fp_actor:
|
|
|
|
fp_actor.write(post_id)
|
2021-11-25 21:18:53 +00:00
|
|
|
except OSError:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('EX: Unable to write last post id to ' + actor_filename)
|
2021-10-18 15:20:22 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _update_cached_hashtag_swarm(base_dir: str, nickname: str, domain: str,
|
|
|
|
http_prefix: str, domain_full: str,
|
|
|
|
translate: {}) -> bool:
|
2021-10-20 13:33:34 +00:00
|
|
|
"""Updates the hashtag swarm stored as a file
|
|
|
|
"""
|
2022-01-02 19:57:50 +00:00
|
|
|
cached_hashtag_swarm_filename = \
|
2021-12-26 12:02:29 +00:00
|
|
|
acct_dir(base_dir, nickname, domain) + '/.hashtagSwarm'
|
2022-01-02 19:57:50 +00:00
|
|
|
save_swarm = True
|
|
|
|
if os.path.isfile(cached_hashtag_swarm_filename):
|
|
|
|
last_modified = file_last_modified(cached_hashtag_swarm_filename)
|
|
|
|
modified_date = None
|
2021-10-20 13:33:34 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
modified_date = \
|
|
|
|
datetime.datetime.strptime(last_modified, "%Y-%m-%dT%H:%M:%SZ")
|
2021-10-20 13:33:34 +00:00
|
|
|
except BaseException:
|
2021-10-29 18:48:15 +00:00
|
|
|
print('EX: unable to parse last modified cache date ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
str(last_modified))
|
|
|
|
if modified_date:
|
|
|
|
curr_date = datetime.datetime.utcnow()
|
|
|
|
time_diff = curr_date - modified_date
|
|
|
|
diff_mins = int(time_diff.total_seconds() / 60)
|
|
|
|
if diff_mins < 10:
|
2021-10-20 13:33:34 +00:00
|
|
|
# was saved recently, so don't save again
|
|
|
|
# This avoids too much disk I/O
|
2022-01-02 19:57:50 +00:00
|
|
|
save_swarm = False
|
2021-10-20 13:33:34 +00:00
|
|
|
else:
|
|
|
|
print('Updating cached hashtag swarm, last changed ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
str(diff_mins) + ' minutes ago')
|
2021-10-20 14:53:00 +00:00
|
|
|
else:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('WARN: no modified date for ' + str(last_modified))
|
|
|
|
if save_swarm:
|
2021-12-26 10:19:59 +00:00
|
|
|
actor = local_actor_url(http_prefix, nickname, domain_full)
|
2022-01-02 19:57:50 +00:00
|
|
|
new_swarm_str = html_hash_tag_swarm(base_dir, actor, translate)
|
|
|
|
if new_swarm_str:
|
2021-10-20 13:33:34 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(cached_hashtag_swarm_filename, 'w+') as fp_swarm:
|
|
|
|
fp_swarm.write(new_swarm_str)
|
2021-10-20 13:33:34 +00:00
|
|
|
return True
|
2021-11-25 21:18:53 +00:00
|
|
|
except OSError:
|
2021-10-29 18:48:15 +00:00
|
|
|
print('EX: unable to write cached hashtag swarm ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
cached_hashtag_swarm_filename)
|
2021-10-20 13:33:34 +00:00
|
|
|
return False
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def store_hash_tags(base_dir: str, nickname: str, domain: str,
|
|
|
|
http_prefix: str, domain_full: str,
|
|
|
|
post_json_object: {}, translate: {}) -> None:
|
2019-12-12 17:34:31 +00:00
|
|
|
"""Extracts hashtags from an incoming post and updates the
|
|
|
|
relevant tags files.
|
|
|
|
"""
|
2021-12-28 14:41:10 +00:00
|
|
|
if not is_public_post(post_json_object):
|
2019-12-12 17:34:31 +00:00
|
|
|
return
|
2021-12-26 10:57:03 +00:00
|
|
|
if not has_object_dict(post_json_object):
|
2019-12-12 17:34:31 +00:00
|
|
|
return
|
2021-12-25 22:09:19 +00:00
|
|
|
if not post_json_object['object'].get('tag'):
|
2019-12-12 17:34:31 +00:00
|
|
|
return
|
2021-12-25 22:09:19 +00:00
|
|
|
if not post_json_object.get('id'):
|
2019-12-12 17:34:31 +00:00
|
|
|
return
|
2021-12-25 22:09:19 +00:00
|
|
|
if not isinstance(post_json_object['object']['tag'], list):
|
2019-12-12 17:34:31 +00:00
|
|
|
return
|
2022-01-02 19:57:50 +00:00
|
|
|
tags_dir = base_dir + '/tags'
|
2020-09-03 09:09:58 +00:00
|
|
|
|
|
|
|
# add tags directory if it doesn't exist
|
2022-01-02 19:57:50 +00:00
|
|
|
if not os.path.isdir(tags_dir):
|
2020-09-03 09:09:58 +00:00
|
|
|
print('Creating tags directory')
|
2022-01-02 19:57:50 +00:00
|
|
|
os.mkdir(tags_dir)
|
2020-09-03 09:09:58 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
hashtag_categories = get_hashtag_categories(base_dir)
|
2020-12-05 11:11:32 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
hashtags_ctr = 0
|
2021-12-25 22:09:19 +00:00
|
|
|
for tag in post_json_object['object']['tag']:
|
2019-12-12 17:34:31 +00:00
|
|
|
if not tag.get('type'):
|
|
|
|
continue
|
2021-02-08 15:06:26 +00:00
|
|
|
if not isinstance(tag['type'], str):
|
|
|
|
continue
|
2020-04-03 16:27:34 +00:00
|
|
|
if tag['type'] != 'Hashtag':
|
2019-12-12 17:34:31 +00:00
|
|
|
continue
|
|
|
|
if not tag.get('name'):
|
|
|
|
continue
|
2022-01-02 19:57:50 +00:00
|
|
|
tag_name = tag['name'].replace('#', '').strip()
|
|
|
|
if not valid_hash_tag(tag_name):
|
2021-09-15 19:48:01 +00:00
|
|
|
continue
|
2022-01-02 19:57:50 +00:00
|
|
|
tags_filename = tags_dir + '/' + tag_name + '.txt'
|
|
|
|
post_url = remove_id_ending(post_json_object['id'])
|
|
|
|
post_url = post_url.replace('/', '#')
|
|
|
|
days_diff = datetime.datetime.utcnow() - datetime.datetime(1970, 1, 1)
|
|
|
|
days_since_epoch = days_diff.days
|
|
|
|
tag_line = \
|
|
|
|
str(days_since_epoch) + ' ' + nickname + ' ' + post_url + '\n'
|
|
|
|
hashtags_ctr += 1
|
|
|
|
if not os.path.isfile(tags_filename):
|
2021-11-25 21:18:53 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(tags_filename, 'w+') as tags_file:
|
|
|
|
tags_file.write(tag_line)
|
2021-11-25 21:18:53 +00:00
|
|
|
except OSError:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('EX: unable to write ' + tags_filename)
|
2019-12-12 17:34:31 +00:00
|
|
|
else:
|
2022-01-02 19:57:50 +00:00
|
|
|
if post_url not in open(tags_filename).read():
|
2019-12-12 17:47:16 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(tags_filename, 'r+') as tags_file:
|
|
|
|
content = tags_file.read()
|
|
|
|
if tag_line not in content:
|
|
|
|
tags_file.seek(0, 0)
|
|
|
|
tags_file.write(tag_line + content)
|
2021-12-25 15:28:52 +00:00
|
|
|
except OSError as ex:
|
2021-11-25 22:22:54 +00:00
|
|
|
print('EX: Failed to write entry to tags file ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
tags_filename + ' ' + str(ex))
|
2021-12-29 21:55:09 +00:00
|
|
|
remove_old_hashtags(base_dir, 3)
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2020-12-05 11:11:32 +00:00
|
|
|
# automatically assign a category to the tag if possible
|
2022-01-02 19:57:50 +00:00
|
|
|
category_filename = tags_dir + '/' + tag_name + '.category'
|
|
|
|
if not os.path.isfile(category_filename):
|
|
|
|
category_str = \
|
|
|
|
guess_hashtag_category(tag_name, hashtag_categories)
|
|
|
|
if category_str:
|
|
|
|
set_hashtag_category(base_dir, tag_name, category_str, False)
|
2020-12-05 11:11:32 +00:00
|
|
|
|
2021-10-20 13:33:34 +00:00
|
|
|
# if some hashtags were found then recalculate the swarm
|
|
|
|
# ready for later display
|
2022-01-02 19:57:50 +00:00
|
|
|
if hashtags_ctr > 0:
|
2021-12-29 21:55:09 +00:00
|
|
|
_update_cached_hashtag_swarm(base_dir, nickname, domain,
|
|
|
|
http_prefix, domain_full, translate)
|
|
|
|
|
|
|
|
|
|
|
|
def _inbox_store_post_to_html_cache(recent_posts_cache: {},
|
|
|
|
max_recent_posts: int,
|
|
|
|
translate: {},
|
|
|
|
base_dir: str, http_prefix: str,
|
|
|
|
session, cached_webfingers: {},
|
|
|
|
person_cache: {},
|
|
|
|
nickname: str, domain: str, port: int,
|
|
|
|
post_json_object: {},
|
|
|
|
allow_deletion: bool, boxname: str,
|
|
|
|
show_published_date_only: bool,
|
|
|
|
peertube_instances: [],
|
|
|
|
allow_local_network_access: bool,
|
|
|
|
theme_name: str, system_language: str,
|
|
|
|
max_like_count: int,
|
|
|
|
signing_priv_key_pem: str,
|
|
|
|
cw_lists: {},
|
2022-02-25 19:12:40 +00:00
|
|
|
lists_enabled: str,
|
|
|
|
timezone: str) -> None:
|
2019-10-19 12:37:35 +00:00
|
|
|
"""Converts the json post into html and stores it in a cache
|
|
|
|
This enables the post to be quickly displayed later
|
|
|
|
"""
|
2022-01-02 19:57:50 +00:00
|
|
|
page_number = -999
|
|
|
|
avatar_url = None
|
2021-07-01 21:30:36 +00:00
|
|
|
if boxname != 'outbox':
|
2020-10-08 12:28:02 +00:00
|
|
|
boxname = 'inbox'
|
2020-12-18 18:12:33 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
not_dm = not is_dm(post_json_object)
|
2021-12-26 14:08:58 +00:00
|
|
|
yt_replace_domain = get_config_param(base_dir, 'youtubedomain')
|
|
|
|
twitter_replacement_domain = get_config_param(base_dir, 'twitterdomain')
|
2021-12-29 21:55:09 +00:00
|
|
|
individual_post_as_html(signing_priv_key_pem,
|
|
|
|
True, recent_posts_cache, max_recent_posts,
|
2022-01-02 19:57:50 +00:00
|
|
|
translate, page_number,
|
2021-12-29 21:55:09 +00:00
|
|
|
base_dir, session, cached_webfingers,
|
|
|
|
person_cache,
|
|
|
|
nickname, domain, port, post_json_object,
|
2022-01-02 19:57:50 +00:00
|
|
|
avatar_url, True, allow_deletion,
|
2021-12-29 21:55:09 +00:00
|
|
|
http_prefix, __version__, boxname,
|
|
|
|
yt_replace_domain, twitter_replacement_domain,
|
|
|
|
show_published_date_only,
|
|
|
|
peertube_instances, allow_local_network_access,
|
|
|
|
theme_name, system_language, max_like_count,
|
2022-01-02 19:57:50 +00:00
|
|
|
not_dm, True, True, False, True, False,
|
2022-02-25 19:12:40 +00:00
|
|
|
cw_lists, lists_enabled, timezone)
|
2021-12-29 21:55:09 +00:00
|
|
|
|
|
|
|
|
|
|
|
def valid_inbox(base_dir: str, nickname: str, domain: str) -> bool:
|
2019-07-18 11:35:48 +00:00
|
|
|
"""Checks whether files were correctly saved to the inbox
|
|
|
|
"""
|
2021-12-26 18:17:37 +00:00
|
|
|
domain = remove_domain_port(domain)
|
2022-01-02 19:57:50 +00:00
|
|
|
inbox_dir = acct_dir(base_dir, nickname, domain) + '/inbox'
|
|
|
|
if not os.path.isdir(inbox_dir):
|
2019-07-18 09:26:47 +00:00
|
|
|
return True
|
2022-01-02 19:57:50 +00:00
|
|
|
for subdir, _, files in os.walk(inbox_dir):
|
|
|
|
for fname in files:
|
|
|
|
filename = os.path.join(subdir, fname)
|
2019-07-18 09:26:47 +00:00
|
|
|
if not os.path.isfile(filename):
|
2020-04-03 16:27:34 +00:00
|
|
|
print('filename: ' + filename)
|
2019-07-18 09:26:47 +00:00
|
|
|
return False
|
|
|
|
if 'postNickname' in open(filename).read():
|
2020-04-03 16:27:34 +00:00
|
|
|
print('queue file incorrectly saved to ' + filename)
|
2019-07-18 11:35:48 +00:00
|
|
|
return False
|
2020-12-13 22:13:45 +00:00
|
|
|
break
|
2020-03-22 21:16:02 +00:00
|
|
|
return True
|
2019-07-18 11:35:48 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def valid_inbox_filenames(base_dir: str, nickname: str, domain: str,
|
2022-01-02 19:57:50 +00:00
|
|
|
expected_domain: str, expected_port: int) -> bool:
|
2019-07-18 11:35:48 +00:00
|
|
|
"""Used by unit tests to check that the port number gets appended to
|
|
|
|
domain names within saved post filenames
|
|
|
|
"""
|
2021-12-26 18:17:37 +00:00
|
|
|
domain = remove_domain_port(domain)
|
2022-01-02 19:57:50 +00:00
|
|
|
inbox_dir = acct_dir(base_dir, nickname, domain) + '/inbox'
|
|
|
|
if not os.path.isdir(inbox_dir):
|
|
|
|
print('Not an inbox directory: ' + inbox_dir)
|
2019-07-18 11:35:48 +00:00
|
|
|
return True
|
2022-01-02 19:57:50 +00:00
|
|
|
expected_str = expected_domain + ':' + str(expected_port)
|
|
|
|
expected_found = False
|
2021-08-20 11:22:20 +00:00
|
|
|
ctr = 0
|
2022-01-02 19:57:50 +00:00
|
|
|
for subdir, _, files in os.walk(inbox_dir):
|
|
|
|
for fname in files:
|
|
|
|
filename = os.path.join(subdir, fname)
|
2021-08-20 11:22:20 +00:00
|
|
|
ctr += 1
|
2019-07-18 11:35:48 +00:00
|
|
|
if not os.path.isfile(filename):
|
2020-04-03 16:27:34 +00:00
|
|
|
print('filename: ' + filename)
|
2019-07-18 11:35:48 +00:00
|
|
|
return False
|
2022-01-02 19:57:50 +00:00
|
|
|
if expected_str in filename:
|
|
|
|
expected_found = True
|
2020-12-13 22:13:45 +00:00
|
|
|
break
|
2021-08-20 11:22:20 +00:00
|
|
|
if ctr == 0:
|
|
|
|
return True
|
2022-01-02 19:57:50 +00:00
|
|
|
if not expected_found:
|
|
|
|
print('Expected file was not found: ' + expected_str)
|
|
|
|
for subdir, _, files in os.walk(inbox_dir):
|
|
|
|
for fname in files:
|
|
|
|
filename = os.path.join(subdir, fname)
|
2021-08-20 11:22:20 +00:00
|
|
|
print(filename)
|
|
|
|
break
|
2021-08-01 13:25:11 +00:00
|
|
|
return False
|
2020-03-22 21:16:02 +00:00
|
|
|
return True
|
2019-07-18 09:26:47 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-28 20:32:11 +00:00
|
|
|
def inbox_message_has_params(message_json: {}) -> bool:
|
2019-07-02 15:07:27 +00:00
|
|
|
"""Checks whether an incoming message contains expected parameters
|
|
|
|
"""
|
2022-01-02 19:57:50 +00:00
|
|
|
expected_params = ['actor', 'type', 'object']
|
|
|
|
for param in expected_params:
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json.get(param):
|
2021-12-28 20:32:11 +00:00
|
|
|
# print('inbox_message_has_params: ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
# param + ' ' + str(message_json))
|
2019-07-02 15:07:27 +00:00
|
|
|
return False
|
2021-02-08 15:06:26 +00:00
|
|
|
|
|
|
|
# actor should be a string
|
2021-12-25 23:51:19 +00:00
|
|
|
if not isinstance(message_json['actor'], str):
|
2021-02-08 15:06:26 +00:00
|
|
|
print('WARN: actor should be a string, but is actually: ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
str(message_json['actor']))
|
|
|
|
pprint(message_json)
|
2021-02-08 15:06:26 +00:00
|
|
|
return False
|
|
|
|
|
|
|
|
# type should be a string
|
2021-12-25 23:51:19 +00:00
|
|
|
if not isinstance(message_json['type'], str):
|
|
|
|
print('WARN: type from ' + str(message_json['actor']) +
|
2021-02-08 15:06:26 +00:00
|
|
|
' should be a string, but is actually: ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
str(message_json['type']))
|
2021-02-08 15:06:26 +00:00
|
|
|
return False
|
|
|
|
|
|
|
|
# object should be a dict or a string
|
2021-12-26 10:57:03 +00:00
|
|
|
if not has_object_dict(message_json):
|
2021-12-25 23:51:19 +00:00
|
|
|
if not isinstance(message_json['object'], str):
|
|
|
|
print('WARN: object from ' + str(message_json['actor']) +
|
2021-02-08 15:06:26 +00:00
|
|
|
' should be a dict or string, but is actually: ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
str(message_json['object']))
|
2021-02-08 15:06:26 +00:00
|
|
|
return False
|
|
|
|
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json.get('to'):
|
2022-01-02 19:57:50 +00:00
|
|
|
allowed_without_to_param = ['Like', 'EmojiReact',
|
|
|
|
'Follow', 'Join', 'Request',
|
|
|
|
'Accept', 'Capability', 'Undo']
|
|
|
|
if message_json['type'] not in allowed_without_to_param:
|
2019-07-06 13:49:25 +00:00
|
|
|
return False
|
2019-07-02 15:07:27 +00:00
|
|
|
return True
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-28 20:32:11 +00:00
|
|
|
def inbox_permitted_message(domain: str, message_json: {},
|
|
|
|
federation_list: []) -> bool:
|
2019-06-28 21:59:54 +00:00
|
|
|
""" check that we are receiving from a permitted domain
|
|
|
|
"""
|
2021-12-26 17:15:04 +00:00
|
|
|
if not has_actor(message_json, False):
|
2019-06-28 21:59:54 +00:00
|
|
|
return False
|
2020-08-23 14:45:58 +00:00
|
|
|
|
2021-12-25 23:51:19 +00:00
|
|
|
actor = message_json['actor']
|
2019-06-28 21:59:54 +00:00
|
|
|
# always allow the local domain
|
2019-07-01 11:48:54 +00:00
|
|
|
if domain in actor:
|
2019-06-28 21:59:54 +00:00
|
|
|
return True
|
|
|
|
|
2021-12-27 20:47:05 +00:00
|
|
|
if not url_permitted(actor, federation_list):
|
2019-06-28 21:59:54 +00:00
|
|
|
return False
|
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
always_allowed_types = (
|
2021-11-10 12:16:03 +00:00
|
|
|
'Follow', 'Join', 'Like', 'EmojiReact', 'Delete', 'Announce'
|
|
|
|
)
|
2022-01-02 19:57:50 +00:00
|
|
|
if message_json['type'] not in always_allowed_types:
|
2021-12-26 10:57:03 +00:00
|
|
|
if not has_object_dict(message_json):
|
2019-11-16 12:32:28 +00:00
|
|
|
return True
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['object'].get('inReplyTo'):
|
2022-01-02 19:57:50 +00:00
|
|
|
in_reply_to = message_json['object']['inReplyTo']
|
|
|
|
if not isinstance(in_reply_to, str):
|
2020-08-28 14:45:07 +00:00
|
|
|
return False
|
2022-01-02 19:57:50 +00:00
|
|
|
if not url_permitted(in_reply_to, federation_list):
|
2019-07-15 09:20:16 +00:00
|
|
|
return False
|
2019-06-28 21:59:54 +00:00
|
|
|
|
|
|
|
return True
|
2019-06-29 10:08:59 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-28 20:32:11 +00:00
|
|
|
def save_post_to_inbox_queue(base_dir: str, http_prefix: str,
|
|
|
|
nickname: str, domain: str,
|
|
|
|
post_json_object: {},
|
2022-01-02 19:57:50 +00:00
|
|
|
original_post_json_object: {},
|
|
|
|
message_bytes: str,
|
|
|
|
http_headers: {},
|
|
|
|
post_path: str, debug: bool,
|
2022-03-11 17:44:53 +00:00
|
|
|
blocked_cache: [], system_language: str,
|
|
|
|
mitm: bool) -> str:
|
2021-07-20 13:33:27 +00:00
|
|
|
"""Saves the given json to the inbox queue for the person
|
2022-01-02 19:57:50 +00:00
|
|
|
key_id specifies the actor sending the post
|
2019-07-04 10:02:56 +00:00
|
|
|
"""
|
2022-01-02 19:57:50 +00:00
|
|
|
if len(message_bytes) > 10240:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('WARN: inbox message too long ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
str(len(message_bytes)) + ' bytes')
|
2019-11-15 23:43:07 +00:00
|
|
|
return None
|
2022-01-02 19:57:50 +00:00
|
|
|
original_domain = domain
|
2021-12-26 18:17:37 +00:00
|
|
|
domain = remove_domain_port(domain)
|
2019-07-14 20:12:05 +00:00
|
|
|
|
|
|
|
# block at the ealiest stage possible, which means the data
|
|
|
|
# isn't written to file
|
2022-01-02 19:57:50 +00:00
|
|
|
post_nickname = None
|
|
|
|
post_domain = None
|
2020-04-03 16:27:34 +00:00
|
|
|
actor = None
|
2021-12-25 22:09:19 +00:00
|
|
|
if post_json_object.get('actor'):
|
|
|
|
if not isinstance(post_json_object['actor'], str):
|
2020-09-05 16:46:03 +00:00
|
|
|
return None
|
2021-12-25 22:09:19 +00:00
|
|
|
actor = post_json_object['actor']
|
2022-01-02 19:57:50 +00:00
|
|
|
post_nickname = get_nickname_from_actor(post_json_object['actor'])
|
|
|
|
if not post_nickname:
|
2021-12-25 22:09:19 +00:00
|
|
|
print('No post Nickname in actor ' + post_json_object['actor'])
|
2019-09-01 19:20:28 +00:00
|
|
|
return None
|
2022-01-02 19:57:50 +00:00
|
|
|
post_domain, post_port = \
|
|
|
|
get_domain_from_actor(post_json_object['actor'])
|
|
|
|
if not post_domain:
|
2019-10-29 20:23:49 +00:00
|
|
|
if debug:
|
2021-12-25 22:09:19 +00:00
|
|
|
pprint(post_json_object)
|
2019-09-01 19:20:28 +00:00
|
|
|
print('No post Domain in actor')
|
|
|
|
return None
|
2021-12-29 21:55:09 +00:00
|
|
|
if is_blocked(base_dir, nickname, domain,
|
2022-01-02 19:57:50 +00:00
|
|
|
post_nickname, post_domain, blocked_cache):
|
2019-08-18 09:39:12 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('DEBUG: post from ' + post_nickname + ' blocked')
|
2019-07-14 20:12:05 +00:00
|
|
|
return None
|
2022-01-02 19:57:50 +00:00
|
|
|
post_domain = get_full_domain(post_domain, post_port)
|
2019-07-14 20:50:27 +00:00
|
|
|
|
2021-12-26 10:57:03 +00:00
|
|
|
if has_object_dict(post_json_object):
|
2021-12-25 22:09:19 +00:00
|
|
|
if post_json_object['object'].get('inReplyTo'):
|
|
|
|
if isinstance(post_json_object['object']['inReplyTo'], str):
|
2022-01-02 19:57:50 +00:00
|
|
|
in_reply_to = \
|
2021-12-25 22:09:19 +00:00
|
|
|
post_json_object['object']['inReplyTo']
|
2022-01-02 19:57:50 +00:00
|
|
|
reply_domain, _ = \
|
|
|
|
get_domain_from_actor(in_reply_to)
|
|
|
|
if is_blocked_domain(base_dir, reply_domain, blocked_cache):
|
2021-06-22 15:45:59 +00:00
|
|
|
if debug:
|
|
|
|
print('WARN: post contains reply from ' +
|
|
|
|
str(actor) +
|
2022-01-02 19:57:50 +00:00
|
|
|
' to a blocked domain: ' + reply_domain)
|
2021-06-22 15:45:59 +00:00
|
|
|
return None
|
2022-01-02 19:57:50 +00:00
|
|
|
|
|
|
|
reply_nickname = \
|
|
|
|
get_nickname_from_actor(in_reply_to)
|
|
|
|
if reply_nickname and reply_domain:
|
|
|
|
if is_blocked(base_dir, nickname, domain,
|
|
|
|
reply_nickname, reply_domain,
|
|
|
|
blocked_cache):
|
|
|
|
if debug:
|
|
|
|
print('WARN: post contains reply from ' +
|
|
|
|
str(actor) +
|
|
|
|
' to a blocked account: ' +
|
|
|
|
reply_nickname + '@' + reply_domain)
|
|
|
|
return None
|
|
|
|
|
2021-12-25 22:09:19 +00:00
|
|
|
if post_json_object['object'].get('content'):
|
2022-01-02 19:57:50 +00:00
|
|
|
content_str = \
|
2021-12-26 11:29:40 +00:00
|
|
|
get_base_content_from_post(post_json_object, system_language)
|
2022-01-02 19:57:50 +00:00
|
|
|
if content_str:
|
|
|
|
if is_filtered(base_dir, nickname, domain, content_str):
|
2021-06-22 15:45:59 +00:00
|
|
|
if debug:
|
|
|
|
print('WARN: post was filtered out due to content')
|
|
|
|
return None
|
2022-01-02 19:57:50 +00:00
|
|
|
original_post_id = None
|
2021-12-25 22:09:19 +00:00
|
|
|
if post_json_object.get('id'):
|
|
|
|
if not isinstance(post_json_object['id'], str):
|
2020-09-05 16:46:03 +00:00
|
|
|
return None
|
2022-01-02 19:57:50 +00:00
|
|
|
original_post_id = remove_id_ending(post_json_object['id'])
|
2019-08-16 15:04:40 +00:00
|
|
|
|
2021-12-26 13:17:46 +00:00
|
|
|
curr_time = datetime.datetime.utcnow()
|
2019-08-16 15:04:40 +00:00
|
|
|
|
2021-12-26 19:47:06 +00:00
|
|
|
post_id = None
|
2021-12-25 22:09:19 +00:00
|
|
|
if post_json_object.get('id'):
|
2021-12-27 11:20:57 +00:00
|
|
|
post_id = remove_id_ending(post_json_object['id'])
|
2021-12-26 13:17:46 +00:00
|
|
|
published = curr_time.strftime("%Y-%m-%dT%H:%M:%SZ")
|
2021-12-26 19:47:06 +00:00
|
|
|
if not post_id:
|
2022-01-02 19:57:50 +00:00
|
|
|
status_number, published = get_status_number()
|
2019-08-16 15:04:40 +00:00
|
|
|
if actor:
|
2022-01-02 19:57:50 +00:00
|
|
|
post_id = actor + '/statuses/' + status_number
|
2019-08-16 15:04:40 +00:00
|
|
|
else:
|
2021-12-26 19:47:06 +00:00
|
|
|
post_id = \
|
2022-01-02 19:57:50 +00:00
|
|
|
local_actor_url(http_prefix, nickname, original_domain) + \
|
|
|
|
'/statuses/' + status_number
|
2020-03-22 21:16:02 +00:00
|
|
|
|
2021-12-25 22:09:19 +00:00
|
|
|
# NOTE: don't change post_json_object['id'] before signature check
|
2020-03-22 21:16:02 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
inbox_queue_dir = create_inbox_queue_dir(nickname, domain, base_dir)
|
2020-04-03 16:27:34 +00:00
|
|
|
|
|
|
|
handle = nickname + '@' + domain
|
2021-12-25 16:17:53 +00:00
|
|
|
destination = base_dir + '/accounts/' + \
|
2021-12-26 19:47:06 +00:00
|
|
|
handle + '/inbox/' + post_id.replace('/', '#') + '.json'
|
2022-01-02 19:57:50 +00:00
|
|
|
filename = inbox_queue_dir + '/' + post_id.replace('/', '#') + '.json'
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
shared_inbox_item = False
|
2020-04-03 16:27:34 +00:00
|
|
|
if nickname == 'inbox':
|
2022-01-02 19:57:50 +00:00
|
|
|
nickname = original_domain
|
|
|
|
shared_inbox_item = True
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
digest_start_time = time.time()
|
|
|
|
digest_algorithm = get_digest_algorithm_from_headers(http_headers)
|
|
|
|
digest = message_content_digest(message_bytes, digest_algorithm)
|
|
|
|
time_diff_str = str(int((time.time() - digest_start_time) * 1000))
|
2019-11-16 10:12:40 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
while len(time_diff_str) < 6:
|
|
|
|
time_diff_str = '0' + time_diff_str
|
|
|
|
print('DIGEST|' + time_diff_str + '|' + filename)
|
2019-11-16 10:07:32 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
new_queue_item = {
|
|
|
|
'originalId': original_post_id,
|
2021-12-26 19:47:06 +00:00
|
|
|
'id': post_id,
|
2019-08-16 09:35:06 +00:00
|
|
|
'actor': actor,
|
2019-07-07 15:51:04 +00:00
|
|
|
'nickname': nickname,
|
|
|
|
'domain': domain,
|
2022-01-02 19:57:50 +00:00
|
|
|
'postNickname': post_nickname,
|
|
|
|
'postDomain': post_domain,
|
|
|
|
'sharedInbox': shared_inbox_item,
|
2019-07-04 10:09:27 +00:00
|
|
|
'published': published,
|
2022-01-02 19:57:50 +00:00
|
|
|
'httpHeaders': http_headers,
|
|
|
|
'path': post_path,
|
2021-12-25 22:09:19 +00:00
|
|
|
'post': post_json_object,
|
2022-01-02 19:57:50 +00:00
|
|
|
'original': original_post_json_object,
|
2019-11-16 10:07:32 +00:00
|
|
|
'digest': digest,
|
2019-07-04 10:19:15 +00:00
|
|
|
'filename': filename,
|
2022-03-11 17:44:53 +00:00
|
|
|
'destination': destination,
|
|
|
|
'mitm': mitm
|
2019-07-04 10:02:56 +00:00
|
|
|
}
|
2019-07-06 13:49:25 +00:00
|
|
|
|
|
|
|
if debug:
|
|
|
|
print('Inbox queue item created')
|
2022-01-02 19:57:50 +00:00
|
|
|
save_json(new_queue_item, filename)
|
2019-07-04 10:02:56 +00:00
|
|
|
return filename
|
2019-07-04 12:23:53 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _inbox_post_recipients_add(base_dir: str, http_prefix: str, toList: [],
|
2022-01-02 19:57:50 +00:00
|
|
|
recipients_dict: {},
|
|
|
|
domain_match: str, domain: str,
|
2021-12-29 21:55:09 +00:00
|
|
|
actor: str, debug: bool) -> bool:
|
2019-07-08 22:12:24 +00:00
|
|
|
"""Given a list of post recipients (toList) from 'to' or 'cc' parameters
|
2022-01-02 19:57:50 +00:00
|
|
|
populate a recipients_dict with the handle for each
|
2019-07-08 22:12:24 +00:00
|
|
|
"""
|
2022-01-02 19:57:50 +00:00
|
|
|
follower_recipients = False
|
2019-07-08 22:12:24 +00:00
|
|
|
for recipient in toList:
|
2019-09-03 19:53:22 +00:00
|
|
|
if not recipient:
|
|
|
|
continue
|
2019-07-08 22:12:24 +00:00
|
|
|
# is this a to a local account?
|
2022-01-02 19:57:50 +00:00
|
|
|
if domain_match in recipient:
|
2019-07-08 22:12:24 +00:00
|
|
|
# get the handle for the local account
|
2022-01-02 19:57:50 +00:00
|
|
|
nickname = recipient.split(domain_match)[1]
|
2021-06-22 12:42:52 +00:00
|
|
|
handle = nickname + '@' + domain
|
2021-12-25 16:17:53 +00:00
|
|
|
if os.path.isdir(base_dir + '/accounts/' + handle):
|
2022-01-02 19:57:50 +00:00
|
|
|
recipients_dict[handle] = None
|
2019-07-11 12:29:31 +00:00
|
|
|
else:
|
|
|
|
if debug:
|
2021-12-25 16:17:53 +00:00
|
|
|
print('DEBUG: ' + base_dir + '/accounts/' +
|
2020-04-03 16:27:34 +00:00
|
|
|
handle + ' does not exist')
|
2019-07-11 12:29:31 +00:00
|
|
|
else:
|
|
|
|
if debug:
|
2022-03-11 17:21:31 +00:00
|
|
|
if recipient.endswith('#Public'):
|
|
|
|
print('DEBUG: #Public recipient is too non-specific. ' +
|
|
|
|
recipient + ' ' + domain_match)
|
|
|
|
else:
|
|
|
|
print('DEBUG: ' + recipient + ' is not local to ' +
|
|
|
|
domain_match)
|
2019-07-11 12:29:31 +00:00
|
|
|
print(str(toList))
|
2019-07-08 22:12:24 +00:00
|
|
|
if recipient.endswith('followers'):
|
2019-07-11 12:29:31 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: followers detected as post recipients')
|
2022-01-02 19:57:50 +00:00
|
|
|
follower_recipients = True
|
|
|
|
return follower_recipients, recipients_dict
|
2019-07-08 22:12:24 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _inbox_post_recipients(base_dir: str, post_json_object: {},
|
|
|
|
http_prefix: str, domain: str, port: int,
|
|
|
|
debug: bool) -> ([], []):
|
2019-07-10 15:33:19 +00:00
|
|
|
"""Returns dictionaries containing the recipients of the given post
|
|
|
|
The shared dictionary contains followers
|
|
|
|
"""
|
2022-01-02 19:57:50 +00:00
|
|
|
recipients_dict = {}
|
|
|
|
recipients_dict_followers = {}
|
2019-07-08 22:12:24 +00:00
|
|
|
|
2021-12-25 22:09:19 +00:00
|
|
|
if not post_json_object.get('actor'):
|
2019-07-11 12:29:31 +00:00
|
|
|
if debug:
|
2021-12-25 22:09:19 +00:00
|
|
|
pprint(post_json_object)
|
2019-07-11 12:29:31 +00:00
|
|
|
print('WARNING: inbox post has no actor')
|
2022-01-02 19:57:50 +00:00
|
|
|
return recipients_dict, recipients_dict_followers
|
2019-07-08 22:12:24 +00:00
|
|
|
|
2021-12-26 18:17:37 +00:00
|
|
|
domain = remove_domain_port(domain)
|
2022-01-02 19:57:50 +00:00
|
|
|
domain_base = domain
|
2021-12-26 12:45:03 +00:00
|
|
|
domain = get_full_domain(domain, port)
|
2022-01-02 19:57:50 +00:00
|
|
|
domain_match = '/' + domain + '/users/'
|
2019-07-08 22:12:24 +00:00
|
|
|
|
2021-12-25 22:09:19 +00:00
|
|
|
actor = post_json_object['actor']
|
2019-07-08 22:12:24 +00:00
|
|
|
# first get any specific people which the post is addressed to
|
2020-03-22 21:16:02 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
follower_recipients = False
|
2021-12-26 10:57:03 +00:00
|
|
|
if has_object_dict(post_json_object):
|
2021-12-25 22:09:19 +00:00
|
|
|
if post_json_object['object'].get('to'):
|
|
|
|
if isinstance(post_json_object['object']['to'], list):
|
2022-01-02 19:57:50 +00:00
|
|
|
recipients_list = post_json_object['object']['to']
|
2019-07-11 12:29:31 +00:00
|
|
|
else:
|
2022-01-02 19:57:50 +00:00
|
|
|
recipients_list = [post_json_object['object']['to']]
|
2021-06-22 15:45:59 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: resolving "to"')
|
2022-01-02 19:57:50 +00:00
|
|
|
includes_followers, recipients_dict = \
|
2021-12-29 21:55:09 +00:00
|
|
|
_inbox_post_recipients_add(base_dir, http_prefix,
|
2022-01-02 19:57:50 +00:00
|
|
|
recipients_list,
|
|
|
|
recipients_dict,
|
|
|
|
domain_match, domain_base,
|
2021-12-29 21:55:09 +00:00
|
|
|
actor, debug)
|
2022-01-02 19:57:50 +00:00
|
|
|
if includes_followers:
|
|
|
|
follower_recipients = True
|
2021-06-22 15:45:59 +00:00
|
|
|
else:
|
|
|
|
if debug:
|
|
|
|
print('DEBUG: inbox post has no "to"')
|
2019-07-08 22:12:24 +00:00
|
|
|
|
2021-12-25 22:09:19 +00:00
|
|
|
if post_json_object['object'].get('cc'):
|
|
|
|
if isinstance(post_json_object['object']['cc'], list):
|
2022-01-02 19:57:50 +00:00
|
|
|
recipients_list = post_json_object['object']['cc']
|
2019-07-11 12:29:31 +00:00
|
|
|
else:
|
2022-01-02 19:57:50 +00:00
|
|
|
recipients_list = [post_json_object['object']['cc']]
|
|
|
|
includes_followers, recipients_dict = \
|
2021-12-29 21:55:09 +00:00
|
|
|
_inbox_post_recipients_add(base_dir, http_prefix,
|
2022-01-02 19:57:50 +00:00
|
|
|
recipients_list,
|
|
|
|
recipients_dict,
|
|
|
|
domain_match, domain_base,
|
2021-12-29 21:55:09 +00:00
|
|
|
actor, debug)
|
2022-01-02 19:57:50 +00:00
|
|
|
if includes_followers:
|
|
|
|
follower_recipients = True
|
2019-07-11 12:29:31 +00:00
|
|
|
else:
|
|
|
|
if debug:
|
2021-06-22 15:45:59 +00:00
|
|
|
print('DEBUG: inbox post has no cc')
|
|
|
|
else:
|
2021-12-25 22:09:19 +00:00
|
|
|
if debug and post_json_object.get('object'):
|
|
|
|
if isinstance(post_json_object['object'], str):
|
|
|
|
if '/statuses/' in post_json_object['object']:
|
2021-06-22 15:45:59 +00:00
|
|
|
print('DEBUG: inbox item is a link to a post')
|
|
|
|
else:
|
2021-12-25 22:09:19 +00:00
|
|
|
if '/users/' in post_json_object['object']:
|
2021-06-22 15:45:59 +00:00
|
|
|
print('DEBUG: inbox item is a link to an actor')
|
2019-07-08 22:12:24 +00:00
|
|
|
|
2021-12-25 22:09:19 +00:00
|
|
|
if post_json_object.get('to'):
|
|
|
|
if isinstance(post_json_object['to'], list):
|
2022-01-02 19:57:50 +00:00
|
|
|
recipients_list = post_json_object['to']
|
2019-08-16 17:51:00 +00:00
|
|
|
else:
|
2022-01-02 19:57:50 +00:00
|
|
|
recipients_list = [post_json_object['to']]
|
|
|
|
includes_followers, recipients_dict = \
|
2021-12-29 21:55:09 +00:00
|
|
|
_inbox_post_recipients_add(base_dir, http_prefix,
|
2022-01-02 19:57:50 +00:00
|
|
|
recipients_list,
|
|
|
|
recipients_dict,
|
|
|
|
domain_match, domain_base,
|
2021-12-29 21:55:09 +00:00
|
|
|
actor, debug)
|
2022-01-02 19:57:50 +00:00
|
|
|
if includes_followers:
|
|
|
|
follower_recipients = True
|
2019-07-08 22:12:24 +00:00
|
|
|
|
2021-12-25 22:09:19 +00:00
|
|
|
if post_json_object.get('cc'):
|
|
|
|
if isinstance(post_json_object['cc'], list):
|
2022-01-02 19:57:50 +00:00
|
|
|
recipients_list = post_json_object['cc']
|
2019-08-16 17:51:00 +00:00
|
|
|
else:
|
2022-01-02 19:57:50 +00:00
|
|
|
recipients_list = [post_json_object['cc']]
|
|
|
|
includes_followers, recipients_dict = \
|
2021-12-29 21:55:09 +00:00
|
|
|
_inbox_post_recipients_add(base_dir, http_prefix,
|
2022-01-02 19:57:50 +00:00
|
|
|
recipients_list,
|
|
|
|
recipients_dict,
|
|
|
|
domain_match, domain_base,
|
2021-12-29 21:55:09 +00:00
|
|
|
actor, debug)
|
2022-01-02 19:57:50 +00:00
|
|
|
if includes_followers:
|
|
|
|
follower_recipients = True
|
2019-07-08 22:12:24 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
if not follower_recipients:
|
2019-07-11 12:29:31 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: no followers were resolved')
|
2022-01-02 19:57:50 +00:00
|
|
|
return recipients_dict, recipients_dict_followers
|
2019-07-08 22:12:24 +00:00
|
|
|
|
|
|
|
# now resolve the followers
|
2022-01-02 19:57:50 +00:00
|
|
|
recipients_dict_followers = \
|
2021-12-29 21:55:09 +00:00
|
|
|
get_followers_of_actor(base_dir, actor, debug)
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
return recipients_dict, recipients_dict_followers
|
2019-07-08 22:12:24 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _receive_undo_follow(session, base_dir: str, http_prefix: str,
|
|
|
|
port: int, message_json: {},
|
|
|
|
federation_list: [],
|
|
|
|
debug: bool) -> bool:
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json['object'].get('actor'):
|
2019-07-17 10:34:00 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: follow request has no actor within object')
|
|
|
|
return False
|
2021-12-26 12:19:00 +00:00
|
|
|
if not has_users_path(message_json['object']['actor']):
|
2019-07-17 10:34:00 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: "users" or "profile" missing ' +
|
|
|
|
'from actor within object')
|
2019-07-17 10:34:00 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['object']['actor'] != message_json['actor']:
|
2019-07-17 10:34:00 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: actors do not match')
|
|
|
|
return False
|
2019-08-15 17:05:22 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
nickname_follower = \
|
2021-12-27 22:19:18 +00:00
|
|
|
get_nickname_from_actor(message_json['object']['actor'])
|
2022-01-02 19:57:50 +00:00
|
|
|
if not nickname_follower:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('WARN: unable to find nickname in ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json['object']['actor'])
|
2019-09-02 09:43:43 +00:00
|
|
|
return False
|
2022-01-02 19:57:50 +00:00
|
|
|
domain_follower, port_follower = \
|
2021-12-27 19:05:25 +00:00
|
|
|
get_domain_from_actor(message_json['object']['actor'])
|
2022-01-02 19:57:50 +00:00
|
|
|
domain_follower_full = get_full_domain(domain_follower, port_follower)
|
2020-03-22 21:16:02 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
nickname_following = \
|
2021-12-27 22:19:18 +00:00
|
|
|
get_nickname_from_actor(message_json['object']['object'])
|
2022-01-02 19:57:50 +00:00
|
|
|
if not nickname_following:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('WARN: unable to find nickname in ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json['object']['object'])
|
2019-09-02 09:43:43 +00:00
|
|
|
return False
|
2022-01-02 19:57:50 +00:00
|
|
|
domain_following, port_following = \
|
2021-12-27 19:05:25 +00:00
|
|
|
get_domain_from_actor(message_json['object']['object'])
|
2022-01-02 19:57:50 +00:00
|
|
|
domain_following_full = get_full_domain(domain_following, port_following)
|
2019-07-17 10:34:00 +00:00
|
|
|
|
2021-12-26 00:07:44 +00:00
|
|
|
group_account = \
|
2021-12-26 17:53:07 +00:00
|
|
|
has_group_type(base_dir, message_json['object']['actor'], None)
|
2021-12-29 21:55:09 +00:00
|
|
|
if unfollower_of_account(base_dir,
|
2022-01-02 19:57:50 +00:00
|
|
|
nickname_following, domain_following_full,
|
|
|
|
nickname_follower, domain_follower_full,
|
2021-12-29 21:55:09 +00:00
|
|
|
debug, group_account):
|
2022-01-02 19:57:50 +00:00
|
|
|
print(nickname_following + '@' + domain_following_full + ': '
|
|
|
|
'Follower ' + nickname_follower + '@' + domain_follower_full +
|
2020-08-20 12:11:07 +00:00
|
|
|
' was removed')
|
2019-07-17 11:54:13 +00:00
|
|
|
return True
|
2020-03-22 21:16:02 +00:00
|
|
|
|
2019-07-17 11:54:13 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: Follower ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
nickname_follower + '@' + domain_follower_full +
|
2020-04-03 16:27:34 +00:00
|
|
|
' was not removed')
|
2019-07-17 11:54:13 +00:00
|
|
|
return False
|
2019-07-17 10:34:00 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _receive_undo(session, base_dir: str, http_prefix: str,
|
2022-01-02 19:57:50 +00:00
|
|
|
port: int, send_threads: [], post_log: [],
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers: {}, person_cache: {},
|
|
|
|
message_json: {}, federation_list: [],
|
|
|
|
debug: bool) -> bool:
|
2019-07-17 10:34:00 +00:00
|
|
|
"""Receives an undo request within the POST section of HTTPServer
|
|
|
|
"""
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json['type'].startswith('Undo'):
|
2019-07-17 10:34:00 +00:00
|
|
|
return False
|
2019-07-17 11:24:11 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: Undo activity received')
|
2021-12-26 17:15:04 +00:00
|
|
|
if not has_actor(message_json, debug):
|
2019-07-17 10:34:00 +00:00
|
|
|
return False
|
2021-12-26 12:19:00 +00:00
|
|
|
if not has_users_path(message_json['actor']):
|
2019-07-17 10:34:00 +00:00
|
|
|
if debug:
|
2020-03-22 21:16:02 +00:00
|
|
|
print('DEBUG: "users" or "profile" missing from actor')
|
2019-07-17 10:34:00 +00:00
|
|
|
return False
|
2021-12-26 17:12:07 +00:00
|
|
|
if not has_object_stringType(message_json, debug):
|
2021-07-19 09:03:56 +00:00
|
|
|
return False
|
2021-12-26 15:54:46 +00:00
|
|
|
if not has_object_string_object(message_json, debug):
|
2019-07-17 10:34:00 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['object']['type'] == 'Follow' or \
|
|
|
|
message_json['object']['type'] == 'Join':
|
2021-12-29 21:55:09 +00:00
|
|
|
return _receive_undo_follow(session, base_dir, http_prefix,
|
|
|
|
port, message_json,
|
|
|
|
federation_list, debug)
|
2019-07-17 10:34:00 +00:00
|
|
|
return False
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _person_receive_update(base_dir: str,
|
|
|
|
domain: str, port: int,
|
2022-01-02 19:57:50 +00:00
|
|
|
update_nickname: str, update_domain: str,
|
|
|
|
update_port: int,
|
|
|
|
person_json: {}, person_cache: {},
|
2021-12-29 21:55:09 +00:00
|
|
|
debug: bool) -> bool:
|
2019-08-22 18:36:07 +00:00
|
|
|
"""Changes an actor. eg: avatar or display name change
|
2019-08-20 19:41:58 +00:00
|
|
|
"""
|
2021-03-14 19:42:25 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('Receiving actor update for ' + person_json['url'] +
|
|
|
|
' ' + str(person_json))
|
2021-12-26 12:45:03 +00:00
|
|
|
domain_full = get_full_domain(domain, port)
|
2022-01-02 19:57:50 +00:00
|
|
|
update_domain_full = get_full_domain(update_domain, update_port)
|
|
|
|
users_paths = get_user_paths()
|
|
|
|
users_str_found = False
|
|
|
|
for users_str in users_paths:
|
|
|
|
actor = update_domain_full + users_str + update_nickname
|
|
|
|
if actor in person_json['id']:
|
|
|
|
users_str_found = True
|
2020-12-23 22:18:19 +00:00
|
|
|
break
|
2022-01-02 19:57:50 +00:00
|
|
|
if not users_str_found:
|
2020-12-23 22:18:19 +00:00
|
|
|
if debug:
|
|
|
|
print('actor: ' + actor)
|
2022-01-02 19:57:50 +00:00
|
|
|
print('id: ' + person_json['id'])
|
2020-12-23 22:18:19 +00:00
|
|
|
print('DEBUG: Actor does not match id')
|
|
|
|
return False
|
2022-01-02 19:57:50 +00:00
|
|
|
if update_domain_full == domain_full:
|
2019-08-22 17:49:57 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: You can only receive actor updates ' +
|
|
|
|
'for domains other than your own')
|
2019-08-20 19:41:58 +00:00
|
|
|
return False
|
2022-01-02 19:57:50 +00:00
|
|
|
if not person_json.get('publicKey'):
|
2019-08-20 19:41:58 +00:00
|
|
|
if debug:
|
2020-03-22 21:16:02 +00:00
|
|
|
print('DEBUG: actor update does not contain a public key')
|
2019-08-20 19:41:58 +00:00
|
|
|
return False
|
2022-01-02 19:57:50 +00:00
|
|
|
if not person_json['publicKey'].get('publicKeyPem'):
|
2019-08-20 19:41:58 +00:00
|
|
|
if debug:
|
2020-03-22 21:16:02 +00:00
|
|
|
print('DEBUG: actor update does not contain a public key Pem')
|
2019-08-20 19:41:58 +00:00
|
|
|
return False
|
2022-01-02 19:57:50 +00:00
|
|
|
actor_filename = base_dir + '/cache/actors/' + \
|
|
|
|
person_json['id'].replace('/', '#') + '.json'
|
2019-08-20 19:41:58 +00:00
|
|
|
# check that the public keys match.
|
|
|
|
# If they don't then this may be a nefarious attempt to hack an account
|
2022-01-02 19:57:50 +00:00
|
|
|
idx = person_json['id']
|
2021-12-25 22:17:49 +00:00
|
|
|
if person_cache.get(idx):
|
|
|
|
if person_cache[idx]['actor']['publicKey']['publicKeyPem'] != \
|
2022-01-02 19:57:50 +00:00
|
|
|
person_json['publicKey']['publicKeyPem']:
|
2019-08-20 19:41:58 +00:00
|
|
|
if debug:
|
|
|
|
print('WARN: Public key does not match when updating actor')
|
|
|
|
return False
|
|
|
|
else:
|
2022-01-02 19:57:50 +00:00
|
|
|
if os.path.isfile(actor_filename):
|
|
|
|
existing_person_json = load_json(actor_filename)
|
|
|
|
if existing_person_json:
|
|
|
|
if existing_person_json['publicKey']['publicKeyPem'] != \
|
|
|
|
person_json['publicKey']['publicKeyPem']:
|
2019-08-20 19:41:58 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('WARN: Public key does not match ' +
|
|
|
|
'cached actor when updating')
|
2019-08-20 19:41:58 +00:00
|
|
|
return False
|
|
|
|
# save to cache in memory
|
2022-01-02 19:57:50 +00:00
|
|
|
store_person_in_cache(base_dir, person_json['id'], person_json,
|
2021-12-29 21:55:09 +00:00
|
|
|
person_cache, True)
|
2021-03-14 19:46:46 +00:00
|
|
|
# save to cache on file
|
2022-01-02 19:57:50 +00:00
|
|
|
if save_json(person_json, actor_filename):
|
2021-03-14 19:42:25 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('actor updated for ' + person_json['id'])
|
2019-09-14 18:58:55 +00:00
|
|
|
|
|
|
|
# remove avatar if it exists so that it will be refreshed later
|
|
|
|
# when a timeline is constructed
|
2022-01-02 19:57:50 +00:00
|
|
|
actor_str = person_json['id'].replace('/', '-')
|
|
|
|
remove_avatar_from_cache(base_dir, actor_str)
|
2019-08-20 19:41:58 +00:00
|
|
|
return True
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _receive_update_to_question(recent_posts_cache: {}, message_json: {},
|
|
|
|
base_dir: str,
|
|
|
|
nickname: str, domain: str) -> None:
|
2019-11-26 10:43:37 +00:00
|
|
|
"""Updating a question as new votes arrive
|
|
|
|
"""
|
|
|
|
# message url of the question
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json.get('id'):
|
2019-11-26 10:43:37 +00:00
|
|
|
return
|
2021-12-26 17:15:04 +00:00
|
|
|
if not has_actor(message_json, False):
|
2019-11-26 10:43:37 +00:00
|
|
|
return
|
2022-01-02 19:57:50 +00:00
|
|
|
message_id = remove_id_ending(message_json['id'])
|
|
|
|
if '#' in message_id:
|
|
|
|
message_id = message_id.split('#', 1)[0]
|
2019-11-26 10:43:37 +00:00
|
|
|
# find the question post
|
2022-01-02 19:57:50 +00:00
|
|
|
post_filename = locate_post(base_dir, nickname, domain, message_id)
|
2021-12-26 23:41:34 +00:00
|
|
|
if not post_filename:
|
2019-11-26 10:43:37 +00:00
|
|
|
return
|
|
|
|
# load the json for the question
|
2021-12-26 23:41:34 +00:00
|
|
|
post_json_object = load_json(post_filename, 1)
|
2021-12-25 22:09:19 +00:00
|
|
|
if not post_json_object:
|
2019-11-26 10:43:37 +00:00
|
|
|
return
|
2021-12-25 22:09:19 +00:00
|
|
|
if not post_json_object.get('actor'):
|
2019-11-26 10:43:37 +00:00
|
|
|
return
|
|
|
|
# does the actor match?
|
2021-12-25 23:51:19 +00:00
|
|
|
if post_json_object['actor'] != message_json['actor']:
|
2019-11-26 10:43:37 +00:00
|
|
|
return
|
2021-12-26 23:41:34 +00:00
|
|
|
save_json(message_json, post_filename)
|
2019-11-26 10:43:37 +00:00
|
|
|
# ensure that the cached post is removed if it exists, so
|
|
|
|
# that it then will be recreated
|
2022-01-02 19:57:50 +00:00
|
|
|
cached_post_filename = \
|
2021-12-26 23:41:34 +00:00
|
|
|
get_cached_post_filename(base_dir, nickname, domain, message_json)
|
2022-01-02 19:57:50 +00:00
|
|
|
if cached_post_filename:
|
|
|
|
if os.path.isfile(cached_post_filename):
|
2021-09-05 10:17:43 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
os.remove(cached_post_filename)
|
2021-11-25 18:42:38 +00:00
|
|
|
except OSError:
|
2021-12-29 21:55:09 +00:00
|
|
|
print('EX: _receive_update_to_question unable to delete ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
cached_post_filename)
|
2019-11-26 10:43:37 +00:00
|
|
|
# remove from memory cache
|
2021-12-27 11:05:24 +00:00
|
|
|
remove_post_from_cache(message_json, recent_posts_cache)
|
2020-03-22 21:16:02 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _receive_update_activity(recent_posts_cache: {}, session, base_dir: str,
|
|
|
|
http_prefix: str, domain: str, port: int,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads: [], post_log: [],
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers: {},
|
|
|
|
person_cache: {}, message_json: {},
|
|
|
|
federation_list: [],
|
|
|
|
nickname: str, debug: bool) -> bool:
|
2019-07-09 14:20:23 +00:00
|
|
|
"""Receives an Update activity within the POST section of HTTPServer
|
|
|
|
"""
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['type'] != 'Update':
|
2019-07-09 14:20:23 +00:00
|
|
|
return False
|
2021-12-26 17:15:04 +00:00
|
|
|
if not has_actor(message_json, debug):
|
2019-07-09 14:20:23 +00:00
|
|
|
return False
|
2021-12-26 17:12:07 +00:00
|
|
|
if not has_object_stringType(message_json, debug):
|
2021-07-19 09:07:20 +00:00
|
|
|
return False
|
2021-12-26 12:19:00 +00:00
|
|
|
if not has_users_path(message_json['actor']):
|
2019-07-09 14:20:23 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: "users" or "profile" missing from actor in ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json['type'])
|
2019-07-09 14:20:23 +00:00
|
|
|
return False
|
2019-08-22 17:25:12 +00:00
|
|
|
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['object']['type'] == 'Question':
|
2021-12-29 21:55:09 +00:00
|
|
|
_receive_update_to_question(recent_posts_cache, message_json,
|
|
|
|
base_dir, nickname, domain)
|
2019-11-25 14:05:59 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: Question update was received')
|
|
|
|
return True
|
|
|
|
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['object']['type'] == 'Person' or \
|
|
|
|
message_json['object']['type'] == 'Application' or \
|
|
|
|
message_json['object']['type'] == 'Group' or \
|
|
|
|
message_json['object']['type'] == 'Service':
|
|
|
|
if message_json['object'].get('url') and \
|
|
|
|
message_json['object'].get('id'):
|
2021-03-14 19:42:25 +00:00
|
|
|
if debug:
|
2021-12-25 23:51:19 +00:00
|
|
|
print('Request to update actor: ' + str(message_json))
|
2022-01-02 19:57:50 +00:00
|
|
|
update_nickname = get_nickname_from_actor(message_json['actor'])
|
|
|
|
if update_nickname:
|
|
|
|
update_domain, update_port = \
|
2021-12-27 19:05:25 +00:00
|
|
|
get_domain_from_actor(message_json['actor'])
|
2021-12-29 21:55:09 +00:00
|
|
|
if _person_receive_update(base_dir,
|
|
|
|
domain, port,
|
2022-01-02 19:57:50 +00:00
|
|
|
update_nickname, update_domain,
|
|
|
|
update_port,
|
2021-12-29 21:55:09 +00:00
|
|
|
message_json['object'],
|
|
|
|
person_cache, debug):
|
2021-12-25 23:51:19 +00:00
|
|
|
print('Person Update: ' + str(message_json))
|
2019-09-02 09:43:43 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: Profile update was received for ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json['object']['url'])
|
2019-09-02 09:43:43 +00:00
|
|
|
return True
|
2019-07-09 14:20:23 +00:00
|
|
|
return False
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _receive_like(recent_posts_cache: {},
|
2022-01-02 19:57:50 +00:00
|
|
|
session, handle: str, is_group: bool, base_dir: str,
|
2021-12-29 21:55:09 +00:00
|
|
|
http_prefix: str, domain: str, port: int,
|
|
|
|
onion_domain: str,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads: [], post_log: [], cached_webfingers: {},
|
2021-12-29 21:55:09 +00:00
|
|
|
person_cache: {}, message_json: {}, federation_list: [],
|
|
|
|
debug: bool,
|
|
|
|
signing_priv_key_pem: str,
|
|
|
|
max_recent_posts: int, translate: {},
|
|
|
|
allow_deletion: bool,
|
|
|
|
yt_replace_domain: str,
|
|
|
|
twitter_replacement_domain: str,
|
|
|
|
peertube_instances: [],
|
|
|
|
allow_local_network_access: bool,
|
|
|
|
theme_name: str, system_language: str,
|
|
|
|
max_like_count: int, cw_lists: {},
|
|
|
|
lists_enabled: str) -> bool:
|
2019-07-10 12:40:31 +00:00
|
|
|
"""Receives a Like activity within the POST section of HTTPServer
|
|
|
|
"""
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['type'] != 'Like':
|
2019-07-10 12:40:31 +00:00
|
|
|
return False
|
2021-12-26 17:15:04 +00:00
|
|
|
if not has_actor(message_json, debug):
|
2019-07-10 12:40:31 +00:00
|
|
|
return False
|
2021-12-26 17:12:07 +00:00
|
|
|
if not has_object_string(message_json, debug):
|
2019-07-10 12:40:31 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json.get('to'):
|
2019-07-10 12:40:31 +00:00
|
|
|
if debug:
|
2021-12-25 23:51:19 +00:00
|
|
|
print('DEBUG: ' + message_json['type'] + ' has no "to" list')
|
2019-07-10 12:40:31 +00:00
|
|
|
return False
|
2021-12-26 12:19:00 +00:00
|
|
|
if not has_users_path(message_json['actor']):
|
2019-07-10 12:40:31 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: "users" or "profile" missing from actor in ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json['type'])
|
2019-07-10 12:40:31 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if '/statuses/' not in message_json['object']:
|
2019-07-10 12:40:31 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: "statuses" missing from object in ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json['type'])
|
2019-07-10 12:40:31 +00:00
|
|
|
return False
|
2021-12-25 16:17:53 +00:00
|
|
|
if not os.path.isdir(base_dir + '/accounts/' + handle):
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: unknown recipient of like - ' + handle)
|
2019-07-10 18:00:14 +00:00
|
|
|
# if this post in the outbox of the person?
|
2022-01-02 19:57:50 +00:00
|
|
|
handle_name = handle.split('@')[0]
|
|
|
|
handle_dom = handle.split('@')[1]
|
|
|
|
post_liked_id = message_json['object']
|
|
|
|
post_filename = \
|
|
|
|
locate_post(base_dir, handle_name, handle_dom, post_liked_id)
|
2021-12-26 23:41:34 +00:00
|
|
|
if not post_filename:
|
2019-07-10 12:40:31 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: post not found in inbox or outbox')
|
2022-01-02 19:57:50 +00:00
|
|
|
print(post_liked_id)
|
2019-07-10 12:40:31 +00:00
|
|
|
return True
|
|
|
|
if debug:
|
2019-07-11 12:59:00 +00:00
|
|
|
print('DEBUG: liked post found in inbox')
|
2019-10-19 17:50:05 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
like_actor = message_json['actor']
|
|
|
|
handle_name = handle.split('@')[0]
|
|
|
|
handle_dom = handle.split('@')[1]
|
2021-12-29 21:55:09 +00:00
|
|
|
if not _already_liked(base_dir,
|
2022-01-02 19:57:50 +00:00
|
|
|
handle_name, handle_dom,
|
|
|
|
post_liked_id,
|
|
|
|
like_actor):
|
2021-12-29 21:55:09 +00:00
|
|
|
_like_notify(base_dir, domain, onion_domain, handle,
|
2022-01-02 19:57:50 +00:00
|
|
|
like_actor, post_liked_id)
|
2021-12-29 21:55:09 +00:00
|
|
|
update_likes_collection(recent_posts_cache, base_dir, post_filename,
|
2022-01-02 19:57:50 +00:00
|
|
|
post_liked_id, like_actor,
|
|
|
|
handle_name, domain, debug, None)
|
2021-09-03 22:04:50 +00:00
|
|
|
# regenerate the html
|
2022-01-02 19:57:50 +00:00
|
|
|
liked_post_json = load_json(post_filename, 0, 1)
|
|
|
|
if liked_post_json:
|
|
|
|
if liked_post_json.get('type'):
|
|
|
|
if liked_post_json['type'] == 'Announce' and \
|
|
|
|
liked_post_json.get('object'):
|
|
|
|
if isinstance(liked_post_json['object'], str):
|
|
|
|
announce_like_url = liked_post_json['object']
|
|
|
|
announce_liked_filename = \
|
|
|
|
locate_post(base_dir, handle_name,
|
|
|
|
domain, announce_like_url)
|
|
|
|
if announce_liked_filename:
|
|
|
|
post_liked_id = announce_like_url
|
|
|
|
post_filename = announce_liked_filename
|
2021-12-29 21:55:09 +00:00
|
|
|
update_likes_collection(recent_posts_cache,
|
|
|
|
base_dir,
|
|
|
|
post_filename,
|
2022-01-02 19:57:50 +00:00
|
|
|
post_liked_id,
|
|
|
|
like_actor,
|
|
|
|
handle_name,
|
2021-12-29 21:55:09 +00:00
|
|
|
domain, debug, None)
|
2022-01-02 19:57:50 +00:00
|
|
|
if liked_post_json:
|
2021-10-14 22:29:51 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
cached_post_filename = \
|
|
|
|
get_cached_post_filename(base_dir, handle_name, domain,
|
|
|
|
liked_post_json)
|
|
|
|
print('Liked post json: ' + str(liked_post_json))
|
|
|
|
print('Liked post nickname: ' + handle_name + ' ' + domain)
|
|
|
|
print('Liked post cache: ' + str(cached_post_filename))
|
|
|
|
page_number = 1
|
2021-12-25 20:06:27 +00:00
|
|
|
show_published_date_only = False
|
2022-01-02 19:57:50 +00:00
|
|
|
show_individual_post_icons = True
|
|
|
|
manually_approve_followers = \
|
|
|
|
follower_approval_active(base_dir, handle_name, domain)
|
|
|
|
not_dm = not is_dm(liked_post_json)
|
2022-02-25 19:12:40 +00:00
|
|
|
timezone = get_account_timezone(base_dir, handle_name, domain)
|
2021-12-29 21:55:09 +00:00
|
|
|
individual_post_as_html(signing_priv_key_pem, False,
|
|
|
|
recent_posts_cache, max_recent_posts,
|
2022-01-02 19:57:50 +00:00
|
|
|
translate, page_number, base_dir,
|
2021-12-29 21:55:09 +00:00
|
|
|
session, cached_webfingers, person_cache,
|
2022-01-02 19:57:50 +00:00
|
|
|
handle_name, domain, port, liked_post_json,
|
2021-12-29 21:55:09 +00:00
|
|
|
None, True, allow_deletion,
|
|
|
|
http_prefix, __version__,
|
|
|
|
'inbox',
|
|
|
|
yt_replace_domain,
|
|
|
|
twitter_replacement_domain,
|
|
|
|
show_published_date_only,
|
|
|
|
peertube_instances,
|
|
|
|
allow_local_network_access,
|
|
|
|
theme_name, system_language,
|
2022-01-02 19:57:50 +00:00
|
|
|
max_like_count, not_dm,
|
|
|
|
show_individual_post_icons,
|
|
|
|
manually_approve_followers,
|
2021-12-29 21:55:09 +00:00
|
|
|
False, True, False, cw_lists,
|
2022-02-25 19:12:40 +00:00
|
|
|
lists_enabled, timezone)
|
2019-07-10 12:40:31 +00:00
|
|
|
return True
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _receive_undo_like(recent_posts_cache: {},
|
2022-01-02 19:57:50 +00:00
|
|
|
session, handle: str, is_group: bool, base_dir: str,
|
2021-12-29 21:55:09 +00:00
|
|
|
http_prefix: str, domain: str, port: int,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads: [], post_log: [], cached_webfingers: {},
|
2021-12-29 21:55:09 +00:00
|
|
|
person_cache: {}, message_json: {}, federation_list: [],
|
|
|
|
debug: bool,
|
|
|
|
signing_priv_key_pem: str,
|
|
|
|
max_recent_posts: int, translate: {},
|
|
|
|
allow_deletion: bool,
|
|
|
|
yt_replace_domain: str,
|
|
|
|
twitter_replacement_domain: str,
|
|
|
|
peertube_instances: [],
|
|
|
|
allow_local_network_access: bool,
|
|
|
|
theme_name: str, system_language: str,
|
|
|
|
max_like_count: int, cw_lists: {},
|
|
|
|
lists_enabled: str) -> bool:
|
2019-07-12 09:10:09 +00:00
|
|
|
"""Receives an undo like activity within the POST section of HTTPServer
|
|
|
|
"""
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['type'] != 'Undo':
|
2019-07-12 09:10:09 +00:00
|
|
|
return False
|
2021-12-26 17:15:04 +00:00
|
|
|
if not has_actor(message_json, debug):
|
2019-07-12 09:10:09 +00:00
|
|
|
return False
|
2021-12-26 17:12:07 +00:00
|
|
|
if not has_object_stringType(message_json, debug):
|
2019-07-12 09:10:09 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['object']['type'] != 'Like':
|
2019-07-12 09:10:09 +00:00
|
|
|
return False
|
2021-12-26 15:54:46 +00:00
|
|
|
if not has_object_string_object(message_json, debug):
|
2019-07-12 09:10:09 +00:00
|
|
|
return False
|
2021-12-26 12:19:00 +00:00
|
|
|
if not has_users_path(message_json['actor']):
|
2019-07-12 09:10:09 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: "users" or "profile" missing from actor in ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json['type'] + ' like')
|
2019-07-12 09:10:09 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if '/statuses/' not in message_json['object']['object']:
|
2019-07-12 09:10:09 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: "statuses" missing from like object in ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json['type'])
|
2019-07-12 09:10:09 +00:00
|
|
|
return False
|
2021-12-25 16:17:53 +00:00
|
|
|
if not os.path.isdir(base_dir + '/accounts/' + handle):
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: unknown recipient of undo like - ' + handle)
|
2019-07-12 09:10:09 +00:00
|
|
|
# if this post in the outbox of the person?
|
2022-01-02 19:57:50 +00:00
|
|
|
handle_name = handle.split('@')[0]
|
|
|
|
handle_dom = handle.split('@')[1]
|
2021-12-26 23:41:34 +00:00
|
|
|
post_filename = \
|
2022-01-02 19:57:50 +00:00
|
|
|
locate_post(base_dir, handle_name, handle_dom,
|
2021-12-26 20:36:08 +00:00
|
|
|
message_json['object']['object'])
|
2021-12-26 23:41:34 +00:00
|
|
|
if not post_filename:
|
2019-07-12 09:10:09 +00:00
|
|
|
if debug:
|
2019-07-12 09:41:57 +00:00
|
|
|
print('DEBUG: unliked post not found in inbox or outbox')
|
2021-12-25 23:51:19 +00:00
|
|
|
print(message_json['object']['object'])
|
2019-07-12 09:10:09 +00:00
|
|
|
return True
|
|
|
|
if debug:
|
|
|
|
print('DEBUG: liked post found in inbox. Now undoing.')
|
2022-01-02 19:57:50 +00:00
|
|
|
like_actor = message_json['actor']
|
|
|
|
post_liked_id = message_json['object']
|
2021-12-27 23:23:07 +00:00
|
|
|
undo_likes_collection_entry(recent_posts_cache, base_dir, post_filename,
|
2022-01-02 19:57:50 +00:00
|
|
|
post_liked_id, like_actor, domain, debug, None)
|
2021-09-03 22:10:54 +00:00
|
|
|
# regenerate the html
|
2022-01-02 19:57:50 +00:00
|
|
|
liked_post_json = load_json(post_filename, 0, 1)
|
|
|
|
if liked_post_json:
|
|
|
|
if liked_post_json.get('type'):
|
|
|
|
if liked_post_json['type'] == 'Announce' and \
|
|
|
|
liked_post_json.get('object'):
|
|
|
|
if isinstance(liked_post_json['object'], str):
|
|
|
|
announce_like_url = liked_post_json['object']
|
|
|
|
announce_liked_filename = \
|
|
|
|
locate_post(base_dir, handle_name,
|
|
|
|
domain, announce_like_url)
|
|
|
|
if announce_liked_filename:
|
|
|
|
post_liked_id = announce_like_url
|
|
|
|
post_filename = announce_liked_filename
|
2021-12-27 23:23:07 +00:00
|
|
|
undo_likes_collection_entry(recent_posts_cache,
|
|
|
|
base_dir,
|
|
|
|
post_filename,
|
2022-01-02 19:57:50 +00:00
|
|
|
post_liked_id,
|
|
|
|
like_actor, domain, debug,
|
2021-12-27 23:23:07 +00:00
|
|
|
None)
|
2022-01-02 19:57:50 +00:00
|
|
|
if liked_post_json:
|
2021-10-14 22:29:51 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
cached_post_filename = \
|
|
|
|
get_cached_post_filename(base_dir, handle_name, domain,
|
|
|
|
liked_post_json)
|
|
|
|
print('Unliked post json: ' + str(liked_post_json))
|
|
|
|
print('Unliked post nickname: ' + handle_name + ' ' + domain)
|
|
|
|
print('Unliked post cache: ' + str(cached_post_filename))
|
|
|
|
page_number = 1
|
2021-12-25 20:06:27 +00:00
|
|
|
show_published_date_only = False
|
2022-01-02 19:57:50 +00:00
|
|
|
show_individual_post_icons = True
|
|
|
|
manually_approve_followers = \
|
|
|
|
follower_approval_active(base_dir, handle_name, domain)
|
|
|
|
not_dm = not is_dm(liked_post_json)
|
2022-02-25 19:12:40 +00:00
|
|
|
timezone = get_account_timezone(base_dir, handle_name, domain)
|
2021-12-29 21:55:09 +00:00
|
|
|
individual_post_as_html(signing_priv_key_pem, False,
|
|
|
|
recent_posts_cache, max_recent_posts,
|
2022-01-02 19:57:50 +00:00
|
|
|
translate, page_number, base_dir,
|
2021-12-29 21:55:09 +00:00
|
|
|
session, cached_webfingers, person_cache,
|
2022-01-02 19:57:50 +00:00
|
|
|
handle_name, domain, port, liked_post_json,
|
2021-12-29 21:55:09 +00:00
|
|
|
None, True, allow_deletion,
|
|
|
|
http_prefix, __version__,
|
|
|
|
'inbox',
|
|
|
|
yt_replace_domain,
|
|
|
|
twitter_replacement_domain,
|
|
|
|
show_published_date_only,
|
|
|
|
peertube_instances,
|
|
|
|
allow_local_network_access,
|
|
|
|
theme_name, system_language,
|
2022-01-02 19:57:50 +00:00
|
|
|
max_like_count, not_dm,
|
|
|
|
show_individual_post_icons,
|
|
|
|
manually_approve_followers,
|
2021-12-29 21:55:09 +00:00
|
|
|
False, True, False, cw_lists,
|
2022-02-25 19:12:40 +00:00
|
|
|
lists_enabled, timezone)
|
2019-07-12 09:10:09 +00:00
|
|
|
return True
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _receive_reaction(recent_posts_cache: {},
|
2022-01-02 19:57:50 +00:00
|
|
|
session, handle: str, is_group: bool, base_dir: str,
|
2021-12-29 21:55:09 +00:00
|
|
|
http_prefix: str, domain: str, port: int,
|
|
|
|
onion_domain: str,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads: [], post_log: [], cached_webfingers: {},
|
2021-12-29 21:55:09 +00:00
|
|
|
person_cache: {}, message_json: {}, federation_list: [],
|
|
|
|
debug: bool,
|
|
|
|
signing_priv_key_pem: str,
|
|
|
|
max_recent_posts: int, translate: {},
|
|
|
|
allow_deletion: bool,
|
|
|
|
yt_replace_domain: str,
|
|
|
|
twitter_replacement_domain: str,
|
|
|
|
peertube_instances: [],
|
|
|
|
allow_local_network_access: bool,
|
|
|
|
theme_name: str, system_language: str,
|
|
|
|
max_like_count: int, cw_lists: {},
|
|
|
|
lists_enabled: str) -> bool:
|
2021-11-10 12:16:03 +00:00
|
|
|
"""Receives an emoji reaction within the POST section of HTTPServer
|
|
|
|
"""
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['type'] != 'EmojiReact':
|
2021-11-10 12:16:03 +00:00
|
|
|
return False
|
2021-12-26 17:15:04 +00:00
|
|
|
if not has_actor(message_json, debug):
|
2021-11-10 12:16:03 +00:00
|
|
|
return False
|
2021-12-26 17:12:07 +00:00
|
|
|
if not has_object_string(message_json, debug):
|
2021-11-10 12:16:03 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json.get('content'):
|
2021-11-10 12:16:03 +00:00
|
|
|
if debug:
|
2021-12-25 23:51:19 +00:00
|
|
|
print('DEBUG: ' + message_json['type'] + ' has no "content"')
|
2021-11-10 12:16:03 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if not isinstance(message_json['content'], str):
|
2021-11-10 12:16:03 +00:00
|
|
|
if debug:
|
2021-12-25 23:51:19 +00:00
|
|
|
print('DEBUG: ' + message_json['type'] + ' content is not string')
|
2021-11-10 12:16:03 +00:00
|
|
|
return False
|
2021-12-29 21:55:09 +00:00
|
|
|
if not valid_emoji_content(message_json['content']):
|
|
|
|
print('_receive_reaction: Invalid emoji reaction: "' +
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json['content'] + '" from ' + message_json['actor'])
|
2021-11-10 13:10:02 +00:00
|
|
|
return False
|
2021-12-26 12:19:00 +00:00
|
|
|
if not has_users_path(message_json['actor']):
|
2021-11-10 12:16:03 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: "users" or "profile" missing from actor in ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json['type'])
|
2021-11-10 12:16:03 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if '/statuses/' not in message_json['object']:
|
2021-11-10 12:16:03 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: "statuses" missing from object in ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json['type'])
|
2021-11-10 12:16:03 +00:00
|
|
|
return False
|
2021-12-25 16:17:53 +00:00
|
|
|
if not os.path.isdir(base_dir + '/accounts/' + handle):
|
2021-11-10 12:16:03 +00:00
|
|
|
print('DEBUG: unknown recipient of emoji reaction - ' + handle)
|
2021-12-25 16:17:53 +00:00
|
|
|
if os.path.isfile(base_dir + '/accounts/' + handle +
|
2021-11-17 14:25:24 +00:00
|
|
|
'/.hideReactionButton'):
|
|
|
|
print('Emoji reaction rejected by ' + handle +
|
|
|
|
' due to their settings')
|
|
|
|
return True
|
2021-11-10 12:16:03 +00:00
|
|
|
# if this post in the outbox of the person?
|
2022-01-02 19:57:50 +00:00
|
|
|
handle_name = handle.split('@')[0]
|
|
|
|
handle_dom = handle.split('@')[1]
|
2021-11-17 14:25:24 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
post_reaction_id = message_json['object']
|
|
|
|
emoji_content = remove_html(message_json['content'])
|
|
|
|
if not emoji_content:
|
2021-11-10 12:16:03 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: emoji reaction has no content')
|
|
|
|
return True
|
2022-01-02 19:57:50 +00:00
|
|
|
post_filename = locate_post(base_dir, handle_name, handle_dom,
|
|
|
|
post_reaction_id)
|
2021-12-26 23:41:34 +00:00
|
|
|
if not post_filename:
|
2021-11-10 12:16:03 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: emoji reaction post not found in inbox or outbox')
|
2022-01-02 19:57:50 +00:00
|
|
|
print(post_reaction_id)
|
2021-11-10 12:16:03 +00:00
|
|
|
return True
|
|
|
|
if debug:
|
|
|
|
print('DEBUG: emoji reaction post found in inbox')
|
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
reaction_actor = message_json['actor']
|
|
|
|
handle_name = handle.split('@')[0]
|
|
|
|
handle_dom = handle.split('@')[1]
|
2021-12-29 21:55:09 +00:00
|
|
|
if not _already_reacted(base_dir,
|
2022-01-02 19:57:50 +00:00
|
|
|
handle_name, handle_dom,
|
|
|
|
post_reaction_id,
|
|
|
|
reaction_actor,
|
|
|
|
emoji_content):
|
2021-12-29 21:55:09 +00:00
|
|
|
_reaction_notify(base_dir, domain, onion_domain, handle,
|
2022-01-02 19:57:50 +00:00
|
|
|
reaction_actor, post_reaction_id, emoji_content)
|
2021-12-29 21:55:09 +00:00
|
|
|
update_reaction_collection(recent_posts_cache, base_dir, post_filename,
|
2022-01-02 19:57:50 +00:00
|
|
|
post_reaction_id, reaction_actor,
|
|
|
|
handle_name, domain, debug, None, emoji_content)
|
2021-11-10 12:16:03 +00:00
|
|
|
# regenerate the html
|
2021-12-29 21:55:09 +00:00
|
|
|
reaction_post_json = load_json(post_filename, 0, 1)
|
|
|
|
if reaction_post_json:
|
|
|
|
if reaction_post_json.get('type'):
|
|
|
|
if reaction_post_json['type'] == 'Announce' and \
|
|
|
|
reaction_post_json.get('object'):
|
|
|
|
if isinstance(reaction_post_json['object'], str):
|
2022-01-02 19:57:50 +00:00
|
|
|
announce_reaction_url = reaction_post_json['object']
|
|
|
|
announce_reaction_filename = \
|
|
|
|
locate_post(base_dir, handle_name,
|
|
|
|
domain, announce_reaction_url)
|
|
|
|
if announce_reaction_filename:
|
|
|
|
post_reaction_id = announce_reaction_url
|
|
|
|
post_filename = announce_reaction_filename
|
2021-12-29 21:55:09 +00:00
|
|
|
update_reaction_collection(recent_posts_cache,
|
|
|
|
base_dir,
|
|
|
|
post_filename,
|
2022-01-02 19:57:50 +00:00
|
|
|
post_reaction_id,
|
|
|
|
reaction_actor,
|
|
|
|
handle_name,
|
2021-12-29 21:55:09 +00:00
|
|
|
domain, debug, None,
|
2022-01-02 19:57:50 +00:00
|
|
|
emoji_content)
|
2021-12-29 21:55:09 +00:00
|
|
|
if reaction_post_json:
|
2021-11-10 12:16:03 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
cached_post_filename = \
|
|
|
|
get_cached_post_filename(base_dir, handle_name, domain,
|
2021-12-29 21:55:09 +00:00
|
|
|
reaction_post_json)
|
|
|
|
print('Reaction post json: ' + str(reaction_post_json))
|
2022-01-02 19:57:50 +00:00
|
|
|
print('Reaction post nickname: ' + handle_name + ' ' + domain)
|
|
|
|
print('Reaction post cache: ' + str(cached_post_filename))
|
|
|
|
page_number = 1
|
2021-12-25 20:06:27 +00:00
|
|
|
show_published_date_only = False
|
2022-01-02 19:57:50 +00:00
|
|
|
show_individual_post_icons = True
|
|
|
|
manually_approve_followers = \
|
|
|
|
follower_approval_active(base_dir, handle_name, domain)
|
|
|
|
not_dm = not is_dm(reaction_post_json)
|
2022-02-25 19:12:40 +00:00
|
|
|
timezone = get_account_timezone(base_dir, handle_name, domain)
|
2021-12-29 21:55:09 +00:00
|
|
|
individual_post_as_html(signing_priv_key_pem, False,
|
|
|
|
recent_posts_cache, max_recent_posts,
|
2022-01-02 19:57:50 +00:00
|
|
|
translate, page_number, base_dir,
|
2021-12-29 21:55:09 +00:00
|
|
|
session, cached_webfingers, person_cache,
|
2022-01-02 19:57:50 +00:00
|
|
|
handle_name, domain, port,
|
2021-12-29 21:55:09 +00:00
|
|
|
reaction_post_json,
|
|
|
|
None, True, allow_deletion,
|
|
|
|
http_prefix, __version__,
|
|
|
|
'inbox',
|
|
|
|
yt_replace_domain,
|
|
|
|
twitter_replacement_domain,
|
|
|
|
show_published_date_only,
|
|
|
|
peertube_instances,
|
|
|
|
allow_local_network_access,
|
|
|
|
theme_name, system_language,
|
2022-01-02 19:57:50 +00:00
|
|
|
max_like_count, not_dm,
|
|
|
|
show_individual_post_icons,
|
|
|
|
manually_approve_followers,
|
2021-12-29 21:55:09 +00:00
|
|
|
False, True, False, cw_lists,
|
2022-02-25 19:12:40 +00:00
|
|
|
lists_enabled, timezone)
|
2021-11-10 12:16:03 +00:00
|
|
|
return True
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _receive_undo_reaction(recent_posts_cache: {},
|
2022-01-02 19:57:50 +00:00
|
|
|
session, handle: str, is_group: bool, base_dir: str,
|
2021-12-29 21:55:09 +00:00
|
|
|
http_prefix: str, domain: str, port: int,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads: [], post_log: [],
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers: {},
|
|
|
|
person_cache: {}, message_json: {},
|
|
|
|
federation_list: [],
|
|
|
|
debug: bool,
|
|
|
|
signing_priv_key_pem: str,
|
|
|
|
max_recent_posts: int, translate: {},
|
|
|
|
allow_deletion: bool,
|
|
|
|
yt_replace_domain: str,
|
|
|
|
twitter_replacement_domain: str,
|
|
|
|
peertube_instances: [],
|
|
|
|
allow_local_network_access: bool,
|
|
|
|
theme_name: str, system_language: str,
|
|
|
|
max_like_count: int, cw_lists: {},
|
|
|
|
lists_enabled: str) -> bool:
|
2021-11-10 12:16:03 +00:00
|
|
|
"""Receives an undo emoji reaction within the POST section of HTTPServer
|
|
|
|
"""
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['type'] != 'Undo':
|
2021-11-10 12:16:03 +00:00
|
|
|
return False
|
2021-12-26 17:15:04 +00:00
|
|
|
if not has_actor(message_json, debug):
|
2021-11-10 12:16:03 +00:00
|
|
|
return False
|
2021-12-26 17:12:07 +00:00
|
|
|
if not has_object_stringType(message_json, debug):
|
2021-11-10 12:16:03 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['object']['type'] != 'EmojiReact':
|
2021-11-10 12:16:03 +00:00
|
|
|
return False
|
2021-12-26 15:54:46 +00:00
|
|
|
if not has_object_string_object(message_json, debug):
|
2021-11-10 12:16:03 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json['object'].get('content'):
|
2021-11-10 12:16:03 +00:00
|
|
|
if debug:
|
2021-12-25 23:51:19 +00:00
|
|
|
print('DEBUG: ' + message_json['type'] + ' has no "content"')
|
2021-11-10 12:16:03 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if not isinstance(message_json['object']['content'], str):
|
2021-11-10 12:16:03 +00:00
|
|
|
if debug:
|
2021-12-25 23:51:19 +00:00
|
|
|
print('DEBUG: ' + message_json['type'] + ' content is not string')
|
2021-11-10 12:16:03 +00:00
|
|
|
return False
|
2021-12-26 12:19:00 +00:00
|
|
|
if not has_users_path(message_json['actor']):
|
2021-11-10 12:16:03 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: "users" or "profile" missing from actor in ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json['type'] + ' reaction')
|
2021-11-10 12:16:03 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if '/statuses/' not in message_json['object']['object']:
|
2021-11-10 12:16:03 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: "statuses" missing from reaction object in ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json['type'])
|
2021-11-10 12:16:03 +00:00
|
|
|
return False
|
2021-12-25 16:17:53 +00:00
|
|
|
if not os.path.isdir(base_dir + '/accounts/' + handle):
|
2021-11-10 12:16:03 +00:00
|
|
|
print('DEBUG: unknown recipient of undo reaction - ' + handle)
|
|
|
|
# if this post in the outbox of the person?
|
2022-01-02 19:57:50 +00:00
|
|
|
handle_name = handle.split('@')[0]
|
|
|
|
handle_dom = handle.split('@')[1]
|
2021-12-26 23:41:34 +00:00
|
|
|
post_filename = \
|
2022-01-02 19:57:50 +00:00
|
|
|
locate_post(base_dir, handle_name, handle_dom,
|
2021-12-26 20:36:08 +00:00
|
|
|
message_json['object']['object'])
|
2021-12-26 23:41:34 +00:00
|
|
|
if not post_filename:
|
2021-11-10 12:16:03 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: unreaction post not found in inbox or outbox')
|
2021-12-25 23:51:19 +00:00
|
|
|
print(message_json['object']['object'])
|
2021-11-10 12:16:03 +00:00
|
|
|
return True
|
|
|
|
if debug:
|
|
|
|
print('DEBUG: reaction post found in inbox. Now undoing.')
|
2022-01-02 19:57:50 +00:00
|
|
|
reaction_actor = message_json['actor']
|
|
|
|
post_reaction_id = message_json['object']
|
|
|
|
emoji_content = remove_html(message_json['object']['content'])
|
|
|
|
if not emoji_content:
|
2021-11-10 12:16:03 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: unreaction has no content')
|
|
|
|
return True
|
2021-12-27 23:02:50 +00:00
|
|
|
undo_reaction_collection_entry(recent_posts_cache, base_dir, post_filename,
|
2022-01-02 19:57:50 +00:00
|
|
|
post_reaction_id, reaction_actor, domain,
|
|
|
|
debug, None, emoji_content)
|
2021-11-10 12:16:03 +00:00
|
|
|
# regenerate the html
|
2021-12-29 21:55:09 +00:00
|
|
|
reaction_post_json = load_json(post_filename, 0, 1)
|
|
|
|
if reaction_post_json:
|
|
|
|
if reaction_post_json.get('type'):
|
|
|
|
if reaction_post_json['type'] == 'Announce' and \
|
|
|
|
reaction_post_json.get('object'):
|
|
|
|
if isinstance(reaction_post_json['object'], str):
|
2022-01-02 19:57:50 +00:00
|
|
|
announce_reaction_url = reaction_post_json['object']
|
|
|
|
announce_reaction_filename = \
|
|
|
|
locate_post(base_dir, handle_name,
|
|
|
|
domain, announce_reaction_url)
|
|
|
|
if announce_reaction_filename:
|
|
|
|
post_reaction_id = announce_reaction_url
|
|
|
|
post_filename = announce_reaction_filename
|
2021-12-27 23:02:50 +00:00
|
|
|
undo_reaction_collection_entry(recent_posts_cache,
|
|
|
|
base_dir,
|
|
|
|
post_filename,
|
2022-01-02 19:57:50 +00:00
|
|
|
post_reaction_id,
|
|
|
|
reaction_actor,
|
2021-12-27 23:02:50 +00:00
|
|
|
domain,
|
|
|
|
debug, None,
|
2022-01-02 19:57:50 +00:00
|
|
|
emoji_content)
|
2021-12-29 21:55:09 +00:00
|
|
|
if reaction_post_json:
|
2021-11-10 12:16:03 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
cached_post_filename = \
|
|
|
|
get_cached_post_filename(base_dir, handle_name, domain,
|
2021-12-29 21:55:09 +00:00
|
|
|
reaction_post_json)
|
|
|
|
print('Unreaction post json: ' + str(reaction_post_json))
|
2022-01-02 19:57:50 +00:00
|
|
|
print('Unreaction post nickname: ' +
|
|
|
|
handle_name + ' ' + domain)
|
|
|
|
print('Unreaction post cache: ' + str(cached_post_filename))
|
|
|
|
page_number = 1
|
2021-12-25 20:06:27 +00:00
|
|
|
show_published_date_only = False
|
2022-01-02 19:57:50 +00:00
|
|
|
show_individual_post_icons = True
|
|
|
|
manually_approve_followers = \
|
|
|
|
follower_approval_active(base_dir, handle_name, domain)
|
|
|
|
not_dm = not is_dm(reaction_post_json)
|
2022-02-25 19:12:40 +00:00
|
|
|
timezone = get_account_timezone(base_dir, handle_name, domain)
|
2021-12-29 21:55:09 +00:00
|
|
|
individual_post_as_html(signing_priv_key_pem, False,
|
|
|
|
recent_posts_cache, max_recent_posts,
|
2022-01-02 19:57:50 +00:00
|
|
|
translate, page_number, base_dir,
|
2021-12-29 21:55:09 +00:00
|
|
|
session, cached_webfingers, person_cache,
|
2022-01-02 19:57:50 +00:00
|
|
|
handle_name, domain, port,
|
2021-12-29 21:55:09 +00:00
|
|
|
reaction_post_json,
|
|
|
|
None, True, allow_deletion,
|
|
|
|
http_prefix, __version__,
|
|
|
|
'inbox',
|
|
|
|
yt_replace_domain,
|
|
|
|
twitter_replacement_domain,
|
|
|
|
show_published_date_only,
|
|
|
|
peertube_instances,
|
|
|
|
allow_local_network_access,
|
|
|
|
theme_name, system_language,
|
2022-01-02 19:57:50 +00:00
|
|
|
max_like_count, not_dm,
|
|
|
|
show_individual_post_icons,
|
|
|
|
manually_approve_followers,
|
2021-12-29 21:55:09 +00:00
|
|
|
False, True, False, cw_lists,
|
2022-02-25 19:12:40 +00:00
|
|
|
lists_enabled, timezone)
|
2021-11-10 12:16:03 +00:00
|
|
|
return True
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _receive_bookmark(recent_posts_cache: {},
|
2022-01-02 19:57:50 +00:00
|
|
|
session, handle: str, is_group: bool, base_dir: str,
|
2021-12-29 21:55:09 +00:00
|
|
|
http_prefix: str, domain: str, port: int,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads: [], post_log: [], cached_webfingers: {},
|
2021-12-29 21:55:09 +00:00
|
|
|
person_cache: {}, message_json: {}, federation_list: [],
|
|
|
|
debug: bool, signing_priv_key_pem: str,
|
|
|
|
max_recent_posts: int, translate: {},
|
|
|
|
allow_deletion: bool,
|
|
|
|
yt_replace_domain: str,
|
|
|
|
twitter_replacement_domain: str,
|
|
|
|
peertube_instances: [],
|
|
|
|
allow_local_network_access: bool,
|
|
|
|
theme_name: str, system_language: str,
|
|
|
|
max_like_count: int, cw_lists: {},
|
|
|
|
lists_enabled: {}) -> bool:
|
2019-11-17 14:01:49 +00:00
|
|
|
"""Receives a bookmark activity within the POST section of HTTPServer
|
|
|
|
"""
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json.get('type'):
|
2021-03-20 14:09:07 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['type'] != 'Add':
|
2021-03-20 14:09:07 +00:00
|
|
|
return False
|
2021-12-26 17:15:04 +00:00
|
|
|
if not has_actor(message_json, debug):
|
2021-03-20 14:09:07 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json.get('target'):
|
2019-11-17 14:01:49 +00:00
|
|
|
if debug:
|
2021-03-20 09:49:43 +00:00
|
|
|
print('DEBUG: no target in inbox bookmark Add')
|
2021-03-20 14:09:07 +00:00
|
|
|
return False
|
2021-12-26 17:12:07 +00:00
|
|
|
if not has_object_stringType(message_json, debug):
|
2021-03-20 14:25:24 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if not isinstance(message_json['target'], str):
|
2019-11-17 14:01:49 +00:00
|
|
|
if debug:
|
2021-03-20 09:49:43 +00:00
|
|
|
print('DEBUG: inbox bookmark Add target is not string')
|
2021-03-20 14:09:07 +00:00
|
|
|
return False
|
2021-12-26 12:45:03 +00:00
|
|
|
domain_full = get_full_domain(domain, port)
|
2020-04-03 16:27:34 +00:00
|
|
|
nickname = handle.split('@')[0]
|
2021-12-26 10:00:46 +00:00
|
|
|
if not message_json['actor'].endswith(domain_full + '/users/' + nickname):
|
2019-11-17 14:01:49 +00:00
|
|
|
if debug:
|
2021-03-20 09:49:43 +00:00
|
|
|
print('DEBUG: inbox bookmark Add unexpected actor')
|
2021-03-20 14:09:07 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json['target'].endswith(message_json['actor'] +
|
|
|
|
'/tlbookmarks'):
|
2021-03-20 09:49:43 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: inbox bookmark Add target invalid ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json['target'])
|
2021-03-20 14:09:07 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['object']['type'] != 'Document':
|
2021-03-20 09:49:43 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: inbox bookmark Add type is not Document')
|
2021-03-20 14:09:07 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json['object'].get('url'):
|
2021-03-20 09:49:43 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: inbox bookmark Add missing url')
|
2021-03-20 14:09:07 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if '/statuses/' not in message_json['object']['url']:
|
2021-03-20 09:49:43 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: inbox bookmark Add missing statuses un url')
|
2021-03-20 14:09:07 +00:00
|
|
|
return False
|
2021-03-20 09:49:43 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: c2s inbox bookmark Add request arrived in outbox')
|
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
message_url = remove_id_ending(message_json['object']['url'])
|
2021-12-26 18:17:37 +00:00
|
|
|
domain = remove_domain_port(domain)
|
2022-01-02 19:57:50 +00:00
|
|
|
post_filename = locate_post(base_dir, nickname, domain, message_url)
|
2021-12-26 23:41:34 +00:00
|
|
|
if not post_filename:
|
2019-11-17 14:01:49 +00:00
|
|
|
if debug:
|
2021-03-20 09:49:43 +00:00
|
|
|
print('DEBUG: c2s inbox like post not found in inbox or outbox')
|
2022-01-02 19:57:50 +00:00
|
|
|
print(message_url)
|
2019-11-17 14:01:49 +00:00
|
|
|
return True
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
update_bookmarks_collection(recent_posts_cache, base_dir, post_filename,
|
|
|
|
message_json['object']['url'],
|
|
|
|
message_json['actor'], domain, debug)
|
2021-09-03 22:28:50 +00:00
|
|
|
# regenerate the html
|
2022-01-02 19:57:50 +00:00
|
|
|
bookmarked_post_json = load_json(post_filename, 0, 1)
|
|
|
|
if bookmarked_post_json:
|
2021-09-03 22:28:50 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
cached_post_filename = \
|
2021-12-26 23:41:34 +00:00
|
|
|
get_cached_post_filename(base_dir, nickname, domain,
|
2022-01-02 19:57:50 +00:00
|
|
|
bookmarked_post_json)
|
|
|
|
print('Bookmarked post json: ' + str(bookmarked_post_json))
|
2021-09-03 22:28:50 +00:00
|
|
|
print('Bookmarked post nickname: ' + nickname + ' ' + domain)
|
2022-01-02 19:57:50 +00:00
|
|
|
print('Bookmarked post cache: ' + str(cached_post_filename))
|
|
|
|
page_number = 1
|
2021-12-25 20:06:27 +00:00
|
|
|
show_published_date_only = False
|
2022-01-02 19:57:50 +00:00
|
|
|
show_individual_post_icons = True
|
|
|
|
manually_approve_followers = \
|
2021-12-28 20:32:11 +00:00
|
|
|
follower_approval_active(base_dir, nickname, domain)
|
2022-01-02 19:57:50 +00:00
|
|
|
not_dm = not is_dm(bookmarked_post_json)
|
2022-02-25 19:12:40 +00:00
|
|
|
timezone = get_account_timezone(base_dir, nickname, domain)
|
2021-12-29 21:55:09 +00:00
|
|
|
individual_post_as_html(signing_priv_key_pem, False,
|
|
|
|
recent_posts_cache, max_recent_posts,
|
2022-01-02 19:57:50 +00:00
|
|
|
translate, page_number, base_dir,
|
2021-12-29 21:55:09 +00:00
|
|
|
session, cached_webfingers, person_cache,
|
2022-01-02 19:57:50 +00:00
|
|
|
nickname, domain, port, bookmarked_post_json,
|
2021-12-29 21:55:09 +00:00
|
|
|
None, True, allow_deletion,
|
|
|
|
http_prefix, __version__,
|
|
|
|
'inbox',
|
|
|
|
yt_replace_domain,
|
|
|
|
twitter_replacement_domain,
|
|
|
|
show_published_date_only,
|
|
|
|
peertube_instances,
|
|
|
|
allow_local_network_access,
|
|
|
|
theme_name, system_language,
|
2022-01-02 19:57:50 +00:00
|
|
|
max_like_count, not_dm,
|
|
|
|
show_individual_post_icons,
|
|
|
|
manually_approve_followers,
|
2021-12-29 21:55:09 +00:00
|
|
|
False, True, False, cw_lists,
|
2022-02-25 19:12:40 +00:00
|
|
|
lists_enabled, timezone)
|
2019-11-17 14:01:49 +00:00
|
|
|
return True
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _receive_undo_bookmark(recent_posts_cache: {},
|
2022-01-02 19:57:50 +00:00
|
|
|
session, handle: str, is_group: bool, base_dir: str,
|
2021-12-29 21:55:09 +00:00
|
|
|
http_prefix: str, domain: str, port: int,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads: [], post_log: [],
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers: {},
|
|
|
|
person_cache: {}, message_json: {},
|
|
|
|
federation_list: [],
|
|
|
|
debug: bool, signing_priv_key_pem: str,
|
|
|
|
max_recent_posts: int, translate: {},
|
|
|
|
allow_deletion: bool,
|
|
|
|
yt_replace_domain: str,
|
|
|
|
twitter_replacement_domain: str,
|
|
|
|
peertube_instances: [],
|
|
|
|
allow_local_network_access: bool,
|
|
|
|
theme_name: str, system_language: str,
|
|
|
|
max_like_count: int, cw_lists: {},
|
|
|
|
lists_enabled: str) -> bool:
|
2019-11-17 14:01:49 +00:00
|
|
|
"""Receives an undo bookmark activity within the POST section of HTTPServer
|
|
|
|
"""
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json.get('type'):
|
2021-03-20 14:09:07 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['type'] != 'Remove':
|
2021-03-20 14:09:07 +00:00
|
|
|
return False
|
2021-12-26 17:15:04 +00:00
|
|
|
if not has_actor(message_json, debug):
|
2021-03-20 14:09:07 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json.get('target'):
|
2019-11-17 14:01:49 +00:00
|
|
|
if debug:
|
2021-03-20 09:49:43 +00:00
|
|
|
print('DEBUG: no target in inbox undo bookmark Remove')
|
2021-03-20 14:09:07 +00:00
|
|
|
return False
|
2021-12-26 17:12:07 +00:00
|
|
|
if not has_object_stringType(message_json, debug):
|
2021-03-20 14:09:07 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if not isinstance(message_json['target'], str):
|
2019-11-17 14:01:49 +00:00
|
|
|
if debug:
|
2021-03-20 14:25:24 +00:00
|
|
|
print('DEBUG: inbox Remove bookmark target is not string')
|
2021-03-20 14:09:07 +00:00
|
|
|
return False
|
2021-12-26 12:45:03 +00:00
|
|
|
domain_full = get_full_domain(domain, port)
|
2020-04-03 16:27:34 +00:00
|
|
|
nickname = handle.split('@')[0]
|
2021-12-26 10:00:46 +00:00
|
|
|
if not message_json['actor'].endswith(domain_full + '/users/' + nickname):
|
2019-11-17 14:01:49 +00:00
|
|
|
if debug:
|
2021-03-20 09:49:43 +00:00
|
|
|
print('DEBUG: inbox undo bookmark Remove unexpected actor')
|
2021-03-20 14:09:07 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json['target'].endswith(message_json['actor'] +
|
|
|
|
'/tlbookmarks'):
|
2021-03-20 09:49:43 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: inbox undo bookmark Remove target invalid ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json['target'])
|
2021-03-20 14:09:07 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['object']['type'] != 'Document':
|
2021-03-20 09:49:43 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: inbox undo bookmark Remove type is not Document')
|
2021-03-20 14:09:07 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json['object'].get('url'):
|
2021-03-20 09:49:43 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: inbox undo bookmark Remove missing url')
|
2021-03-20 14:09:07 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if '/statuses/' not in message_json['object']['url']:
|
2021-03-20 09:49:43 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: inbox undo bookmark Remove missing statuses un url')
|
2021-03-20 14:09:07 +00:00
|
|
|
return False
|
2021-03-20 09:49:43 +00:00
|
|
|
if debug:
|
2021-03-20 14:09:07 +00:00
|
|
|
print('DEBUG: c2s inbox Remove bookmark ' +
|
2021-03-20 09:49:43 +00:00
|
|
|
'request arrived in outbox')
|
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
message_url = remove_id_ending(message_json['object']['url'])
|
2021-12-26 18:17:37 +00:00
|
|
|
domain = remove_domain_port(domain)
|
2022-01-02 19:57:50 +00:00
|
|
|
post_filename = locate_post(base_dir, nickname, domain, message_url)
|
2021-12-26 23:41:34 +00:00
|
|
|
if not post_filename:
|
2019-11-17 14:01:49 +00:00
|
|
|
if debug:
|
2021-03-20 09:49:43 +00:00
|
|
|
print('DEBUG: c2s inbox like post not found in inbox or outbox')
|
2022-01-02 19:57:50 +00:00
|
|
|
print(message_url)
|
2019-11-17 14:01:49 +00:00
|
|
|
return True
|
2021-03-20 09:49:43 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
undo_bookmarks_collection_entry(recent_posts_cache, base_dir,
|
|
|
|
post_filename,
|
|
|
|
message_json['object']['url'],
|
|
|
|
message_json['actor'], domain, debug)
|
2021-09-03 22:28:50 +00:00
|
|
|
# regenerate the html
|
2022-01-02 19:57:50 +00:00
|
|
|
bookmarked_post_json = load_json(post_filename, 0, 1)
|
|
|
|
if bookmarked_post_json:
|
2021-09-03 22:28:50 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
cached_post_filename = \
|
2021-12-26 23:41:34 +00:00
|
|
|
get_cached_post_filename(base_dir, nickname, domain,
|
2022-01-02 19:57:50 +00:00
|
|
|
bookmarked_post_json)
|
|
|
|
print('Unbookmarked post json: ' + str(bookmarked_post_json))
|
2021-09-03 22:28:50 +00:00
|
|
|
print('Unbookmarked post nickname: ' + nickname + ' ' + domain)
|
2022-01-02 19:57:50 +00:00
|
|
|
print('Unbookmarked post cache: ' + str(cached_post_filename))
|
|
|
|
page_number = 1
|
2021-12-25 20:06:27 +00:00
|
|
|
show_published_date_only = False
|
2022-01-02 19:57:50 +00:00
|
|
|
show_individual_post_icons = True
|
|
|
|
manually_approve_followers = \
|
2021-12-28 20:32:11 +00:00
|
|
|
follower_approval_active(base_dir, nickname, domain)
|
2022-01-02 19:57:50 +00:00
|
|
|
not_dm = not is_dm(bookmarked_post_json)
|
2022-02-25 19:12:40 +00:00
|
|
|
timezone = get_account_timezone(base_dir, nickname, domain)
|
2021-12-29 21:55:09 +00:00
|
|
|
individual_post_as_html(signing_priv_key_pem, False,
|
|
|
|
recent_posts_cache, max_recent_posts,
|
2022-01-02 19:57:50 +00:00
|
|
|
translate, page_number, base_dir,
|
2021-12-29 21:55:09 +00:00
|
|
|
session, cached_webfingers, person_cache,
|
2022-01-02 19:57:50 +00:00
|
|
|
nickname, domain, port, bookmarked_post_json,
|
2021-12-29 21:55:09 +00:00
|
|
|
None, True, allow_deletion,
|
|
|
|
http_prefix, __version__,
|
|
|
|
'inbox',
|
|
|
|
yt_replace_domain,
|
|
|
|
twitter_replacement_domain,
|
|
|
|
show_published_date_only,
|
|
|
|
peertube_instances,
|
|
|
|
allow_local_network_access,
|
|
|
|
theme_name, system_language,
|
2022-01-02 19:57:50 +00:00
|
|
|
max_like_count, not_dm,
|
|
|
|
show_individual_post_icons,
|
|
|
|
manually_approve_followers,
|
2022-02-25 19:12:40 +00:00
|
|
|
False, True, False, cw_lists, lists_enabled,
|
|
|
|
timezone)
|
2019-11-17 14:01:49 +00:00
|
|
|
return True
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
def _receive_delete(session, handle: str, is_group: bool, base_dir: str,
|
2021-12-29 21:55:09 +00:00
|
|
|
http_prefix: str, domain: str, port: int,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads: [], post_log: [], cached_webfingers: {},
|
2021-12-29 21:55:09 +00:00
|
|
|
person_cache: {}, message_json: {}, federation_list: [],
|
|
|
|
debug: bool, allow_deletion: bool,
|
|
|
|
recent_posts_cache: {}) -> bool:
|
2019-07-11 21:38:28 +00:00
|
|
|
"""Receives a Delete activity within the POST section of HTTPServer
|
|
|
|
"""
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['type'] != 'Delete':
|
2019-07-11 21:38:28 +00:00
|
|
|
return False
|
2021-12-26 17:15:04 +00:00
|
|
|
if not has_actor(message_json, debug):
|
2019-07-11 21:38:28 +00:00
|
|
|
return False
|
2019-07-17 17:16:48 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: Delete activity arrived')
|
2021-12-26 17:12:07 +00:00
|
|
|
if not has_object_string(message_json, debug):
|
2019-07-11 21:38:28 +00:00
|
|
|
return False
|
2021-12-26 12:45:03 +00:00
|
|
|
domain_full = get_full_domain(domain, port)
|
2022-01-02 19:57:50 +00:00
|
|
|
delete_prefix = http_prefix + '://' + domain_full + '/'
|
2021-12-25 21:29:53 +00:00
|
|
|
if (not allow_deletion and
|
2022-01-02 19:57:50 +00:00
|
|
|
(not message_json['object'].startswith(delete_prefix) or
|
|
|
|
not message_json['actor'].startswith(delete_prefix))):
|
2019-08-12 18:02:29 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: delete not permitted from other instances')
|
2020-03-22 21:16:02 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json.get('to'):
|
2019-07-11 21:38:28 +00:00
|
|
|
if debug:
|
2021-12-25 23:51:19 +00:00
|
|
|
print('DEBUG: ' + message_json['type'] + ' has no "to" list')
|
2019-07-11 21:38:28 +00:00
|
|
|
return False
|
2021-12-26 12:19:00 +00:00
|
|
|
if not has_users_path(message_json['actor']):
|
2019-07-11 21:38:28 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: ' +
|
|
|
|
'"users" or "profile" missing from actor in ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json['type'])
|
2019-07-11 21:38:28 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if '/statuses/' not in message_json['object']:
|
2019-07-11 21:38:28 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: "statuses" missing from object in ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json['type'])
|
2019-07-11 21:38:28 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['actor'] not in message_json['object']:
|
2019-07-11 21:42:15 +00:00
|
|
|
if debug:
|
2020-03-22 21:16:02 +00:00
|
|
|
print('DEBUG: actor is not the owner of the post to be deleted')
|
2021-12-25 16:17:53 +00:00
|
|
|
if not os.path.isdir(base_dir + '/accounts/' + handle):
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: unknown recipient of like - ' + handle)
|
2019-07-11 21:38:28 +00:00
|
|
|
# if this post in the outbox of the person?
|
2022-01-02 19:57:50 +00:00
|
|
|
message_id = remove_id_ending(message_json['object'])
|
|
|
|
remove_moderation_post_from_index(base_dir, message_id, debug)
|
|
|
|
handle_nickname = handle.split('@')[0]
|
|
|
|
handle_domain = handle.split('@')[1]
|
|
|
|
post_filename = locate_post(base_dir, handle_nickname,
|
|
|
|
handle_domain, message_id)
|
2021-12-26 23:41:34 +00:00
|
|
|
if not post_filename:
|
2019-07-11 21:38:28 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: delete post not found in inbox or outbox')
|
2022-01-02 19:57:50 +00:00
|
|
|
print(message_id)
|
2019-07-14 14:42:00 +00:00
|
|
|
return True
|
2022-01-02 19:57:50 +00:00
|
|
|
delete_post(base_dir, http_prefix, handle_nickname,
|
|
|
|
handle_domain, post_filename, debug,
|
2021-12-28 14:55:45 +00:00
|
|
|
recent_posts_cache)
|
2019-07-11 21:38:28 +00:00
|
|
|
if debug:
|
2021-12-26 23:41:34 +00:00
|
|
|
print('DEBUG: post deleted - ' + post_filename)
|
2020-11-27 10:38:51 +00:00
|
|
|
|
|
|
|
# also delete any local blogs saved to the news actor
|
2022-01-02 19:57:50 +00:00
|
|
|
if handle_nickname != 'news' and handle_domain == domain_full:
|
2021-12-26 23:41:34 +00:00
|
|
|
post_filename = locate_post(base_dir, 'news',
|
2022-01-02 19:57:50 +00:00
|
|
|
handle_domain, message_id)
|
2021-12-26 23:41:34 +00:00
|
|
|
if post_filename:
|
2021-12-28 14:55:45 +00:00
|
|
|
delete_post(base_dir, http_prefix, 'news',
|
2022-01-02 19:57:50 +00:00
|
|
|
handle_domain, post_filename, debug,
|
2021-12-28 14:55:45 +00:00
|
|
|
recent_posts_cache)
|
2020-11-27 10:38:51 +00:00
|
|
|
if debug:
|
2021-12-26 23:41:34 +00:00
|
|
|
print('DEBUG: blog post deleted - ' + post_filename)
|
2019-07-11 21:38:28 +00:00
|
|
|
return True
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _receive_announce(recent_posts_cache: {},
|
2022-01-02 19:57:50 +00:00
|
|
|
session, handle: str, is_group: bool, base_dir: str,
|
2021-12-29 21:55:09 +00:00
|
|
|
http_prefix: str,
|
2022-03-11 13:27:54 +00:00
|
|
|
domain: str,
|
|
|
|
onion_domain: str, i2p_domain: str, port: int,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads: [], post_log: [], cached_webfingers: {},
|
2021-12-29 21:55:09 +00:00
|
|
|
person_cache: {}, message_json: {}, federation_list: [],
|
|
|
|
debug: bool, translate: {},
|
|
|
|
yt_replace_domain: str,
|
|
|
|
twitter_replacement_domain: str,
|
|
|
|
allow_local_network_access: bool,
|
|
|
|
theme_name: str, system_language: str,
|
|
|
|
signing_priv_key_pem: str,
|
|
|
|
max_recent_posts: int,
|
|
|
|
allow_deletion: bool,
|
|
|
|
peertube_instances: [],
|
|
|
|
max_like_count: int, cw_lists: {},
|
|
|
|
lists_enabled: str) -> bool:
|
2019-07-12 09:41:57 +00:00
|
|
|
"""Receives an announce activity within the POST section of HTTPServer
|
2019-07-11 19:31:02 +00:00
|
|
|
"""
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['type'] != 'Announce':
|
2019-07-11 19:31:02 +00:00
|
|
|
return False
|
2019-09-29 09:15:10 +00:00
|
|
|
if '@' not in handle:
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: bad handle ' + handle)
|
2020-03-22 21:16:02 +00:00
|
|
|
return False
|
2021-12-26 17:15:04 +00:00
|
|
|
if not has_actor(message_json, debug):
|
2019-07-11 19:31:02 +00:00
|
|
|
return False
|
2019-07-16 22:57:45 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: receiving announce on ' + handle)
|
2021-12-26 17:12:07 +00:00
|
|
|
if not has_object_string(message_json, debug):
|
2019-07-11 19:31:02 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json.get('to'):
|
2019-07-11 19:31:02 +00:00
|
|
|
if debug:
|
2021-12-25 23:51:19 +00:00
|
|
|
print('DEBUG: ' + message_json['type'] + ' has no "to" list')
|
2019-07-11 19:31:02 +00:00
|
|
|
return False
|
2021-12-26 12:19:00 +00:00
|
|
|
if not has_users_path(message_json['actor']):
|
2019-07-11 19:31:02 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: ' +
|
|
|
|
'"users" or "profile" missing from actor in ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json['type'])
|
2019-09-09 09:41:31 +00:00
|
|
|
return False
|
2021-12-29 21:55:09 +00:00
|
|
|
if is_self_announce(message_json):
|
2021-06-03 08:33:50 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: self-boost rejected')
|
|
|
|
return False
|
2021-12-26 12:19:00 +00:00
|
|
|
if not has_users_path(message_json['object']):
|
2019-09-09 09:41:31 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: ' +
|
|
|
|
'"users", "channel" or "profile" missing in ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json['type'])
|
2019-07-11 19:31:02 +00:00
|
|
|
return False
|
2020-05-25 09:21:34 +00:00
|
|
|
|
2021-12-31 23:07:23 +00:00
|
|
|
blocked_cache = {}
|
2021-12-27 17:20:01 +00:00
|
|
|
prefixes = get_protocol_prefixes()
|
2020-05-25 09:21:34 +00:00
|
|
|
# is the domain of the announce actor blocked?
|
2022-01-02 19:57:50 +00:00
|
|
|
object_domain = message_json['object']
|
2020-06-11 12:04:42 +00:00
|
|
|
for prefix in prefixes:
|
2022-01-02 19:57:50 +00:00
|
|
|
object_domain = object_domain.replace(prefix, '')
|
|
|
|
if '/' in object_domain:
|
|
|
|
object_domain = object_domain.split('/')[0]
|
|
|
|
if is_blocked_domain(base_dir, object_domain):
|
2019-09-09 16:02:14 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: announced domain is blocked')
|
|
|
|
return False
|
2021-12-25 16:17:53 +00:00
|
|
|
if not os.path.isdir(base_dir + '/accounts/' + handle):
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: unknown recipient of announce - ' + handle)
|
2020-05-25 09:21:34 +00:00
|
|
|
|
|
|
|
# is the announce actor blocked?
|
2020-04-03 16:27:34 +00:00
|
|
|
nickname = handle.split('@')[0]
|
2022-01-02 19:57:50 +00:00
|
|
|
actor_nickname = get_nickname_from_actor(message_json['actor'])
|
|
|
|
actor_domain, _ = get_domain_from_actor(message_json['actor'])
|
|
|
|
if is_blocked(base_dir, nickname, domain, actor_nickname, actor_domain):
|
2020-05-25 09:21:34 +00:00
|
|
|
print('Receive announce blocked for actor: ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
actor_nickname + '@' + actor_domain)
|
2020-05-25 09:21:34 +00:00
|
|
|
return False
|
|
|
|
|
2021-08-23 12:30:16 +00:00
|
|
|
# also check the actor for the url being announced
|
2021-12-27 22:19:18 +00:00
|
|
|
announcedActorNickname = get_nickname_from_actor(message_json['object'])
|
2021-08-23 12:30:16 +00:00
|
|
|
announcedActorDomain, announcedActorPort = \
|
2021-12-27 19:05:25 +00:00
|
|
|
get_domain_from_actor(message_json['object'])
|
2021-12-29 21:55:09 +00:00
|
|
|
if is_blocked(base_dir, nickname, domain,
|
|
|
|
announcedActorNickname, announcedActorDomain):
|
2021-08-23 12:31:37 +00:00
|
|
|
print('Receive announce object blocked for actor: ' +
|
2021-08-23 12:30:16 +00:00
|
|
|
announcedActorNickname + '@' + announcedActorDomain)
|
|
|
|
return False
|
|
|
|
|
2020-05-25 09:21:34 +00:00
|
|
|
# is this post in the outbox of the person?
|
2021-12-26 23:41:34 +00:00
|
|
|
post_filename = locate_post(base_dir, nickname, domain,
|
|
|
|
message_json['object'])
|
|
|
|
if not post_filename:
|
2019-07-11 19:31:02 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: announce post not found in inbox or outbox')
|
2021-12-25 23:51:19 +00:00
|
|
|
print(message_json['object'])
|
2019-07-11 19:31:02 +00:00
|
|
|
return True
|
2021-12-26 23:41:34 +00:00
|
|
|
update_announce_collection(recent_posts_cache, base_dir, post_filename,
|
|
|
|
message_json['actor'], nickname, domain, debug)
|
2019-09-29 10:13:00 +00:00
|
|
|
if debug:
|
2021-12-25 23:51:19 +00:00
|
|
|
print('DEBUG: Downloading announce post ' + message_json['actor'] +
|
|
|
|
' -> ' + message_json['object'])
|
2021-12-26 12:45:03 +00:00
|
|
|
domain_full = get_full_domain(domain, port)
|
2021-09-04 16:11:48 +00:00
|
|
|
|
2021-09-04 17:10:29 +00:00
|
|
|
# Generate html. This also downloads the announced post.
|
2022-01-02 19:57:50 +00:00
|
|
|
page_number = 1
|
2021-12-25 20:06:27 +00:00
|
|
|
show_published_date_only = False
|
2022-01-02 19:57:50 +00:00
|
|
|
show_individual_post_icons = True
|
|
|
|
manually_approve_followers = \
|
2021-12-28 20:32:11 +00:00
|
|
|
follower_approval_active(base_dir, nickname, domain)
|
2022-01-02 19:57:50 +00:00
|
|
|
not_dm = True
|
2021-09-04 18:05:52 +00:00
|
|
|
if debug:
|
2021-12-25 23:51:19 +00:00
|
|
|
print('Generating html for announce ' + message_json['id'])
|
2022-02-25 19:12:40 +00:00
|
|
|
timezone = get_account_timezone(base_dir, nickname, domain)
|
2022-01-02 19:57:50 +00:00
|
|
|
announce_html = \
|
2021-12-29 21:55:09 +00:00
|
|
|
individual_post_as_html(signing_priv_key_pem, True,
|
|
|
|
recent_posts_cache, max_recent_posts,
|
2022-01-02 19:57:50 +00:00
|
|
|
translate, page_number, base_dir,
|
2021-12-29 21:55:09 +00:00
|
|
|
session, cached_webfingers, person_cache,
|
|
|
|
nickname, domain, port, message_json,
|
|
|
|
None, True, allow_deletion,
|
|
|
|
http_prefix, __version__,
|
|
|
|
'inbox',
|
|
|
|
yt_replace_domain,
|
|
|
|
twitter_replacement_domain,
|
|
|
|
show_published_date_only,
|
|
|
|
peertube_instances,
|
|
|
|
allow_local_network_access,
|
|
|
|
theme_name, system_language,
|
2022-01-02 19:57:50 +00:00
|
|
|
max_like_count, not_dm,
|
|
|
|
show_individual_post_icons,
|
|
|
|
manually_approve_followers,
|
2021-12-29 21:55:09 +00:00
|
|
|
False, True, False, cw_lists,
|
2022-02-25 19:12:40 +00:00
|
|
|
lists_enabled, timezone)
|
2022-01-02 19:57:50 +00:00
|
|
|
if not announce_html:
|
2021-09-04 16:11:48 +00:00
|
|
|
print('WARN: Unable to generate html for announce ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
str(message_json))
|
2021-09-04 16:11:48 +00:00
|
|
|
else:
|
2021-09-04 18:05:52 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('Generated announce html ' + announce_html.replace('\n', ''))
|
2021-09-04 16:11:48 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
post_json_object = download_announce(session, base_dir,
|
|
|
|
http_prefix,
|
|
|
|
nickname, domain,
|
|
|
|
message_json,
|
|
|
|
__version__, translate,
|
|
|
|
yt_replace_domain,
|
|
|
|
twitter_replacement_domain,
|
|
|
|
allow_local_network_access,
|
|
|
|
recent_posts_cache, debug,
|
|
|
|
system_language,
|
|
|
|
domain_full, person_cache,
|
|
|
|
signing_priv_key_pem,
|
2021-12-31 23:07:23 +00:00
|
|
|
blocked_cache)
|
2021-12-25 22:09:19 +00:00
|
|
|
if not post_json_object:
|
2021-12-25 23:51:19 +00:00
|
|
|
print('WARN: unable to download announce: ' + str(message_json))
|
2022-01-02 19:57:50 +00:00
|
|
|
not_in_onion = True
|
2021-12-25 20:43:43 +00:00
|
|
|
if onion_domain:
|
2021-12-25 23:51:19 +00:00
|
|
|
if onion_domain in message_json['object']:
|
2022-01-02 19:57:50 +00:00
|
|
|
not_in_onion = False
|
|
|
|
if domain not in message_json['object'] and not_in_onion:
|
2021-12-26 23:41:34 +00:00
|
|
|
if os.path.isfile(post_filename):
|
2020-12-21 21:40:29 +00:00
|
|
|
# if the announce can't be downloaded then remove it
|
2021-09-05 10:17:43 +00:00
|
|
|
try:
|
2021-12-26 23:41:34 +00:00
|
|
|
os.remove(post_filename)
|
2021-11-25 18:42:38 +00:00
|
|
|
except OSError:
|
2021-12-29 21:55:09 +00:00
|
|
|
print('EX: _receive_announce unable to delete ' +
|
2021-12-26 23:41:34 +00:00
|
|
|
str(post_filename))
|
2020-12-21 21:40:29 +00:00
|
|
|
else:
|
2019-10-01 13:23:22 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: Announce post downloaded for ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json['actor'] + ' -> ' + message_json['object'])
|
2021-12-29 21:55:09 +00:00
|
|
|
store_hash_tags(base_dir, nickname, domain,
|
|
|
|
http_prefix, domain_full,
|
|
|
|
post_json_object, translate)
|
2019-09-30 19:13:14 +00:00
|
|
|
# Try to obtain the actor for this person
|
|
|
|
# so that their avatar can be shown
|
2022-01-02 19:57:50 +00:00
|
|
|
lookup_actor = None
|
2021-12-25 22:09:19 +00:00
|
|
|
if post_json_object.get('attributedTo'):
|
|
|
|
if isinstance(post_json_object['attributedTo'], str):
|
2022-01-02 19:57:50 +00:00
|
|
|
lookup_actor = post_json_object['attributedTo']
|
2019-10-01 14:11:15 +00:00
|
|
|
else:
|
2021-12-26 10:57:03 +00:00
|
|
|
if has_object_dict(post_json_object):
|
2021-12-25 22:09:19 +00:00
|
|
|
if post_json_object['object'].get('attributedTo'):
|
|
|
|
attrib = post_json_object['object']['attributedTo']
|
2021-06-22 15:45:59 +00:00
|
|
|
if isinstance(attrib, str):
|
2022-01-02 19:57:50 +00:00
|
|
|
lookup_actor = attrib
|
|
|
|
if lookup_actor:
|
|
|
|
if has_users_path(lookup_actor):
|
|
|
|
if '/statuses/' in lookup_actor:
|
|
|
|
lookup_actor = lookup_actor.split('/statuses/')[0]
|
2019-10-01 12:35:39 +00:00
|
|
|
|
2021-12-26 20:43:03 +00:00
|
|
|
if is_recent_post(post_json_object, 3):
|
2021-12-26 23:41:34 +00:00
|
|
|
if not os.path.isfile(post_filename + '.tts'):
|
2021-12-26 12:45:03 +00:00
|
|
|
domain_full = get_full_domain(domain, port)
|
2021-12-29 21:55:09 +00:00
|
|
|
update_speaker(base_dir, http_prefix,
|
|
|
|
nickname, domain, domain_full,
|
|
|
|
post_json_object, person_cache,
|
2022-01-02 19:57:50 +00:00
|
|
|
translate, lookup_actor,
|
2021-12-29 21:55:09 +00:00
|
|
|
theme_name)
|
2021-11-25 21:18:53 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(post_filename + '.tts', 'w+') as ttsfile:
|
|
|
|
ttsfile.write('\n')
|
2021-11-25 21:18:53 +00:00
|
|
|
except OSError:
|
2021-11-25 22:22:54 +00:00
|
|
|
print('EX: unable to write recent post ' +
|
2021-12-26 23:41:34 +00:00
|
|
|
post_filename)
|
2021-03-01 22:27:36 +00:00
|
|
|
|
2019-10-01 12:50:06 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: Obtaining actor for announce post ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
lookup_actor)
|
2019-10-01 13:23:22 +00:00
|
|
|
for tries in range(6):
|
2022-01-02 19:57:50 +00:00
|
|
|
pub_key = \
|
|
|
|
get_person_pub_key(base_dir, session, lookup_actor,
|
2021-12-29 21:55:09 +00:00
|
|
|
person_cache, debug,
|
|
|
|
__version__, http_prefix,
|
|
|
|
domain, onion_domain,
|
2022-03-11 13:27:54 +00:00
|
|
|
i2p_domain,
|
2021-12-29 21:55:09 +00:00
|
|
|
signing_priv_key_pem)
|
2022-01-02 19:57:50 +00:00
|
|
|
if pub_key:
|
2021-03-14 20:15:44 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: public key obtained for announce: ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
lookup_actor)
|
2019-10-01 13:23:22 +00:00
|
|
|
break
|
|
|
|
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: Retry ' + str(tries + 1) +
|
2022-01-02 19:57:50 +00:00
|
|
|
' obtaining actor for ' + lookup_actor)
|
2020-03-22 21:16:02 +00:00
|
|
|
time.sleep(5)
|
2020-12-21 21:40:29 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: announced/repeated post arrived in inbox')
|
2019-07-11 19:31:02 +00:00
|
|
|
return True
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _receive_undo_announce(recent_posts_cache: {},
|
2022-01-02 19:57:50 +00:00
|
|
|
session, handle: str, is_group: bool, base_dir: str,
|
2021-12-29 21:55:09 +00:00
|
|
|
http_prefix: str, domain: str, port: int,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads: [], post_log: [],
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers: {},
|
|
|
|
person_cache: {}, message_json: {},
|
|
|
|
federation_list: [],
|
|
|
|
debug: bool) -> bool:
|
2019-07-12 09:41:57 +00:00
|
|
|
"""Receives an undo announce activity within the POST section of HTTPServer
|
|
|
|
"""
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['type'] != 'Undo':
|
2019-07-12 09:41:57 +00:00
|
|
|
return False
|
2021-12-26 17:15:04 +00:00
|
|
|
if not has_actor(message_json, debug):
|
2019-07-12 09:41:57 +00:00
|
|
|
return False
|
2021-12-26 10:57:03 +00:00
|
|
|
if not has_object_dict(message_json):
|
2019-07-12 09:41:57 +00:00
|
|
|
return False
|
2021-12-26 15:54:46 +00:00
|
|
|
if not has_object_string_object(message_json, debug):
|
2019-07-12 09:41:57 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['object']['type'] != 'Announce':
|
2020-03-22 21:16:02 +00:00
|
|
|
return False
|
2021-12-26 12:19:00 +00:00
|
|
|
if not has_users_path(message_json['actor']):
|
2019-07-12 09:41:57 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: "users" or "profile" missing from actor in ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json['type'] + ' announce')
|
2019-07-12 09:41:57 +00:00
|
|
|
return False
|
2021-12-25 16:17:53 +00:00
|
|
|
if not os.path.isdir(base_dir + '/accounts/' + handle):
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: unknown recipient of undo announce - ' + handle)
|
2019-07-12 09:41:57 +00:00
|
|
|
# if this post in the outbox of the person?
|
2022-01-02 19:57:50 +00:00
|
|
|
handle_name = handle.split('@')[0]
|
|
|
|
handle_dom = handle.split('@')[1]
|
|
|
|
post_filename = locate_post(base_dir, handle_name, handle_dom,
|
2021-12-26 23:41:34 +00:00
|
|
|
message_json['object']['object'])
|
|
|
|
if not post_filename:
|
2019-07-12 09:41:57 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: undo announce post not found in inbox or outbox')
|
2021-12-25 23:51:19 +00:00
|
|
|
print(message_json['object']['object'])
|
2019-07-12 09:41:57 +00:00
|
|
|
return True
|
|
|
|
if debug:
|
|
|
|
print('DEBUG: announced/repeated post to be undone found in inbox')
|
2019-09-17 12:14:36 +00:00
|
|
|
|
2021-12-26 23:41:34 +00:00
|
|
|
post_json_object = load_json(post_filename)
|
2021-12-25 22:09:19 +00:00
|
|
|
if post_json_object:
|
|
|
|
if not post_json_object.get('type'):
|
|
|
|
if post_json_object['type'] != 'Announce':
|
2019-07-12 10:09:15 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print("DEBUG: Attempt to undo something " +
|
|
|
|
"which isn't an announcement")
|
2020-03-22 21:16:02 +00:00
|
|
|
return False
|
2021-12-27 10:55:48 +00:00
|
|
|
undo_announce_collection_entry(recent_posts_cache, base_dir, post_filename,
|
|
|
|
message_json['actor'], domain, debug)
|
2021-12-26 23:41:34 +00:00
|
|
|
if os.path.isfile(post_filename):
|
2021-09-05 10:17:43 +00:00
|
|
|
try:
|
2021-12-26 23:41:34 +00:00
|
|
|
os.remove(post_filename)
|
2021-11-25 18:42:38 +00:00
|
|
|
except OSError:
|
2021-12-29 21:55:09 +00:00
|
|
|
print('EX: _receive_undo_announce unable to delete ' +
|
2021-12-26 23:41:34 +00:00
|
|
|
str(post_filename))
|
2019-07-12 09:41:57 +00:00
|
|
|
return True
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def json_post_allows_comments(post_json_object: {}) -> bool:
|
2020-08-21 18:32:16 +00:00
|
|
|
"""Returns true if the given post allows comments/replies
|
|
|
|
"""
|
2021-12-25 22:09:19 +00:00
|
|
|
if 'commentsEnabled' in post_json_object:
|
|
|
|
return post_json_object['commentsEnabled']
|
|
|
|
if 'rejectReplies' in post_json_object:
|
|
|
|
return not post_json_object['rejectReplies']
|
|
|
|
if post_json_object.get('object'):
|
2021-12-26 10:57:03 +00:00
|
|
|
if not has_object_dict(post_json_object):
|
2020-08-21 18:32:16 +00:00
|
|
|
return False
|
2022-01-02 19:57:50 +00:00
|
|
|
if 'commentsEnabled' in post_json_object['object']:
|
2021-12-25 22:09:19 +00:00
|
|
|
return post_json_object['object']['commentsEnabled']
|
2022-01-02 19:57:50 +00:00
|
|
|
if 'rejectReplies' in post_json_object['object']:
|
2021-12-25 22:09:19 +00:00
|
|
|
return not post_json_object['object']['rejectReplies']
|
2020-08-21 18:32:16 +00:00
|
|
|
return True
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _post_allow_comments(post_filename: str) -> bool:
|
2020-08-21 18:32:16 +00:00
|
|
|
"""Returns true if the given post allows comments/replies
|
|
|
|
"""
|
2021-12-26 23:41:34 +00:00
|
|
|
post_json_object = load_json(post_filename)
|
2021-12-25 22:09:19 +00:00
|
|
|
if not post_json_object:
|
2020-08-21 18:32:16 +00:00
|
|
|
return False
|
2021-12-29 21:55:09 +00:00
|
|
|
return json_post_allows_comments(post_json_object)
|
2020-08-21 18:32:16 +00:00
|
|
|
|
|
|
|
|
2021-12-28 20:32:11 +00:00
|
|
|
def populate_replies(base_dir: str, http_prefix: str, domain: str,
|
|
|
|
message_json: {}, max_replies: int, debug: bool) -> bool:
|
2020-03-22 21:16:02 +00:00
|
|
|
"""Updates the list of replies for a post on this domain if
|
2019-07-12 12:35:38 +00:00
|
|
|
a reply to it arrives
|
|
|
|
"""
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json.get('id'):
|
2019-07-12 12:35:38 +00:00
|
|
|
return False
|
2021-12-26 10:57:03 +00:00
|
|
|
if not has_object_dict(message_json):
|
2019-07-13 20:16:07 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json['object'].get('inReplyTo'):
|
2019-07-13 20:16:07 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json['object'].get('to'):
|
2019-07-12 12:35:38 +00:00
|
|
|
return False
|
2022-01-02 19:57:50 +00:00
|
|
|
reply_to = message_json['object']['inReplyTo']
|
|
|
|
if not isinstance(reply_to, str):
|
2020-08-28 14:45:07 +00:00
|
|
|
return False
|
2019-07-12 12:35:38 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: post contains a reply')
|
|
|
|
# is this a reply to a post on this domain?
|
2022-01-02 19:57:50 +00:00
|
|
|
if not reply_to.startswith(http_prefix + '://' + domain + '/'):
|
2019-07-12 12:35:38 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: post is a reply to another not on this domain')
|
2022-01-02 19:57:50 +00:00
|
|
|
print(reply_to)
|
2021-12-25 17:09:22 +00:00
|
|
|
print('Expected: ' + http_prefix + '://' + domain + '/')
|
2019-07-12 12:35:38 +00:00
|
|
|
return False
|
2022-01-02 19:57:50 +00:00
|
|
|
reply_to_nickname = get_nickname_from_actor(reply_to)
|
|
|
|
if not reply_to_nickname:
|
|
|
|
print('DEBUG: no nickname found for ' + reply_to)
|
2019-07-12 12:35:38 +00:00
|
|
|
return False
|
2022-01-02 19:57:50 +00:00
|
|
|
reply_to_domain, _ = get_domain_from_actor(reply_to)
|
|
|
|
if not reply_to_domain:
|
2019-07-12 12:35:38 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('DEBUG: no domain found for ' + reply_to)
|
2019-07-12 12:35:38 +00:00
|
|
|
return False
|
2021-03-23 23:33:33 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
post_filename = locate_post(base_dir, reply_to_nickname,
|
|
|
|
reply_to_domain, reply_to)
|
2021-12-26 23:41:34 +00:00
|
|
|
if not post_filename:
|
2019-07-12 12:35:38 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('DEBUG: post may have expired - ' + reply_to)
|
2020-03-22 21:16:02 +00:00
|
|
|
return False
|
2021-03-23 22:52:00 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
if not _post_allow_comments(post_filename):
|
2020-08-21 18:32:16 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('DEBUG: post does not allow comments - ' + reply_to)
|
2020-08-21 18:32:16 +00:00
|
|
|
return False
|
2019-07-13 19:28:14 +00:00
|
|
|
# populate a text file containing the ids of replies
|
2022-01-02 19:57:50 +00:00
|
|
|
post_replies_filename = post_filename.replace('.json', '.replies')
|
|
|
|
message_id = remove_id_ending(message_json['id'])
|
|
|
|
if os.path.isfile(post_replies_filename):
|
|
|
|
num_lines = sum(1 for line in open(post_replies_filename))
|
|
|
|
if num_lines > max_replies:
|
2019-07-13 21:00:12 +00:00
|
|
|
return False
|
2022-01-02 19:57:50 +00:00
|
|
|
if message_id not in open(post_replies_filename).read():
|
2021-11-25 21:18:53 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(post_replies_filename, 'a+') as replies_file:
|
|
|
|
replies_file.write(message_id + '\n')
|
2021-11-25 21:18:53 +00:00
|
|
|
except OSError:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('EX: unable to append ' + post_replies_filename)
|
2019-07-13 19:28:14 +00:00
|
|
|
else:
|
2021-11-25 21:18:53 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(post_replies_filename, 'w+') as replies_file:
|
|
|
|
replies_file.write(message_id + '\n')
|
2021-11-25 21:18:53 +00:00
|
|
|
except OSError:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('EX: unable to write ' + post_replies_filename)
|
2019-07-13 19:28:14 +00:00
|
|
|
return True
|
2019-09-30 09:43:46 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _estimate_number_of_mentions(content: str) -> int:
|
2019-09-30 10:15:20 +00:00
|
|
|
"""Returns a rough estimate of the number of mentions
|
|
|
|
"""
|
2020-04-03 16:27:34 +00:00
|
|
|
return int(content.count('@') / 2)
|
|
|
|
|
2019-11-16 14:49:21 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _estimate_number_of_emoji(content: str) -> int:
|
2019-11-16 14:49:21 +00:00
|
|
|
"""Returns a rough estimate of the number of emoji
|
|
|
|
"""
|
2020-04-03 16:27:34 +00:00
|
|
|
return int(content.count(':') / 2)
|
2019-11-16 14:49:21 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _valid_post_content(base_dir: str, nickname: str, domain: str,
|
|
|
|
message_json: {}, max_mentions: int, max_emoji: int,
|
|
|
|
allow_local_network_access: bool, debug: bool,
|
|
|
|
system_language: str,
|
|
|
|
http_prefix: str, domain_full: str,
|
|
|
|
person_cache: {}) -> bool:
|
2019-09-30 09:43:46 +00:00
|
|
|
"""Is the content of a received post valid?
|
2019-09-30 10:15:20 +00:00
|
|
|
Check for bad html
|
|
|
|
Check for hellthreads
|
2021-12-14 10:11:10 +00:00
|
|
|
Check that the language is understood
|
|
|
|
Check if it's a git patch
|
|
|
|
Check number of tags and mentions is reasonable
|
2019-09-30 09:43:46 +00:00
|
|
|
"""
|
2021-12-26 10:57:03 +00:00
|
|
|
if not has_object_dict(message_json):
|
2019-09-30 09:43:46 +00:00
|
|
|
return True
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json['object'].get('content'):
|
2019-09-30 09:43:46 +00:00
|
|
|
return True
|
2019-11-29 22:45:56 +00:00
|
|
|
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json['object'].get('published'):
|
2019-11-29 22:45:56 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if 'T' not in message_json['object']['published']:
|
2019-11-29 22:45:56 +00:00
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if 'Z' not in message_json['object']['published']:
|
2019-11-29 22:45:56 +00:00
|
|
|
return False
|
2021-12-26 12:31:47 +00:00
|
|
|
if not valid_post_date(message_json['object']['published'], 90, debug):
|
2020-12-21 10:45:31 +00:00
|
|
|
return False
|
2020-07-10 14:15:01 +00:00
|
|
|
|
2021-08-19 22:16:32 +00:00
|
|
|
summary = None
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['object'].get('summary'):
|
|
|
|
summary = message_json['object']['summary']
|
2020-08-25 19:45:15 +00:00
|
|
|
if not isinstance(summary, str):
|
|
|
|
print('WARN: content warning is not a string')
|
|
|
|
return False
|
2021-12-29 21:55:09 +00:00
|
|
|
if summary != valid_content_warning(summary):
|
2020-08-25 19:45:15 +00:00
|
|
|
print('WARN: invalid content warning ' + summary)
|
|
|
|
return False
|
|
|
|
|
2021-09-19 09:03:19 +00:00
|
|
|
# check for patches before dangeousMarkup, which excludes code
|
2021-12-29 21:55:09 +00:00
|
|
|
if is_git_patch(base_dir, nickname, domain,
|
|
|
|
message_json['object']['type'],
|
|
|
|
summary,
|
|
|
|
message_json['object']['content']):
|
2020-05-02 11:08:38 +00:00
|
|
|
return True
|
2020-07-10 14:15:01 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
content_str = get_base_content_from_post(message_json, system_language)
|
|
|
|
if dangerous_markup(content_str, allow_local_network_access):
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['object'].get('id'):
|
|
|
|
print('REJECT ARBITRARY HTML: ' + message_json['object']['id'])
|
2020-07-10 14:15:01 +00:00
|
|
|
print('REJECT ARBITRARY HTML: bad string in post - ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
content_str)
|
2020-07-10 14:15:01 +00:00
|
|
|
return False
|
|
|
|
|
2019-09-30 10:35:49 +00:00
|
|
|
# check (rough) number of mentions
|
2022-01-02 19:57:50 +00:00
|
|
|
mentions_est = _estimate_number_of_mentions(content_str)
|
|
|
|
if mentions_est > max_mentions:
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['object'].get('id'):
|
|
|
|
print('REJECT HELLTHREAD: ' + message_json['object']['id'])
|
2020-04-03 16:27:34 +00:00
|
|
|
print('REJECT HELLTHREAD: Too many mentions in post - ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
content_str)
|
2019-11-16 14:49:21 +00:00
|
|
|
return False
|
2022-01-02 19:57:50 +00:00
|
|
|
if _estimate_number_of_emoji(content_str) > max_emoji:
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['object'].get('id'):
|
|
|
|
print('REJECT EMOJI OVERLOAD: ' + message_json['object']['id'])
|
2020-04-03 16:27:34 +00:00
|
|
|
print('REJECT EMOJI OVERLOAD: Too many emoji in post - ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
content_str)
|
2019-09-30 10:15:20 +00:00
|
|
|
return False
|
2019-09-30 10:35:49 +00:00
|
|
|
# check number of tags
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['object'].get('tag'):
|
|
|
|
if not isinstance(message_json['object']['tag'], list):
|
|
|
|
message_json['object']['tag'] = []
|
2019-09-30 11:05:35 +00:00
|
|
|
else:
|
2021-12-25 23:51:19 +00:00
|
|
|
if len(message_json['object']['tag']) > int(max_mentions * 2):
|
|
|
|
if message_json['object'].get('id'):
|
|
|
|
print('REJECT: ' + message_json['object']['id'])
|
2020-04-03 16:27:34 +00:00
|
|
|
print('REJECT: Too many tags in post - ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json['object']['tag'])
|
2019-09-30 10:35:49 +00:00
|
|
|
return False
|
2021-07-18 18:33:53 +00:00
|
|
|
# check that the post is in a language suitable for this account
|
2021-12-29 21:55:09 +00:00
|
|
|
if not understood_post_language(base_dir, nickname, domain,
|
|
|
|
message_json, system_language,
|
|
|
|
http_prefix, domain_full,
|
|
|
|
person_cache):
|
2021-07-18 18:33:53 +00:00
|
|
|
return False
|
2020-02-05 17:29:38 +00:00
|
|
|
# check for filtered content
|
2022-01-02 19:57:50 +00:00
|
|
|
if is_filtered(base_dir, nickname, domain, content_str):
|
2020-02-05 17:29:38 +00:00
|
|
|
print('REJECT: content filtered')
|
|
|
|
return False
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json['object'].get('inReplyTo'):
|
|
|
|
if isinstance(message_json['object']['inReplyTo'], str):
|
2022-01-02 19:57:50 +00:00
|
|
|
original_post_id = message_json['object']['inReplyTo']
|
|
|
|
post_post_filename = locate_post(base_dir, nickname, domain,
|
|
|
|
original_post_id)
|
|
|
|
if post_post_filename:
|
|
|
|
if not _post_allow_comments(post_post_filename):
|
2020-08-21 18:32:16 +00:00
|
|
|
print('REJECT: reply to post which does not ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
'allow comments: ' + original_post_id)
|
2020-08-21 18:32:16 +00:00
|
|
|
return False
|
2021-12-26 19:15:36 +00:00
|
|
|
if invalid_ciphertext(message_json['object']['content']):
|
2021-11-22 11:59:41 +00:00
|
|
|
print('REJECT: malformed ciphertext in content')
|
|
|
|
return False
|
2021-03-14 20:15:44 +00:00
|
|
|
if debug:
|
|
|
|
print('ACCEPT: post content is valid')
|
2019-09-30 09:43:46 +00:00
|
|
|
return True
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _obtain_avatar_for_reply_post(session, base_dir: str, http_prefix: str,
|
|
|
|
domain: str, onion_domain: str,
|
2022-03-11 13:27:54 +00:00
|
|
|
i2p_domain: str,
|
2021-12-29 21:55:09 +00:00
|
|
|
person_cache: {},
|
|
|
|
post_json_object: {}, debug: bool,
|
|
|
|
signing_priv_key_pem: str) -> None:
|
2019-09-30 19:23:53 +00:00
|
|
|
"""Tries to obtain the actor for the person being replied to
|
|
|
|
so that their avatar can later be shown
|
|
|
|
"""
|
2021-12-26 10:57:03 +00:00
|
|
|
if not has_object_dict(post_json_object):
|
2019-09-30 19:39:48 +00:00
|
|
|
return
|
|
|
|
|
2021-12-25 22:09:19 +00:00
|
|
|
if not post_json_object['object'].get('inReplyTo'):
|
2019-09-30 19:39:48 +00:00
|
|
|
return
|
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
lookup_actor = post_json_object['object']['inReplyTo']
|
|
|
|
if not lookup_actor:
|
2019-10-21 12:49:16 +00:00
|
|
|
return
|
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
if not isinstance(lookup_actor, str):
|
2020-08-28 14:45:07 +00:00
|
|
|
return
|
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
if not has_users_path(lookup_actor):
|
2019-10-21 12:49:16 +00:00
|
|
|
return
|
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
if '/statuses/' in lookup_actor:
|
|
|
|
lookup_actor = lookup_actor.split('/statuses/')[0]
|
2020-03-22 21:16:02 +00:00
|
|
|
|
2019-10-21 12:49:16 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('DEBUG: Obtaining actor for reply post ' + lookup_actor)
|
2019-10-01 13:23:22 +00:00
|
|
|
|
2019-10-21 12:49:16 +00:00
|
|
|
for tries in range(6):
|
2022-01-02 19:57:50 +00:00
|
|
|
pub_key = \
|
|
|
|
get_person_pub_key(base_dir, session, lookup_actor,
|
2021-12-29 21:55:09 +00:00
|
|
|
person_cache, debug,
|
|
|
|
__version__, http_prefix,
|
2022-03-11 13:27:54 +00:00
|
|
|
domain, onion_domain, i2p_domain,
|
|
|
|
signing_priv_key_pem)
|
2022-01-02 19:57:50 +00:00
|
|
|
if pub_key:
|
2021-03-14 20:15:44 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('DEBUG: public key obtained for reply: ' + lookup_actor)
|
2019-10-21 12:49:16 +00:00
|
|
|
break
|
|
|
|
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: Retry ' + str(tries + 1) +
|
2022-01-02 19:57:50 +00:00
|
|
|
' obtaining actor for ' + lookup_actor)
|
2020-03-22 21:16:02 +00:00
|
|
|
time.sleep(5)
|
2019-09-30 19:23:53 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _dm_notify(base_dir: str, handle: str, url: str) -> None:
|
2019-10-03 16:22:34 +00:00
|
|
|
"""Creates a notification that a new DM has arrived
|
|
|
|
"""
|
2022-01-02 19:57:50 +00:00
|
|
|
account_dir = base_dir + '/accounts/' + handle
|
|
|
|
if not os.path.isdir(account_dir):
|
2019-10-03 16:22:34 +00:00
|
|
|
return
|
2022-01-02 19:57:50 +00:00
|
|
|
dm_file = account_dir + '/.newDM'
|
|
|
|
if not os.path.isfile(dm_file):
|
2021-11-25 21:18:53 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(dm_file, 'w+') as fp_dm:
|
|
|
|
fp_dm.write(url)
|
2021-11-25 21:18:53 +00:00
|
|
|
except OSError:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('EX: unable to write ' + dm_file)
|
2019-10-03 16:22:34 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _already_liked(base_dir: str, nickname: str, domain: str,
|
2022-01-02 19:57:50 +00:00
|
|
|
post_url: str, likerActor: str) -> bool:
|
2020-07-13 13:36:45 +00:00
|
|
|
"""Is the given post already liked by the given handle?
|
|
|
|
"""
|
2021-12-26 23:41:34 +00:00
|
|
|
post_filename = \
|
2022-01-02 19:57:50 +00:00
|
|
|
locate_post(base_dir, nickname, domain, post_url)
|
2021-12-26 23:41:34 +00:00
|
|
|
if not post_filename:
|
2020-07-13 13:36:45 +00:00
|
|
|
return False
|
2021-12-26 23:41:34 +00:00
|
|
|
post_json_object = load_json(post_filename, 1)
|
2021-12-25 22:09:19 +00:00
|
|
|
if not post_json_object:
|
2020-07-13 13:36:45 +00:00
|
|
|
return False
|
2021-12-26 10:57:03 +00:00
|
|
|
if not has_object_dict(post_json_object):
|
2020-07-13 13:36:45 +00:00
|
|
|
return False
|
2021-12-25 22:09:19 +00:00
|
|
|
if not post_json_object['object'].get('likes'):
|
2020-07-13 13:36:45 +00:00
|
|
|
return False
|
2021-12-25 22:09:19 +00:00
|
|
|
if not post_json_object['object']['likes'].get('items'):
|
2020-07-13 13:36:45 +00:00
|
|
|
return False
|
2021-12-25 22:09:19 +00:00
|
|
|
for like in post_json_object['object']['likes']['items']:
|
2020-07-13 13:36:45 +00:00
|
|
|
if not like.get('type'):
|
|
|
|
continue
|
|
|
|
if not like.get('actor'):
|
|
|
|
continue
|
|
|
|
if like['type'] != 'Like':
|
|
|
|
continue
|
|
|
|
if like['actor'] == likerActor:
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _already_reacted(base_dir: str, nickname: str, domain: str,
|
2022-01-02 19:57:50 +00:00
|
|
|
post_url: str, reaction_actor: str,
|
|
|
|
emoji_content: str) -> bool:
|
2021-11-10 12:16:03 +00:00
|
|
|
"""Is the given post already emoji reacted by the given handle?
|
|
|
|
"""
|
2021-12-26 23:41:34 +00:00
|
|
|
post_filename = \
|
2022-01-02 19:57:50 +00:00
|
|
|
locate_post(base_dir, nickname, domain, post_url)
|
2021-12-26 23:41:34 +00:00
|
|
|
if not post_filename:
|
2021-11-10 12:16:03 +00:00
|
|
|
return False
|
2021-12-26 23:41:34 +00:00
|
|
|
post_json_object = load_json(post_filename, 1)
|
2021-12-25 22:09:19 +00:00
|
|
|
if not post_json_object:
|
2021-11-10 12:16:03 +00:00
|
|
|
return False
|
2021-12-26 10:57:03 +00:00
|
|
|
if not has_object_dict(post_json_object):
|
2021-11-10 12:16:03 +00:00
|
|
|
return False
|
2021-12-25 22:09:19 +00:00
|
|
|
if not post_json_object['object'].get('reactions'):
|
2021-11-10 12:16:03 +00:00
|
|
|
return False
|
2021-12-25 22:09:19 +00:00
|
|
|
if not post_json_object['object']['reactions'].get('items'):
|
2021-11-10 12:16:03 +00:00
|
|
|
return False
|
2021-12-25 22:09:19 +00:00
|
|
|
for react in post_json_object['object']['reactions']['items']:
|
2021-11-10 12:16:03 +00:00
|
|
|
if not react.get('type'):
|
|
|
|
continue
|
|
|
|
if not react.get('content'):
|
|
|
|
continue
|
|
|
|
if not react.get('actor'):
|
|
|
|
continue
|
|
|
|
if react['type'] != 'EmojiReact':
|
|
|
|
continue
|
2022-01-02 19:57:50 +00:00
|
|
|
if react['content'] != emoji_content:
|
2021-11-10 12:16:03 +00:00
|
|
|
continue
|
2022-01-02 19:57:50 +00:00
|
|
|
if react['actor'] == reaction_actor:
|
2021-11-10 12:16:03 +00:00
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _like_notify(base_dir: str, domain: str, onion_domain: str,
|
|
|
|
handle: str, actor: str, url: str) -> None:
|
2020-07-08 19:49:15 +00:00
|
|
|
"""Creates a notification that a like has arrived
|
|
|
|
"""
|
2020-07-08 22:04:17 +00:00
|
|
|
# This is not you liking your own post
|
|
|
|
if actor in url:
|
|
|
|
return
|
|
|
|
|
|
|
|
# check that the liked post was by this handle
|
|
|
|
nickname = handle.split('@')[0]
|
|
|
|
if '/' + domain + '/users/' + nickname not in url:
|
2021-12-25 20:43:43 +00:00
|
|
|
if not onion_domain:
|
2020-07-08 22:04:17 +00:00
|
|
|
return
|
2021-12-25 20:43:43 +00:00
|
|
|
if '/' + onion_domain + '/users/' + nickname not in url:
|
2020-07-08 22:04:17 +00:00
|
|
|
return
|
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
account_dir = base_dir + '/accounts/' + handle
|
2020-08-27 09:35:26 +00:00
|
|
|
|
|
|
|
# are like notifications enabled?
|
2022-01-02 19:57:50 +00:00
|
|
|
notify_likes_enabled_filename = account_dir + '/.notifyLikes'
|
|
|
|
if not os.path.isfile(notify_likes_enabled_filename):
|
2020-07-08 19:49:15 +00:00
|
|
|
return
|
2020-08-27 09:19:32 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
like_file = account_dir + '/.newLike'
|
|
|
|
if os.path.isfile(like_file):
|
|
|
|
if '##sent##' not in open(like_file).read():
|
2020-07-08 21:52:18 +00:00
|
|
|
return
|
2020-07-08 19:49:15 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
liker_nickname = get_nickname_from_actor(actor)
|
|
|
|
liker_domain, _ = get_domain_from_actor(actor)
|
|
|
|
if liker_nickname and liker_domain:
|
|
|
|
liker_handle = liker_nickname + '@' + liker_domain
|
2020-07-08 21:04:19 +00:00
|
|
|
else:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('_like_notify liker_handle: ' +
|
|
|
|
str(liker_nickname) + '@' + str(liker_domain))
|
|
|
|
liker_handle = actor
|
|
|
|
if liker_handle != handle:
|
|
|
|
like_str = liker_handle + ' ' + url + '?likedBy=' + actor
|
|
|
|
prev_like_file = account_dir + '/.prevLike'
|
2020-07-08 22:17:21 +00:00
|
|
|
# was there a previous like notification?
|
2022-01-02 19:57:50 +00:00
|
|
|
if os.path.isfile(prev_like_file):
|
2020-07-08 22:17:21 +00:00
|
|
|
# is it the same as the current notification ?
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(prev_like_file, 'r') as fp_like:
|
|
|
|
prev_like_str = fp_like.read()
|
|
|
|
if prev_like_str == like_str:
|
2021-06-21 22:52:04 +00:00
|
|
|
return
|
2021-06-21 22:53:04 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(prev_like_file, 'w+') as fp_like:
|
|
|
|
fp_like.write(like_str)
|
2021-11-25 21:18:53 +00:00
|
|
|
except OSError:
|
2021-10-29 18:48:15 +00:00
|
|
|
print('EX: ERROR: unable to save previous like notification ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
prev_like_file)
|
2021-11-25 21:18:53 +00:00
|
|
|
|
2021-06-21 22:53:04 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(like_file, 'w+') as fp_like:
|
|
|
|
fp_like.write(like_str)
|
2021-11-25 21:18:53 +00:00
|
|
|
except OSError:
|
2021-10-29 18:48:15 +00:00
|
|
|
print('EX: ERROR: unable to write like notification file ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
like_file)
|
2020-07-08 19:49:15 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _reaction_notify(base_dir: str, domain: str, onion_domain: str,
|
|
|
|
handle: str, actor: str,
|
2022-01-02 19:57:50 +00:00
|
|
|
url: str, emoji_content: str) -> None:
|
2021-11-10 12:16:03 +00:00
|
|
|
"""Creates a notification that an emoji reaction has arrived
|
|
|
|
"""
|
|
|
|
# This is not you reacting to your own post
|
|
|
|
if actor in url:
|
|
|
|
return
|
|
|
|
|
|
|
|
# check that the reaction post was by this handle
|
|
|
|
nickname = handle.split('@')[0]
|
|
|
|
if '/' + domain + '/users/' + nickname not in url:
|
2021-12-25 20:43:43 +00:00
|
|
|
if not onion_domain:
|
2021-11-10 12:16:03 +00:00
|
|
|
return
|
2021-12-25 20:43:43 +00:00
|
|
|
if '/' + onion_domain + '/users/' + nickname not in url:
|
2021-11-10 12:16:03 +00:00
|
|
|
return
|
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
account_dir = base_dir + '/accounts/' + handle
|
2021-11-10 12:16:03 +00:00
|
|
|
|
|
|
|
# are reaction notifications enabled?
|
2022-01-02 19:57:50 +00:00
|
|
|
notify_reaction_enabled_filename = account_dir + '/.notifyReactions'
|
|
|
|
if not os.path.isfile(notify_reaction_enabled_filename):
|
2021-11-10 12:16:03 +00:00
|
|
|
return
|
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
reaction_file = account_dir + '/.newReaction'
|
|
|
|
if os.path.isfile(reaction_file):
|
|
|
|
if '##sent##' not in open(reaction_file).read():
|
2021-11-10 12:16:03 +00:00
|
|
|
return
|
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
reaction_nickname = get_nickname_from_actor(actor)
|
|
|
|
reaction_domain, _ = get_domain_from_actor(actor)
|
|
|
|
if reaction_nickname and reaction_domain:
|
|
|
|
reaction_handle = reaction_nickname + '@' + reaction_domain
|
2021-11-10 12:16:03 +00:00
|
|
|
else:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('_reaction_notify reaction_handle: ' +
|
|
|
|
str(reaction_nickname) + '@' + str(reaction_domain))
|
|
|
|
reaction_handle = actor
|
|
|
|
if reaction_handle != handle:
|
|
|
|
reaction_str = \
|
|
|
|
reaction_handle + ' ' + url + '?reactBy=' + actor + \
|
|
|
|
';emoj=' + emoji_content
|
|
|
|
prev_reaction_file = account_dir + '/.prevReaction'
|
2021-11-10 12:16:03 +00:00
|
|
|
# was there a previous reaction notification?
|
2022-01-02 19:57:50 +00:00
|
|
|
if os.path.isfile(prev_reaction_file):
|
2021-11-10 12:16:03 +00:00
|
|
|
# is it the same as the current notification ?
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(prev_reaction_file, 'r') as fp_react:
|
|
|
|
prev_reaction_str = fp_react.read()
|
|
|
|
if prev_reaction_str == reaction_str:
|
2021-11-10 12:16:03 +00:00
|
|
|
return
|
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(prev_reaction_file, 'w+') as fp_react:
|
|
|
|
fp_react.write(reaction_str)
|
2021-11-25 21:18:53 +00:00
|
|
|
except OSError:
|
2021-11-10 12:16:03 +00:00
|
|
|
print('EX: ERROR: unable to save previous reaction notification ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
prev_reaction_file)
|
2021-11-25 21:18:53 +00:00
|
|
|
|
2021-11-10 12:16:03 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(reaction_file, 'w+') as fp_react:
|
|
|
|
fp_react.write(reaction_str)
|
2021-11-25 21:18:53 +00:00
|
|
|
except OSError:
|
2021-11-10 12:16:03 +00:00
|
|
|
print('EX: ERROR: unable to write reaction notification file ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
reaction_file)
|
2021-11-10 12:16:03 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _notify_post_arrival(base_dir: str, handle: str, url: str) -> None:
|
2021-07-07 09:32:48 +00:00
|
|
|
"""Creates a notification that a new post has arrived.
|
|
|
|
This is for followed accounts with the notify checkbox enabled
|
|
|
|
on the person options screen
|
2021-07-06 20:38:08 +00:00
|
|
|
"""
|
2022-01-02 19:57:50 +00:00
|
|
|
account_dir = base_dir + '/accounts/' + handle
|
|
|
|
if not os.path.isdir(account_dir):
|
2021-07-06 20:38:08 +00:00
|
|
|
return
|
2022-01-02 19:57:50 +00:00
|
|
|
notify_file = account_dir + '/.newNotifiedPost'
|
|
|
|
if os.path.isfile(notify_file):
|
2021-07-07 09:30:15 +00:00
|
|
|
# check that the same notification is not repeatedly sent
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(notify_file, 'r') as fp_notify:
|
|
|
|
existing_notification_message = fp_notify.read()
|
|
|
|
if url in existing_notification_message:
|
2021-07-07 09:30:15 +00:00
|
|
|
return
|
2021-11-25 21:18:53 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(notify_file, 'w+') as fp_notify:
|
|
|
|
fp_notify.write(url)
|
2021-11-25 21:18:53 +00:00
|
|
|
except OSError:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('EX: unable to write ' + notify_file)
|
2021-07-06 20:38:08 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _reply_notify(base_dir: str, handle: str, url: str) -> None:
|
2019-10-03 16:37:25 +00:00
|
|
|
"""Creates a notification that a new reply has arrived
|
|
|
|
"""
|
2022-01-02 19:57:50 +00:00
|
|
|
account_dir = base_dir + '/accounts/' + handle
|
|
|
|
if not os.path.isdir(account_dir):
|
2019-10-03 16:37:25 +00:00
|
|
|
return
|
2022-01-02 19:57:50 +00:00
|
|
|
reply_file = account_dir + '/.newReply'
|
|
|
|
if not os.path.isfile(reply_file):
|
2021-11-25 21:18:53 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(reply_file, 'w+') as fp_reply:
|
|
|
|
fp_reply.write(url)
|
2021-11-25 21:18:53 +00:00
|
|
|
except OSError:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('EX: unable to write ' + reply_file)
|
2019-10-03 16:37:25 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _git_patch_notify(base_dir: str, handle: str,
|
|
|
|
subject: str, content: str,
|
2022-01-02 19:57:50 +00:00
|
|
|
from_nickname: str, from_domain: str) -> None:
|
2020-05-02 11:20:57 +00:00
|
|
|
"""Creates a notification that a new git patch has arrived
|
|
|
|
"""
|
2022-01-02 19:57:50 +00:00
|
|
|
account_dir = base_dir + '/accounts/' + handle
|
|
|
|
if not os.path.isdir(account_dir):
|
2020-05-02 11:20:57 +00:00
|
|
|
return
|
2022-01-02 19:57:50 +00:00
|
|
|
patch_file = account_dir + '/.newPatch'
|
2020-05-03 09:58:51 +00:00
|
|
|
subject = subject.replace('[PATCH]', '').strip()
|
2022-01-02 19:57:50 +00:00
|
|
|
handle = '@' + from_nickname + '@' + from_domain
|
2021-11-25 21:18:53 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(patch_file, 'w+') as fp_patch:
|
|
|
|
fp_patch.write('git ' + handle + ' ' + subject)
|
2021-11-25 21:18:53 +00:00
|
|
|
except OSError:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('EX: unable to write ' + patch_file)
|
2020-05-02 17:16:24 +00:00
|
|
|
|
2020-05-02 11:20:57 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _group_handle(base_dir: str, handle: str) -> bool:
|
2019-10-04 12:22:56 +00:00
|
|
|
"""Is the given account handle a group?
|
|
|
|
"""
|
2022-01-02 19:57:50 +00:00
|
|
|
actor_file = base_dir + '/accounts/' + handle + '.json'
|
|
|
|
if not os.path.isfile(actor_file):
|
2019-10-04 12:22:56 +00:00
|
|
|
return False
|
2022-01-02 19:57:50 +00:00
|
|
|
actor_json = load_json(actor_file)
|
2021-12-26 10:29:52 +00:00
|
|
|
if not actor_json:
|
2019-10-04 12:22:56 +00:00
|
|
|
return False
|
2021-12-26 10:29:52 +00:00
|
|
|
return actor_json['type'] == 'Group'
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2019-10-04 12:22:56 +00:00
|
|
|
|
2022-03-11 22:13:22 +00:00
|
|
|
def _send_to_group_members(session, session_onion, session_i2p,
|
|
|
|
base_dir: str, handle: str, port: int,
|
2021-12-29 21:55:09 +00:00
|
|
|
post_json_object: {},
|
|
|
|
http_prefix: str, federation_list: [],
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads: [], post_log: [],
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers: {},
|
|
|
|
person_cache: {}, debug: bool,
|
|
|
|
system_language: str,
|
|
|
|
onion_domain: str, i2p_domain: str,
|
|
|
|
signing_priv_key_pem: str) -> None:
|
2019-10-04 12:22:56 +00:00
|
|
|
"""When a post arrives for a group send it out to the group members
|
|
|
|
"""
|
2021-08-01 13:25:11 +00:00
|
|
|
if debug:
|
|
|
|
print('\n\n=========================================================')
|
|
|
|
print(handle + ' sending to group members')
|
2021-08-02 19:01:31 +00:00
|
|
|
|
2022-01-01 15:11:42 +00:00
|
|
|
shared_item_federation_tokens = {}
|
2021-12-25 18:05:01 +00:00
|
|
|
shared_items_federated_domains = []
|
2022-01-02 19:57:50 +00:00
|
|
|
shared_items_federated_domains_str = \
|
2021-12-26 14:08:58 +00:00
|
|
|
get_config_param(base_dir, 'shared_items_federated_domains')
|
2022-01-02 19:57:50 +00:00
|
|
|
if shared_items_federated_domains_str:
|
|
|
|
si_federated_domains_list = \
|
|
|
|
shared_items_federated_domains_str.split(',')
|
|
|
|
for shared_federated_domain in si_federated_domains_list:
|
|
|
|
domain_str = shared_federated_domain.strip()
|
|
|
|
shared_items_federated_domains.append(domain_str)
|
|
|
|
|
|
|
|
followers_file = base_dir + '/accounts/' + handle + '/followers.txt'
|
|
|
|
if not os.path.isfile(followers_file):
|
2019-10-04 12:22:56 +00:00
|
|
|
return
|
2021-12-25 22:09:19 +00:00
|
|
|
if not post_json_object.get('to'):
|
2021-08-02 14:19:09 +00:00
|
|
|
return
|
2021-12-25 22:09:19 +00:00
|
|
|
if not post_json_object.get('object'):
|
2019-10-04 13:31:30 +00:00
|
|
|
return
|
2021-12-26 10:57:03 +00:00
|
|
|
if not has_object_dict(post_json_object):
|
2021-07-30 19:20:49 +00:00
|
|
|
return
|
2021-08-02 14:19:09 +00:00
|
|
|
nickname = handle.split('@')[0].replace('!', '')
|
2020-04-03 16:27:34 +00:00
|
|
|
domain = handle.split('@')[1]
|
2021-12-26 12:45:03 +00:00
|
|
|
domain_full = get_full_domain(domain, port)
|
2022-01-02 19:57:50 +00:00
|
|
|
group_actor = local_actor_url(http_prefix, nickname, domain_full)
|
|
|
|
if group_actor not in post_json_object['to']:
|
2021-08-02 14:19:09 +00:00
|
|
|
return
|
2022-01-02 19:57:50 +00:00
|
|
|
cc_str = ''
|
2021-08-02 14:19:09 +00:00
|
|
|
nickname = handle.split('@')[0].replace('!', '')
|
|
|
|
|
2021-09-18 09:57:03 +00:00
|
|
|
# save to the group outbox so that replies will be to the group
|
|
|
|
# rather than the original sender
|
2021-12-28 18:13:52 +00:00
|
|
|
save_post_to_box(base_dir, http_prefix, None,
|
|
|
|
nickname, domain, post_json_object, 'outbox')
|
2021-09-18 09:57:03 +00:00
|
|
|
|
2021-12-27 11:20:57 +00:00
|
|
|
post_id = remove_id_ending(post_json_object['object']['id'])
|
2021-08-02 14:19:09 +00:00
|
|
|
if debug:
|
2021-12-26 19:47:06 +00:00
|
|
|
print('Group announce: ' + post_id)
|
2022-01-02 19:57:50 +00:00
|
|
|
announce_json = \
|
2021-12-29 21:55:09 +00:00
|
|
|
create_announce(session, base_dir, federation_list,
|
|
|
|
nickname, domain, port,
|
2022-01-02 19:57:50 +00:00
|
|
|
group_actor + '/followers', cc_str,
|
2021-12-29 21:55:09 +00:00
|
|
|
http_prefix, post_id, False, False,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads, post_log,
|
2021-12-29 21:55:09 +00:00
|
|
|
person_cache, cached_webfingers,
|
|
|
|
debug, __version__, signing_priv_key_pem)
|
2021-08-02 14:19:09 +00:00
|
|
|
|
2022-03-11 22:13:22 +00:00
|
|
|
send_to_followers_thread(session, session_onion, session_i2p,
|
|
|
|
base_dir, nickname, domain,
|
2021-12-29 21:55:09 +00:00
|
|
|
onion_domain, i2p_domain, port,
|
|
|
|
http_prefix, federation_list,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads, post_log,
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers, person_cache,
|
2022-01-02 19:57:50 +00:00
|
|
|
announce_json, debug, __version__,
|
2021-12-29 21:55:09 +00:00
|
|
|
shared_items_federated_domains,
|
2022-01-01 15:11:42 +00:00
|
|
|
shared_item_federation_tokens,
|
2021-12-29 21:55:09 +00:00
|
|
|
signing_priv_key_pem)
|
2020-04-03 16:27:34 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _inbox_update_calendar(base_dir: str, handle: str,
|
|
|
|
post_json_object: {}) -> None:
|
2019-10-11 12:31:06 +00:00
|
|
|
"""Detects whether the tag list on a post contains calendar events
|
|
|
|
and if so saves the post id to a file in the calendar directory
|
|
|
|
for the account
|
|
|
|
"""
|
2021-12-25 22:09:19 +00:00
|
|
|
if not post_json_object.get('actor'):
|
2020-07-03 18:49:00 +00:00
|
|
|
return
|
2021-12-26 10:57:03 +00:00
|
|
|
if not has_object_dict(post_json_object):
|
2019-10-11 12:31:06 +00:00
|
|
|
return
|
2021-12-25 22:09:19 +00:00
|
|
|
if not post_json_object['object'].get('tag'):
|
2019-10-11 12:31:06 +00:00
|
|
|
return
|
2021-12-25 22:09:19 +00:00
|
|
|
if not isinstance(post_json_object['object']['tag'], list):
|
2019-10-11 12:31:06 +00:00
|
|
|
return
|
|
|
|
|
2021-12-25 22:09:19 +00:00
|
|
|
actor = post_json_object['actor']
|
2022-01-02 19:57:50 +00:00
|
|
|
actor_nickname = get_nickname_from_actor(actor)
|
|
|
|
actor_domain, _ = get_domain_from_actor(actor)
|
|
|
|
handle_nickname = handle.split('@')[0]
|
|
|
|
handle_domain = handle.split('@')[1]
|
2021-12-29 21:55:09 +00:00
|
|
|
if not receiving_calendar_events(base_dir,
|
2022-01-02 19:57:50 +00:00
|
|
|
handle_nickname, handle_domain,
|
|
|
|
actor_nickname, actor_domain):
|
2020-07-03 18:49:00 +00:00
|
|
|
return
|
2020-08-13 09:37:11 +00:00
|
|
|
|
2021-12-27 11:20:57 +00:00
|
|
|
post_id = remove_id_ending(post_json_object['id']).replace('/', '#')
|
2020-08-13 11:58:05 +00:00
|
|
|
|
2020-08-13 09:37:11 +00:00
|
|
|
# look for events within the tags list
|
2022-01-02 19:57:50 +00:00
|
|
|
for tag_dict in post_json_object['object']['tag']:
|
|
|
|
if not tag_dict.get('type'):
|
2020-07-11 22:36:52 +00:00
|
|
|
continue
|
2022-01-02 19:57:50 +00:00
|
|
|
if tag_dict['type'] != 'Event':
|
2019-10-11 12:31:06 +00:00
|
|
|
continue
|
2022-01-02 19:57:50 +00:00
|
|
|
if not tag_dict.get('startTime'):
|
2019-10-11 12:31:06 +00:00
|
|
|
continue
|
2022-01-02 19:57:50 +00:00
|
|
|
save_event_post(base_dir, handle, post_id, tag_dict)
|
2019-10-11 16:20:16 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def inbox_update_index(boxname: str, base_dir: str, handle: str,
|
2022-01-02 19:57:50 +00:00
|
|
|
destination_filename: str, debug: bool) -> bool:
|
2019-10-20 10:25:38 +00:00
|
|
|
"""Updates the index of received posts
|
|
|
|
The new entry is added to the top of the file
|
|
|
|
"""
|
2022-01-02 19:57:50 +00:00
|
|
|
index_filename = \
|
|
|
|
base_dir + '/accounts/' + handle + '/' + boxname + '.index'
|
2019-10-20 10:40:09 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('DEBUG: Updating index ' + index_filename)
|
2019-11-18 13:16:21 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
if '/' + boxname + '/' in destination_filename:
|
|
|
|
destination_filename = \
|
|
|
|
destination_filename.split('/' + boxname + '/')[1]
|
2019-11-18 13:16:21 +00:00
|
|
|
|
|
|
|
# remove the path
|
2022-01-02 19:57:50 +00:00
|
|
|
if '/' in destination_filename:
|
|
|
|
destination_filename = destination_filename.split('/')[-1]
|
2019-11-18 13:16:21 +00:00
|
|
|
|
2021-09-03 11:30:23 +00:00
|
|
|
written = False
|
2022-01-02 19:57:50 +00:00
|
|
|
if os.path.isfile(index_filename):
|
2019-10-20 12:43:59 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(index_filename, 'r+') as index_file:
|
|
|
|
content = index_file.read()
|
|
|
|
if destination_filename + '\n' not in content:
|
|
|
|
index_file.seek(0, 0)
|
|
|
|
index_file.write(destination_filename + '\n' + content)
|
2021-09-03 11:30:23 +00:00
|
|
|
written = True
|
2019-10-20 12:43:59 +00:00
|
|
|
return True
|
2021-12-25 15:28:52 +00:00
|
|
|
except OSError as ex:
|
|
|
|
print('EX: Failed to write entry to index ' + str(ex))
|
2019-10-20 10:45:12 +00:00
|
|
|
else:
|
2021-06-21 22:53:04 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(index_filename, 'w+') as index_file:
|
|
|
|
index_file.write(destination_filename + '\n')
|
2021-09-03 11:30:23 +00:00
|
|
|
written = True
|
2021-12-25 15:28:52 +00:00
|
|
|
except OSError as ex:
|
|
|
|
print('EX: Failed to write initial entry to index ' + str(ex))
|
2019-10-20 10:45:12 +00:00
|
|
|
|
2021-09-03 11:30:23 +00:00
|
|
|
return written
|
2019-10-20 10:25:38 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _update_last_seen(base_dir: str, handle: str, actor: str) -> None:
|
2020-12-13 11:27:12 +00:00
|
|
|
"""Updates the time when the given handle last saw the given actor
|
2020-12-13 11:28:23 +00:00
|
|
|
This can later be used to indicate if accounts are dormant/abandoned/moved
|
2020-12-13 11:27:12 +00:00
|
|
|
"""
|
|
|
|
if '@' not in handle:
|
|
|
|
return
|
|
|
|
nickname = handle.split('@')[0]
|
|
|
|
domain = handle.split('@')[1]
|
2021-12-26 18:17:37 +00:00
|
|
|
domain = remove_domain_port(domain)
|
2022-01-02 19:57:50 +00:00
|
|
|
account_path = acct_dir(base_dir, nickname, domain)
|
|
|
|
if not os.path.isdir(account_path):
|
2020-12-13 11:27:12 +00:00
|
|
|
return
|
2021-12-28 20:32:11 +00:00
|
|
|
if not is_following_actor(base_dir, nickname, domain, actor):
|
2020-12-13 11:27:12 +00:00
|
|
|
return
|
2022-01-02 19:57:50 +00:00
|
|
|
last_seen_path = account_path + '/lastseen'
|
|
|
|
if not os.path.isdir(last_seen_path):
|
|
|
|
os.mkdir(last_seen_path)
|
|
|
|
last_seen_filename = \
|
|
|
|
last_seen_path + '/' + actor.replace('/', '#') + '.txt'
|
2021-12-26 13:17:46 +00:00
|
|
|
curr_time = datetime.datetime.utcnow()
|
2022-01-02 19:57:50 +00:00
|
|
|
days_since_epoch = (curr_time - datetime.datetime(1970, 1, 1)).days
|
2020-12-13 14:31:22 +00:00
|
|
|
# has the value changed?
|
2022-01-02 19:57:50 +00:00
|
|
|
if os.path.isfile(last_seen_filename):
|
|
|
|
with open(last_seen_filename, 'r') as last_seen_file:
|
|
|
|
days_since_epoch_file = last_seen_file.read()
|
|
|
|
if int(days_since_epoch_file) == days_since_epoch:
|
2020-12-13 14:31:22 +00:00
|
|
|
# value hasn't changed, so we can save writing anything to file
|
|
|
|
return
|
2021-11-25 21:18:53 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(last_seen_filename, 'w+') as last_seen_file:
|
|
|
|
last_seen_file.write(str(days_since_epoch))
|
2021-11-25 21:18:53 +00:00
|
|
|
except OSError:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('EX: unable to write ' + last_seen_filename)
|
2020-12-13 11:27:12 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _bounce_dm(senderPostId: str, session, http_prefix: str,
|
|
|
|
base_dir: str, nickname: str, domain: str, port: int,
|
2022-01-02 19:57:50 +00:00
|
|
|
sending_handle: str, federation_list: [],
|
|
|
|
send_threads: [], post_log: [],
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers: {}, person_cache: {},
|
|
|
|
translate: {}, debug: bool,
|
2022-01-02 19:57:50 +00:00
|
|
|
last_bounce_message: [], system_language: str,
|
2021-12-29 21:55:09 +00:00
|
|
|
signing_priv_key_pem: str,
|
2022-01-28 10:54:53 +00:00
|
|
|
content_license_url: str,
|
2022-02-08 12:08:20 +00:00
|
|
|
languages_understood: [],
|
|
|
|
bounce_is_chat: bool) -> bool:
|
2021-02-24 11:01:44 +00:00
|
|
|
"""Sends a bounce message back to the sending handle
|
|
|
|
if a DM has been rejected
|
|
|
|
"""
|
|
|
|
print(nickname + '@' + domain +
|
2022-01-02 19:57:50 +00:00
|
|
|
' cannot receive DM from ' + sending_handle +
|
2021-02-24 11:01:44 +00:00
|
|
|
' because they do not follow them')
|
2021-02-24 11:43:48 +00:00
|
|
|
|
|
|
|
# Don't send out bounce messages too frequently.
|
|
|
|
# Otherwise an adversary could try to DoS your instance
|
|
|
|
# by continuously sending DMs to you
|
2021-12-26 13:17:46 +00:00
|
|
|
curr_time = int(time.time())
|
2022-01-02 19:57:50 +00:00
|
|
|
if curr_time - last_bounce_message[0] < 60:
|
2021-02-24 11:43:48 +00:00
|
|
|
return False
|
|
|
|
|
|
|
|
# record the last time that a bounce was generated
|
2022-01-02 19:57:50 +00:00
|
|
|
last_bounce_message[0] = curr_time
|
2021-02-24 11:43:48 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
sender_nickname = sending_handle.split('@')[0]
|
2021-12-26 00:07:44 +00:00
|
|
|
group_account = False
|
2022-01-02 19:57:50 +00:00
|
|
|
if sending_handle.startswith('!'):
|
|
|
|
sending_handle = sending_handle[1:]
|
2021-12-26 00:07:44 +00:00
|
|
|
group_account = True
|
2022-01-02 19:57:50 +00:00
|
|
|
sender_domain = sending_handle.split('@')[1]
|
|
|
|
sender_port = port
|
|
|
|
if ':' in sender_domain:
|
|
|
|
sender_port = get_port_from_domain(sender_domain)
|
|
|
|
sender_domain = remove_domain_port(sender_domain)
|
|
|
|
cc_list = []
|
2021-02-24 11:01:44 +00:00
|
|
|
|
|
|
|
# create the bounce DM
|
|
|
|
subject = None
|
|
|
|
content = translate['DM bounce']
|
2022-01-02 19:57:50 +00:00
|
|
|
followers_only = False
|
|
|
|
save_to_file = False
|
2021-12-25 20:39:35 +00:00
|
|
|
client_to_server = False
|
2022-01-02 19:57:50 +00:00
|
|
|
comments_enabled = False
|
|
|
|
attach_image_filename = None
|
|
|
|
media_type = None
|
|
|
|
image_description = ''
|
2021-05-09 19:29:53 +00:00
|
|
|
city = 'London, England'
|
2022-01-02 19:57:50 +00:00
|
|
|
in_reply_to = remove_id_ending(senderPostId)
|
|
|
|
in_reply_to_atom_uri = None
|
|
|
|
schedule_post = False
|
|
|
|
event_date = None
|
|
|
|
event_time = None
|
2021-02-24 11:01:44 +00:00
|
|
|
location = None
|
2022-01-02 19:57:50 +00:00
|
|
|
conversation_id = None
|
2021-12-25 18:20:56 +00:00
|
|
|
low_bandwidth = False
|
2021-12-25 22:09:19 +00:00
|
|
|
post_json_object = \
|
2021-12-28 19:33:29 +00:00
|
|
|
create_direct_message_post(base_dir, nickname, domain, port,
|
2022-01-02 19:57:50 +00:00
|
|
|
http_prefix, content, followers_only,
|
|
|
|
save_to_file, client_to_server,
|
|
|
|
comments_enabled,
|
|
|
|
attach_image_filename, media_type,
|
|
|
|
image_description, city,
|
|
|
|
in_reply_to, in_reply_to_atom_uri,
|
|
|
|
subject, debug, schedule_post,
|
|
|
|
event_date, event_time, location,
|
|
|
|
system_language, conversation_id,
|
2021-12-28 19:33:29 +00:00
|
|
|
low_bandwidth,
|
2022-01-28 10:54:53 +00:00
|
|
|
content_license_url,
|
2022-02-08 12:08:20 +00:00
|
|
|
languages_understood, bounce_is_chat)
|
2021-12-25 22:09:19 +00:00
|
|
|
if not post_json_object:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('WARN: unable to create bounce message to ' + sending_handle)
|
2021-02-24 11:43:48 +00:00
|
|
|
return False
|
2021-02-24 11:01:44 +00:00
|
|
|
# bounce DM goes back to the sender
|
2022-01-02 19:57:50 +00:00
|
|
|
print('Sending bounce DM to ' + sending_handle)
|
2021-12-29 21:55:09 +00:00
|
|
|
send_signed_json(post_json_object, session, base_dir,
|
|
|
|
nickname, domain, port,
|
2022-01-02 19:57:50 +00:00
|
|
|
sender_nickname, sender_domain, sender_port, cc_list,
|
2021-12-29 21:55:09 +00:00
|
|
|
http_prefix, False, False, federation_list,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads, post_log, cached_webfingers,
|
2021-12-29 21:55:09 +00:00
|
|
|
person_cache, debug, __version__, None, group_account,
|
|
|
|
signing_priv_key_pem, 7238634)
|
2021-02-24 11:43:48 +00:00
|
|
|
return True
|
2021-02-24 11:01:44 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _is_valid_dm(base_dir: str, nickname: str, domain: str, port: int,
|
2022-01-02 19:57:50 +00:00
|
|
|
post_json_object: {}, update_index_list: [],
|
2021-12-29 21:55:09 +00:00
|
|
|
session, http_prefix: str,
|
|
|
|
federation_list: [],
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads: [], post_log: [],
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers: {},
|
|
|
|
person_cache: {},
|
|
|
|
translate: {}, debug: bool,
|
2022-01-02 19:57:50 +00:00
|
|
|
last_bounce_message: [],
|
2021-12-29 21:55:09 +00:00
|
|
|
handle: str, system_language: str,
|
|
|
|
signing_priv_key_pem: str,
|
2022-01-28 10:54:53 +00:00
|
|
|
content_license_url: str,
|
|
|
|
languages_understood: []) -> bool:
|
2021-06-07 16:34:08 +00:00
|
|
|
"""Is the given message a valid DM?
|
|
|
|
"""
|
|
|
|
if nickname == 'inbox':
|
|
|
|
# going to the shared inbox
|
|
|
|
return True
|
|
|
|
|
|
|
|
# check for the flag file which indicates to
|
|
|
|
# only receive DMs from people you are following
|
2022-01-02 19:57:50 +00:00
|
|
|
follow_dms_filename = acct_dir(base_dir, nickname, domain) + '/.followDMs'
|
|
|
|
if not os.path.isfile(follow_dms_filename):
|
2021-06-07 16:34:08 +00:00
|
|
|
# dm index will be updated
|
2022-01-02 19:57:50 +00:00
|
|
|
update_index_list.append('dm')
|
|
|
|
act_url = local_actor_url(http_prefix, nickname, domain)
|
|
|
|
_dm_notify(base_dir, handle, act_url + '/dm')
|
2021-06-07 16:34:08 +00:00
|
|
|
return True
|
2021-06-07 17:49:10 +00:00
|
|
|
|
2021-06-07 16:34:08 +00:00
|
|
|
# get the file containing following handles
|
2022-01-02 19:57:50 +00:00
|
|
|
following_filename = \
|
|
|
|
acct_dir(base_dir, nickname, domain) + '/following.txt'
|
2021-06-07 16:34:08 +00:00
|
|
|
# who is sending a DM?
|
2021-12-25 22:09:19 +00:00
|
|
|
if not post_json_object.get('actor'):
|
2021-06-07 16:34:08 +00:00
|
|
|
return False
|
2022-01-02 19:57:50 +00:00
|
|
|
sending_actor = post_json_object['actor']
|
|
|
|
sending_actor_nickname = \
|
|
|
|
get_nickname_from_actor(sending_actor)
|
|
|
|
if not sending_actor_nickname:
|
2021-06-07 16:34:08 +00:00
|
|
|
return False
|
2022-01-02 19:57:50 +00:00
|
|
|
sending_actor_domain, _ = \
|
|
|
|
get_domain_from_actor(sending_actor)
|
|
|
|
if not sending_actor_domain:
|
2021-06-07 16:34:08 +00:00
|
|
|
return False
|
|
|
|
# Is this DM to yourself? eg. a reminder
|
2022-01-02 19:57:50 +00:00
|
|
|
sending_to_self = False
|
|
|
|
if sending_actor_nickname == nickname and \
|
|
|
|
sending_actor_domain == domain:
|
|
|
|
sending_to_self = True
|
2021-06-07 16:34:08 +00:00
|
|
|
|
|
|
|
# check that the following file exists
|
2022-01-02 19:57:50 +00:00
|
|
|
if not sending_to_self:
|
|
|
|
if not os.path.isfile(following_filename):
|
2021-06-07 16:34:08 +00:00
|
|
|
print('No following.txt file exists for ' +
|
|
|
|
nickname + '@' + domain +
|
|
|
|
' so not accepting DM from ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
sending_actor_nickname + '@' +
|
|
|
|
sending_actor_domain)
|
2021-06-07 16:34:08 +00:00
|
|
|
return False
|
|
|
|
|
|
|
|
# Not sending to yourself
|
2022-01-02 19:57:50 +00:00
|
|
|
if not sending_to_self:
|
2021-06-07 16:34:08 +00:00
|
|
|
# get the handle of the DM sender
|
2022-01-02 19:57:50 +00:00
|
|
|
send_h = sending_actor_nickname + '@' + sending_actor_domain
|
2021-06-07 16:34:08 +00:00
|
|
|
# check the follow
|
2022-01-02 19:57:50 +00:00
|
|
|
if not is_following_actor(base_dir, nickname, domain, send_h):
|
2021-06-07 16:34:08 +00:00
|
|
|
# DMs may always be allowed from some domains
|
2021-12-26 19:09:04 +00:00
|
|
|
if not dm_allowed_from_domain(base_dir,
|
|
|
|
nickname, domain,
|
2022-01-02 19:57:50 +00:00
|
|
|
sending_actor_domain):
|
2021-06-07 16:34:08 +00:00
|
|
|
# send back a bounce DM
|
2021-12-25 22:09:19 +00:00
|
|
|
if post_json_object.get('id') and \
|
|
|
|
post_json_object.get('object'):
|
2021-06-07 16:34:08 +00:00
|
|
|
# don't send bounces back to
|
|
|
|
# replies to bounce messages
|
2021-12-25 22:09:19 +00:00
|
|
|
obj = post_json_object['object']
|
2021-06-07 16:34:08 +00:00
|
|
|
if isinstance(obj, dict):
|
|
|
|
if not obj.get('inReplyTo'):
|
2022-01-02 19:57:50 +00:00
|
|
|
bounced_id = \
|
2021-12-27 11:20:57 +00:00
|
|
|
remove_id_ending(post_json_object['id'])
|
2022-02-08 12:08:20 +00:00
|
|
|
bounce_chat = False
|
|
|
|
if obj.get('type'):
|
|
|
|
if obj['type'] == 'ChatMessage':
|
|
|
|
bounce_chat = True
|
2022-01-02 19:57:50 +00:00
|
|
|
_bounce_dm(bounced_id,
|
2021-12-29 21:55:09 +00:00
|
|
|
session, http_prefix,
|
|
|
|
base_dir,
|
|
|
|
nickname, domain,
|
2022-01-02 19:57:50 +00:00
|
|
|
port, send_h,
|
2021-12-29 21:55:09 +00:00
|
|
|
federation_list,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads, post_log,
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers,
|
|
|
|
person_cache,
|
|
|
|
translate, debug,
|
2022-01-02 19:57:50 +00:00
|
|
|
last_bounce_message,
|
2021-12-29 21:55:09 +00:00
|
|
|
system_language,
|
|
|
|
signing_priv_key_pem,
|
2022-01-28 10:54:53 +00:00
|
|
|
content_license_url,
|
2022-02-08 12:08:20 +00:00
|
|
|
languages_understood,
|
|
|
|
bounce_chat)
|
2021-06-07 16:34:08 +00:00
|
|
|
return False
|
|
|
|
|
|
|
|
# dm index will be updated
|
2022-01-02 19:57:50 +00:00
|
|
|
update_index_list.append('dm')
|
|
|
|
act_url = local_actor_url(http_prefix, nickname, domain)
|
|
|
|
_dm_notify(base_dir, handle, act_url + '/dm')
|
2021-06-07 16:34:08 +00:00
|
|
|
return True
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _receive_question_vote(base_dir: str, nickname: str, domain: str,
|
|
|
|
http_prefix: str, handle: str, debug: bool,
|
|
|
|
post_json_object: {}, recent_posts_cache: {},
|
2022-03-11 22:13:22 +00:00
|
|
|
session, session_onion, session_i2p,
|
|
|
|
onion_domain: str, i2p_domain: str, port: int,
|
2022-01-02 19:57:50 +00:00
|
|
|
federation_list: [], send_threads: [], post_log: [],
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers: {}, person_cache: {},
|
|
|
|
signing_priv_key_pem: str,
|
|
|
|
max_recent_posts: int, translate: {},
|
|
|
|
allow_deletion: bool,
|
|
|
|
yt_replace_domain: str,
|
|
|
|
twitter_replacement_domain: str,
|
|
|
|
peertube_instances: [],
|
|
|
|
allow_local_network_access: bool,
|
|
|
|
theme_name: str, system_language: str,
|
|
|
|
max_like_count: int,
|
|
|
|
cw_lists: {}, lists_enabled: bool) -> None:
|
2021-11-04 11:48:23 +00:00
|
|
|
"""Updates the votes on a Question/poll
|
|
|
|
"""
|
|
|
|
# if this is a reply to a question then update the votes
|
2022-01-02 19:57:50 +00:00
|
|
|
question_json, question_post_filename = \
|
2021-12-29 21:55:09 +00:00
|
|
|
question_update_votes(base_dir, nickname, domain, post_json_object)
|
2022-01-02 19:57:50 +00:00
|
|
|
if not question_json:
|
2021-11-04 11:48:23 +00:00
|
|
|
return
|
2022-01-02 19:57:50 +00:00
|
|
|
if not question_post_filename:
|
2021-11-04 11:48:23 +00:00
|
|
|
return
|
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
remove_post_from_cache(question_json, recent_posts_cache)
|
2021-11-04 11:48:23 +00:00
|
|
|
# ensure that the cached post is removed if it exists, so
|
|
|
|
# that it then will be recreated
|
2022-01-02 19:57:50 +00:00
|
|
|
cached_post_filename = \
|
|
|
|
get_cached_post_filename(base_dir, nickname, domain, question_json)
|
|
|
|
if cached_post_filename:
|
|
|
|
if os.path.isfile(cached_post_filename):
|
2021-11-04 11:48:23 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
os.remove(cached_post_filename)
|
2021-11-25 18:42:38 +00:00
|
|
|
except OSError:
|
2021-11-04 11:48:23 +00:00
|
|
|
print('EX: replytoQuestion unable to delete ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
cached_post_filename)
|
2021-11-05 10:24:18 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
page_number = 1
|
2021-12-25 20:06:27 +00:00
|
|
|
show_published_date_only = False
|
2022-01-02 19:57:50 +00:00
|
|
|
show_individual_post_icons = True
|
|
|
|
manually_approve_followers = \
|
2021-12-28 20:32:11 +00:00
|
|
|
follower_approval_active(base_dir, nickname, domain)
|
2022-01-02 19:57:50 +00:00
|
|
|
not_dm = not is_dm(question_json)
|
2022-02-25 19:12:40 +00:00
|
|
|
timezone = get_account_timezone(base_dir, nickname, domain)
|
2021-12-29 21:55:09 +00:00
|
|
|
individual_post_as_html(signing_priv_key_pem, False,
|
|
|
|
recent_posts_cache, max_recent_posts,
|
2022-01-02 19:57:50 +00:00
|
|
|
translate, page_number, base_dir,
|
2021-12-29 21:55:09 +00:00
|
|
|
session, cached_webfingers, person_cache,
|
2022-01-02 19:57:50 +00:00
|
|
|
nickname, domain, port, question_json,
|
2021-12-29 21:55:09 +00:00
|
|
|
None, True, allow_deletion,
|
|
|
|
http_prefix, __version__,
|
|
|
|
'inbox',
|
|
|
|
yt_replace_domain,
|
|
|
|
twitter_replacement_domain,
|
|
|
|
show_published_date_only,
|
|
|
|
peertube_instances,
|
|
|
|
allow_local_network_access,
|
|
|
|
theme_name, system_language,
|
2022-01-02 19:57:50 +00:00
|
|
|
max_like_count, not_dm,
|
|
|
|
show_individual_post_icons,
|
|
|
|
manually_approve_followers,
|
2021-12-29 21:55:09 +00:00
|
|
|
False, True, False, cw_lists,
|
2022-02-25 19:12:40 +00:00
|
|
|
lists_enabled, timezone)
|
2021-11-05 10:24:18 +00:00
|
|
|
|
2021-11-05 10:26:20 +00:00
|
|
|
# add id to inbox index
|
2021-12-29 21:55:09 +00:00
|
|
|
inbox_update_index('inbox', base_dir, handle,
|
2022-01-02 19:57:50 +00:00
|
|
|
question_post_filename, debug)
|
2021-11-05 10:26:20 +00:00
|
|
|
|
2021-11-04 11:48:23 +00:00
|
|
|
# Is this a question created by this instance?
|
2022-01-02 19:57:50 +00:00
|
|
|
id_prefix = http_prefix + '://' + domain
|
|
|
|
if not question_json['object']['id'].startswith(id_prefix):
|
2021-11-04 11:48:23 +00:00
|
|
|
return
|
|
|
|
# if the votes on a question have changed then
|
|
|
|
# send out an update
|
2022-01-02 19:57:50 +00:00
|
|
|
question_json['type'] = 'Update'
|
2021-12-25 18:05:01 +00:00
|
|
|
shared_items_federated_domains = []
|
2022-01-01 15:11:42 +00:00
|
|
|
shared_item_federation_tokens = {}
|
2022-03-11 22:13:22 +00:00
|
|
|
send_to_followers_thread(session, session_onion, session_i2p,
|
|
|
|
base_dir, nickname, domain,
|
2021-12-29 21:55:09 +00:00
|
|
|
onion_domain, i2p_domain, port,
|
|
|
|
http_prefix, federation_list,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads, post_log,
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers, person_cache,
|
|
|
|
post_json_object, debug, __version__,
|
|
|
|
shared_items_federated_domains,
|
2022-01-01 15:11:42 +00:00
|
|
|
shared_item_federation_tokens,
|
2021-12-29 21:55:09 +00:00
|
|
|
signing_priv_key_pem)
|
|
|
|
|
|
|
|
|
|
|
|
def _create_reply_notification_file(base_dir: str, nickname: str, domain: str,
|
2022-01-02 19:57:50 +00:00
|
|
|
handle: str, debug: bool, post_is_dm: bool,
|
2021-12-29 21:55:09 +00:00
|
|
|
post_json_object: {}, actor: str,
|
2022-01-02 19:57:50 +00:00
|
|
|
update_index_list: [], http_prefix: str,
|
2021-12-29 21:55:09 +00:00
|
|
|
default_reply_interval_hrs: int) -> bool:
|
2021-11-04 12:29:53 +00:00
|
|
|
"""Generates a file indicating that a new reply has arrived
|
|
|
|
The file can then be used by other systems to create a notification
|
|
|
|
xmpp, matrix, email, etc
|
|
|
|
"""
|
2022-01-02 19:57:50 +00:00
|
|
|
is_reply_to_muted_post = False
|
|
|
|
if post_is_dm:
|
|
|
|
return is_reply_to_muted_post
|
2021-12-26 19:36:40 +00:00
|
|
|
if not is_reply(post_json_object, actor):
|
2022-01-02 19:57:50 +00:00
|
|
|
return is_reply_to_muted_post
|
2021-11-04 12:29:53 +00:00
|
|
|
if nickname == 'inbox':
|
2022-01-02 19:57:50 +00:00
|
|
|
return is_reply_to_muted_post
|
2021-11-04 12:29:53 +00:00
|
|
|
# replies index will be updated
|
2022-01-02 19:57:50 +00:00
|
|
|
update_index_list.append('tlreplies')
|
2021-11-04 12:29:53 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
conversation_id = None
|
2021-12-25 22:09:19 +00:00
|
|
|
if post_json_object['object'].get('conversation'):
|
2022-01-02 19:57:50 +00:00
|
|
|
conversation_id = post_json_object['object']['conversation']
|
2021-11-04 12:29:53 +00:00
|
|
|
|
2021-12-25 22:09:19 +00:00
|
|
|
if not post_json_object['object'].get('inReplyTo'):
|
2022-01-02 19:57:50 +00:00
|
|
|
return is_reply_to_muted_post
|
|
|
|
in_reply_to = post_json_object['object']['inReplyTo']
|
|
|
|
if not in_reply_to:
|
|
|
|
return is_reply_to_muted_post
|
|
|
|
if not isinstance(in_reply_to, str):
|
|
|
|
return is_reply_to_muted_post
|
|
|
|
if not is_muted_conv(base_dir, nickname, domain, in_reply_to,
|
|
|
|
conversation_id):
|
2021-11-04 12:29:53 +00:00
|
|
|
# check if the reply is within the allowed time period
|
|
|
|
# after publication
|
2022-01-02 19:57:50 +00:00
|
|
|
reply_interval_hours = \
|
2021-12-28 10:25:50 +00:00
|
|
|
get_reply_interval_hours(base_dir, nickname, domain,
|
|
|
|
default_reply_interval_hrs)
|
2022-01-02 19:57:50 +00:00
|
|
|
if can_reply_to(base_dir, nickname, domain, in_reply_to,
|
|
|
|
reply_interval_hours):
|
|
|
|
act_url = local_actor_url(http_prefix, nickname, domain)
|
|
|
|
_reply_notify(base_dir, handle, act_url + '/tlreplies')
|
2021-11-04 12:29:53 +00:00
|
|
|
else:
|
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('Reply to ' + in_reply_to + ' is outside of the ' +
|
|
|
|
'permitted interval of ' + str(reply_interval_hours) +
|
2021-11-04 12:29:53 +00:00
|
|
|
' hours')
|
|
|
|
return False
|
|
|
|
else:
|
2022-01-02 19:57:50 +00:00
|
|
|
is_reply_to_muted_post = True
|
|
|
|
return is_reply_to_muted_post
|
2021-11-04 12:29:53 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _low_frequency_post_notification(base_dir: str, http_prefix: str,
|
|
|
|
nickname: str, domain: str,
|
|
|
|
port: int, handle: str,
|
2022-01-02 19:57:50 +00:00
|
|
|
post_is_dm: bool, json_obj: {}) -> None:
|
2021-11-04 12:43:46 +00:00
|
|
|
"""Should we notify that a post from this person has arrived?
|
|
|
|
This is for cases where the notify checkbox is enabled on the
|
|
|
|
person options screen
|
|
|
|
"""
|
2022-01-02 19:57:50 +00:00
|
|
|
if post_is_dm:
|
2021-11-04 12:43:46 +00:00
|
|
|
return
|
2022-01-02 19:57:50 +00:00
|
|
|
if not json_obj:
|
2021-11-04 12:43:46 +00:00
|
|
|
return
|
2022-01-02 19:57:50 +00:00
|
|
|
if not json_obj.get('attributedTo'):
|
2021-11-04 12:43:46 +00:00
|
|
|
return
|
2022-01-02 19:57:50 +00:00
|
|
|
if not json_obj.get('id'):
|
2021-11-04 12:43:46 +00:00
|
|
|
return
|
2022-01-02 19:57:50 +00:00
|
|
|
attributed_to = json_obj['attributedTo']
|
|
|
|
if not isinstance(attributed_to, str):
|
2021-11-04 12:43:46 +00:00
|
|
|
return
|
2022-01-02 19:57:50 +00:00
|
|
|
from_nickname = get_nickname_from_actor(attributed_to)
|
|
|
|
from_domain, from_port = get_domain_from_actor(attributed_to)
|
|
|
|
from_domain_full = get_full_domain(from_domain, from_port)
|
2021-12-29 21:55:09 +00:00
|
|
|
if notify_when_person_posts(base_dir, nickname, domain,
|
2022-01-02 19:57:50 +00:00
|
|
|
from_nickname, from_domain_full):
|
|
|
|
post_id = remove_id_ending(json_obj['id'])
|
|
|
|
dom_full = get_full_domain(domain, port)
|
|
|
|
post_link = \
|
|
|
|
local_actor_url(http_prefix, nickname, dom_full) + \
|
2021-12-26 19:47:06 +00:00
|
|
|
'?notifypost=' + post_id.replace('/', '-')
|
2022-01-02 19:57:50 +00:00
|
|
|
_notify_post_arrival(base_dir, handle, post_link)
|
2021-11-04 12:43:46 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _check_for_git_patches(base_dir: str, nickname: str, domain: str,
|
2022-01-02 19:57:50 +00:00
|
|
|
handle: str, json_obj: {}) -> int:
|
2021-11-04 13:05:04 +00:00
|
|
|
"""check for incoming git patches
|
|
|
|
"""
|
2022-01-02 19:57:50 +00:00
|
|
|
if not json_obj:
|
2021-11-04 13:05:04 +00:00
|
|
|
return 0
|
2022-01-02 19:57:50 +00:00
|
|
|
if not json_obj.get('content'):
|
2021-11-04 13:05:04 +00:00
|
|
|
return 0
|
2022-01-02 19:57:50 +00:00
|
|
|
if not json_obj.get('summary'):
|
2021-11-04 13:05:04 +00:00
|
|
|
return 0
|
2022-01-02 19:57:50 +00:00
|
|
|
if not json_obj.get('attributedTo'):
|
2021-11-04 13:05:04 +00:00
|
|
|
return 0
|
2022-01-02 19:57:50 +00:00
|
|
|
attributed_to = json_obj['attributedTo']
|
|
|
|
if not isinstance(attributed_to, str):
|
2021-11-04 13:05:04 +00:00
|
|
|
return 0
|
2022-01-02 19:57:50 +00:00
|
|
|
from_nickname = get_nickname_from_actor(attributed_to)
|
|
|
|
from_domain, from_port = get_domain_from_actor(attributed_to)
|
|
|
|
from_domain_full = get_full_domain(from_domain, from_port)
|
2021-12-29 21:55:09 +00:00
|
|
|
if receive_git_patch(base_dir, nickname, domain,
|
2022-01-02 19:57:50 +00:00
|
|
|
json_obj['type'], json_obj['summary'],
|
|
|
|
json_obj['content'],
|
|
|
|
from_nickname, from_domain_full):
|
2021-12-29 21:55:09 +00:00
|
|
|
_git_patch_notify(base_dir, handle,
|
2022-01-02 19:57:50 +00:00
|
|
|
json_obj['summary'], json_obj['content'],
|
|
|
|
from_nickname, from_domain_full)
|
2021-11-04 13:05:04 +00:00
|
|
|
return 1
|
2022-01-02 19:57:50 +00:00
|
|
|
if '[PATCH]' in json_obj['content']:
|
|
|
|
print('WARN: git patch not accepted - ' + json_obj['summary'])
|
2021-11-04 13:05:04 +00:00
|
|
|
return 2
|
|
|
|
return 0
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _inbox_after_initial(recent_posts_cache: {}, max_recent_posts: int,
|
2022-03-11 22:13:22 +00:00
|
|
|
session, session_onion, session_i2p,
|
|
|
|
key_id: str, handle: str, message_json: {},
|
2021-12-29 21:55:09 +00:00
|
|
|
base_dir: str, http_prefix: str, send_threads: [],
|
2022-01-02 19:57:50 +00:00
|
|
|
post_log: [], cached_webfingers: {}, person_cache: {},
|
2021-12-29 21:55:09 +00:00
|
|
|
queue: [], domain: str,
|
|
|
|
onion_domain: str, i2p_domain: str,
|
|
|
|
port: int, proxy_type: str,
|
|
|
|
federation_list: [], debug: bool,
|
2022-01-02 19:57:50 +00:00
|
|
|
queue_filename: str, destination_filename: str,
|
2021-12-29 21:55:09 +00:00
|
|
|
max_replies: int, allow_deletion: bool,
|
|
|
|
max_mentions: int, max_emoji: int, translate: {},
|
|
|
|
unit_test: bool,
|
|
|
|
yt_replace_domain: str,
|
|
|
|
twitter_replacement_domain: str,
|
|
|
|
show_published_date_only: bool,
|
|
|
|
allow_local_network_access: bool,
|
|
|
|
peertube_instances: [],
|
2022-01-02 19:57:50 +00:00
|
|
|
last_bounce_message: [],
|
2021-12-29 21:55:09 +00:00
|
|
|
theme_name: str, system_language: str,
|
|
|
|
max_like_count: int,
|
|
|
|
signing_priv_key_pem: str,
|
|
|
|
default_reply_interval_hrs: int,
|
|
|
|
cw_lists: {}, lists_enabled: str,
|
2022-01-28 10:54:53 +00:00
|
|
|
content_license_url: str,
|
|
|
|
languages_understood: []) -> bool:
|
2020-09-27 18:35:35 +00:00
|
|
|
""" Anything which needs to be done after initial checks have passed
|
2019-07-10 09:59:22 +00:00
|
|
|
"""
|
2022-01-02 19:57:50 +00:00
|
|
|
actor = key_id
|
2019-09-29 10:41:21 +00:00
|
|
|
if '#' in actor:
|
2022-01-02 19:57:50 +00:00
|
|
|
actor = key_id.split('#')[0]
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
_update_last_seen(base_dir, handle, actor)
|
2020-12-13 11:27:12 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
post_is_dm = False
|
|
|
|
is_group = _group_handle(base_dir, handle)
|
2021-12-29 21:55:09 +00:00
|
|
|
|
|
|
|
if _receive_like(recent_posts_cache,
|
2022-01-02 19:57:50 +00:00
|
|
|
session, handle, is_group,
|
2021-12-29 21:55:09 +00:00
|
|
|
base_dir, http_prefix,
|
|
|
|
domain, port,
|
|
|
|
onion_domain,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads, post_log,
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers,
|
|
|
|
person_cache,
|
|
|
|
message_json,
|
|
|
|
federation_list,
|
|
|
|
debug, signing_priv_key_pem,
|
|
|
|
max_recent_posts, translate,
|
|
|
|
allow_deletion,
|
|
|
|
yt_replace_domain,
|
|
|
|
twitter_replacement_domain,
|
|
|
|
peertube_instances,
|
|
|
|
allow_local_network_access,
|
|
|
|
theme_name, system_language,
|
|
|
|
max_like_count, cw_lists, lists_enabled):
|
2019-07-10 12:40:31 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: Like accepted from ' + actor)
|
2019-07-10 12:40:31 +00:00
|
|
|
return False
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
if _receive_undo_like(recent_posts_cache,
|
2022-01-02 19:57:50 +00:00
|
|
|
session, handle, is_group,
|
2021-12-29 21:55:09 +00:00
|
|
|
base_dir, http_prefix,
|
|
|
|
domain, port,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads, post_log,
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers,
|
|
|
|
person_cache,
|
|
|
|
message_json,
|
|
|
|
federation_list,
|
|
|
|
debug, signing_priv_key_pem,
|
|
|
|
max_recent_posts, translate,
|
|
|
|
allow_deletion,
|
|
|
|
yt_replace_domain,
|
|
|
|
twitter_replacement_domain,
|
|
|
|
peertube_instances,
|
|
|
|
allow_local_network_access,
|
|
|
|
theme_name, system_language,
|
|
|
|
max_like_count, cw_lists, lists_enabled):
|
2019-07-12 09:10:09 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: Undo like accepted from ' + actor)
|
2019-07-12 09:10:09 +00:00
|
|
|
return False
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
if _receive_reaction(recent_posts_cache,
|
2022-01-02 19:57:50 +00:00
|
|
|
session, handle, is_group,
|
2021-12-29 21:55:09 +00:00
|
|
|
base_dir, http_prefix,
|
|
|
|
domain, port,
|
|
|
|
onion_domain,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads, post_log,
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers,
|
|
|
|
person_cache,
|
|
|
|
message_json,
|
|
|
|
federation_list,
|
|
|
|
debug, signing_priv_key_pem,
|
|
|
|
max_recent_posts, translate,
|
|
|
|
allow_deletion,
|
|
|
|
yt_replace_domain,
|
|
|
|
twitter_replacement_domain,
|
|
|
|
peertube_instances,
|
|
|
|
allow_local_network_access,
|
|
|
|
theme_name, system_language,
|
|
|
|
max_like_count, cw_lists, lists_enabled):
|
2021-11-10 12:16:03 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: Reaction accepted from ' + actor)
|
|
|
|
return False
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
if _receive_undo_reaction(recent_posts_cache,
|
2022-01-02 19:57:50 +00:00
|
|
|
session, handle, is_group,
|
2021-12-29 21:55:09 +00:00
|
|
|
base_dir, http_prefix,
|
|
|
|
domain, port,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads, post_log,
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers,
|
|
|
|
person_cache,
|
|
|
|
message_json,
|
|
|
|
federation_list,
|
|
|
|
debug, signing_priv_key_pem,
|
|
|
|
max_recent_posts, translate,
|
|
|
|
allow_deletion,
|
|
|
|
yt_replace_domain,
|
|
|
|
twitter_replacement_domain,
|
|
|
|
peertube_instances,
|
|
|
|
allow_local_network_access,
|
|
|
|
theme_name, system_language,
|
|
|
|
max_like_count, cw_lists, lists_enabled):
|
2021-11-10 12:16:03 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: Undo reaction accepted from ' + actor)
|
|
|
|
return False
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
if _receive_bookmark(recent_posts_cache,
|
2022-01-02 19:57:50 +00:00
|
|
|
session, handle, is_group,
|
2021-12-29 21:55:09 +00:00
|
|
|
base_dir, http_prefix,
|
|
|
|
domain, port,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads, post_log,
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers,
|
|
|
|
person_cache,
|
|
|
|
message_json,
|
|
|
|
federation_list,
|
|
|
|
debug, signing_priv_key_pem,
|
|
|
|
max_recent_posts, translate,
|
|
|
|
allow_deletion,
|
|
|
|
yt_replace_domain,
|
|
|
|
twitter_replacement_domain,
|
|
|
|
peertube_instances,
|
|
|
|
allow_local_network_access,
|
|
|
|
theme_name, system_language,
|
|
|
|
max_like_count, cw_lists, lists_enabled):
|
2019-11-17 14:01:49 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: Bookmark accepted from ' + actor)
|
2019-11-17 14:01:49 +00:00
|
|
|
return False
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
if _receive_undo_bookmark(recent_posts_cache,
|
2022-01-02 19:57:50 +00:00
|
|
|
session, handle, is_group,
|
2021-12-29 21:55:09 +00:00
|
|
|
base_dir, http_prefix,
|
|
|
|
domain, port,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads, post_log,
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers,
|
|
|
|
person_cache,
|
|
|
|
message_json,
|
|
|
|
federation_list,
|
|
|
|
debug, signing_priv_key_pem,
|
|
|
|
max_recent_posts, translate,
|
|
|
|
allow_deletion,
|
|
|
|
yt_replace_domain,
|
|
|
|
twitter_replacement_domain,
|
|
|
|
peertube_instances,
|
|
|
|
allow_local_network_access,
|
|
|
|
theme_name, system_language,
|
|
|
|
max_like_count, cw_lists, lists_enabled):
|
2019-11-17 14:01:49 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: Undo bookmark accepted from ' + actor)
|
2019-11-17 14:01:49 +00:00
|
|
|
return False
|
2020-03-22 21:16:02 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
if is_create_inside_announce(message_json):
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json = message_json['object']
|
2021-09-11 14:30:37 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
if _receive_announce(recent_posts_cache,
|
2022-01-02 19:57:50 +00:00
|
|
|
session, handle, is_group,
|
2021-12-29 21:55:09 +00:00
|
|
|
base_dir, http_prefix,
|
2022-03-11 13:27:54 +00:00
|
|
|
domain, onion_domain, i2p_domain, port,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads, post_log,
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers,
|
|
|
|
person_cache,
|
|
|
|
message_json,
|
|
|
|
federation_list,
|
|
|
|
debug, translate,
|
|
|
|
yt_replace_domain,
|
|
|
|
twitter_replacement_domain,
|
|
|
|
allow_local_network_access,
|
|
|
|
theme_name, system_language,
|
|
|
|
signing_priv_key_pem,
|
|
|
|
max_recent_posts,
|
|
|
|
allow_deletion,
|
|
|
|
peertube_instances,
|
|
|
|
max_like_count, cw_lists, lists_enabled):
|
2019-07-11 19:31:02 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: Announce accepted from ' + actor)
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
if _receive_undo_announce(recent_posts_cache,
|
2022-01-02 19:57:50 +00:00
|
|
|
session, handle, is_group,
|
2021-12-29 21:55:09 +00:00
|
|
|
base_dir, http_prefix,
|
|
|
|
domain, port,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads, post_log,
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers,
|
|
|
|
person_cache,
|
|
|
|
message_json,
|
|
|
|
federation_list,
|
|
|
|
debug):
|
2019-07-12 09:41:57 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: Undo announce accepted from ' + actor)
|
2019-07-12 11:35:03 +00:00
|
|
|
return False
|
2019-07-12 09:41:57 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
if _receive_delete(session, handle, is_group,
|
2021-12-29 21:55:09 +00:00
|
|
|
base_dir, http_prefix,
|
|
|
|
domain, port,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads, post_log,
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers,
|
|
|
|
person_cache,
|
|
|
|
message_json,
|
|
|
|
federation_list,
|
|
|
|
debug, allow_deletion,
|
|
|
|
recent_posts_cache):
|
2019-08-12 18:02:29 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: Delete accepted from ' + actor)
|
2019-08-12 18:02:29 +00:00
|
|
|
return False
|
|
|
|
|
2019-07-10 13:32:47 +00:00
|
|
|
if debug:
|
2020-09-27 18:35:35 +00:00
|
|
|
print('DEBUG: initial checks passed')
|
2022-01-02 19:57:50 +00:00
|
|
|
print('copy queue file from ' + queue_filename +
|
|
|
|
' to ' + destination_filename)
|
2019-08-16 22:04:45 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
if os.path.isfile(destination_filename):
|
2019-09-11 17:42:55 +00:00
|
|
|
return True
|
2019-10-04 09:58:02 +00:00
|
|
|
|
2021-12-25 23:51:19 +00:00
|
|
|
if message_json.get('postNickname'):
|
|
|
|
post_json_object = message_json['post']
|
2019-07-18 09:26:47 +00:00
|
|
|
else:
|
2021-12-25 23:51:19 +00:00
|
|
|
post_json_object = message_json
|
2019-10-04 12:22:56 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
nickname = handle.split('@')[0]
|
2022-01-02 19:57:50 +00:00
|
|
|
json_obj = None
|
2021-12-26 12:45:03 +00:00
|
|
|
domain_full = get_full_domain(domain, port)
|
2021-12-29 21:55:09 +00:00
|
|
|
if _valid_post_content(base_dir, nickname, domain,
|
|
|
|
post_json_object, max_mentions, max_emoji,
|
|
|
|
allow_local_network_access, debug,
|
|
|
|
system_language, http_prefix,
|
|
|
|
domain_full, person_cache):
|
2021-12-14 13:27:00 +00:00
|
|
|
# is the sending actor valid?
|
2021-12-29 21:55:09 +00:00
|
|
|
if not valid_sending_actor(session, base_dir, nickname, domain,
|
|
|
|
person_cache, post_json_object,
|
|
|
|
signing_priv_key_pem, debug, unit_test):
|
2021-12-14 13:27:00 +00:00
|
|
|
return False
|
2020-05-02 13:17:02 +00:00
|
|
|
|
2021-12-25 22:09:19 +00:00
|
|
|
if post_json_object.get('object'):
|
2022-01-02 19:57:50 +00:00
|
|
|
json_obj = post_json_object['object']
|
|
|
|
if not isinstance(json_obj, dict):
|
|
|
|
json_obj = None
|
2020-08-23 14:45:58 +00:00
|
|
|
else:
|
2022-01-02 19:57:50 +00:00
|
|
|
json_obj = post_json_object
|
2021-11-04 13:05:04 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
if _check_for_git_patches(base_dir, nickname, domain,
|
2022-01-02 19:57:50 +00:00
|
|
|
handle, json_obj) == 2:
|
2021-11-04 13:05:04 +00:00
|
|
|
return False
|
2020-05-02 11:08:38 +00:00
|
|
|
|
2020-01-15 10:56:39 +00:00
|
|
|
# replace YouTube links, so they get less tracking data
|
2021-12-28 21:36:27 +00:00
|
|
|
replace_you_tube(post_json_object, yt_replace_domain, system_language)
|
2021-09-18 17:08:14 +00:00
|
|
|
# replace twitter link domains, so that you can view twitter posts
|
|
|
|
# without having an account
|
2021-12-28 21:36:27 +00:00
|
|
|
replace_twitter(post_json_object, twitter_replacement_domain,
|
|
|
|
system_language)
|
2020-01-15 10:56:39 +00:00
|
|
|
|
2019-10-22 20:07:12 +00:00
|
|
|
# list of indexes to be updated
|
2022-01-02 19:57:50 +00:00
|
|
|
update_index_list = ['inbox']
|
2021-12-28 20:32:11 +00:00
|
|
|
populate_replies(base_dir, http_prefix, domain, post_json_object,
|
|
|
|
max_replies, debug)
|
2019-11-29 19:22:11 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
_receive_question_vote(base_dir, nickname, domain,
|
|
|
|
http_prefix, handle, debug,
|
|
|
|
post_json_object, recent_posts_cache,
|
2022-03-11 22:13:22 +00:00
|
|
|
session, session_onion, session_i2p,
|
|
|
|
onion_domain, i2p_domain, port,
|
2022-01-02 19:57:50 +00:00
|
|
|
federation_list, send_threads, post_log,
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers, person_cache,
|
|
|
|
signing_priv_key_pem,
|
|
|
|
max_recent_posts, translate,
|
|
|
|
allow_deletion,
|
|
|
|
yt_replace_domain,
|
|
|
|
twitter_replacement_domain,
|
|
|
|
peertube_instances,
|
|
|
|
allow_local_network_access,
|
|
|
|
theme_name, system_language,
|
|
|
|
max_like_count,
|
|
|
|
cw_lists, lists_enabled)
|
2019-11-29 19:22:11 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
is_reply_to_muted_post = False
|
2020-08-27 17:40:09 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
if not is_group:
|
2019-10-04 12:22:56 +00:00
|
|
|
# create a DM notification file if needed
|
2022-01-02 19:57:50 +00:00
|
|
|
post_is_dm = is_dm(post_json_object)
|
|
|
|
if post_is_dm:
|
2021-12-29 21:55:09 +00:00
|
|
|
if not _is_valid_dm(base_dir, nickname, domain, port,
|
2022-01-02 19:57:50 +00:00
|
|
|
post_json_object, update_index_list,
|
2021-12-29 21:55:09 +00:00
|
|
|
session, http_prefix,
|
|
|
|
federation_list,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads, post_log,
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers,
|
|
|
|
person_cache,
|
|
|
|
translate, debug,
|
2022-01-02 19:57:50 +00:00
|
|
|
last_bounce_message,
|
2021-12-29 21:55:09 +00:00
|
|
|
handle, system_language,
|
|
|
|
signing_priv_key_pem,
|
2022-01-28 10:54:53 +00:00
|
|
|
content_license_url,
|
|
|
|
languages_understood):
|
2021-06-07 16:34:08 +00:00
|
|
|
return False
|
2019-10-04 12:22:56 +00:00
|
|
|
|
|
|
|
# get the actor being replied to
|
2021-12-26 10:19:59 +00:00
|
|
|
actor = local_actor_url(http_prefix, nickname, domain_full)
|
2019-10-04 12:22:56 +00:00
|
|
|
|
|
|
|
# create a reply notification file if needed
|
2022-01-02 19:57:50 +00:00
|
|
|
is_reply_to_muted_post = \
|
2021-12-29 21:55:09 +00:00
|
|
|
_create_reply_notification_file(base_dir, nickname, domain,
|
2022-01-02 19:57:50 +00:00
|
|
|
handle, debug, post_is_dm,
|
2021-12-29 21:55:09 +00:00
|
|
|
post_json_object, actor,
|
2022-01-02 19:57:50 +00:00
|
|
|
update_index_list, http_prefix,
|
2021-12-29 21:55:09 +00:00
|
|
|
default_reply_interval_hrs)
|
|
|
|
|
|
|
|
if is_image_media(session, base_dir, http_prefix,
|
|
|
|
nickname, domain, post_json_object,
|
|
|
|
translate,
|
|
|
|
yt_replace_domain,
|
|
|
|
twitter_replacement_domain,
|
|
|
|
allow_local_network_access,
|
|
|
|
recent_posts_cache, debug, system_language,
|
|
|
|
domain_full, person_cache, signing_priv_key_pem):
|
2019-10-22 20:30:43 +00:00
|
|
|
# media index will be updated
|
2022-01-02 19:57:50 +00:00
|
|
|
update_index_list.append('tlmedia')
|
2021-12-28 13:49:44 +00:00
|
|
|
if is_blog_post(post_json_object):
|
2020-02-24 14:39:25 +00:00
|
|
|
# blogs index will be updated
|
2022-01-02 19:57:50 +00:00
|
|
|
update_index_list.append('tlblogs')
|
2019-10-22 20:30:43 +00:00
|
|
|
|
2019-10-04 10:00:57 +00:00
|
|
|
# get the avatar for a reply/announce
|
2021-12-29 21:55:09 +00:00
|
|
|
_obtain_avatar_for_reply_post(session, base_dir,
|
2022-03-11 13:27:54 +00:00
|
|
|
http_prefix, domain,
|
|
|
|
onion_domain, i2p_domain,
|
2021-12-29 21:55:09 +00:00
|
|
|
person_cache, post_json_object, debug,
|
|
|
|
signing_priv_key_pem)
|
2019-10-04 10:00:57 +00:00
|
|
|
|
|
|
|
# save the post to file
|
2022-01-02 19:57:50 +00:00
|
|
|
if save_json(post_json_object, destination_filename):
|
2021-12-29 21:55:09 +00:00
|
|
|
_low_frequency_post_notification(base_dir, http_prefix,
|
|
|
|
nickname, domain, port,
|
2022-01-02 19:57:50 +00:00
|
|
|
handle, post_is_dm, json_obj)
|
2021-07-06 20:38:08 +00:00
|
|
|
|
2020-08-27 17:40:09 +00:00
|
|
|
# If this is a reply to a muted post then also mute it.
|
|
|
|
# This enables you to ignore a threat that's getting boring
|
2022-01-02 19:57:50 +00:00
|
|
|
if is_reply_to_muted_post:
|
|
|
|
print('MUTE REPLY: ' + destination_filename)
|
|
|
|
destination_filename_muted = destination_filename + '.muted'
|
2021-11-25 21:18:53 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(destination_filename_muted, 'w+') as mute_file:
|
|
|
|
mute_file.write('\n')
|
2021-11-25 21:18:53 +00:00
|
|
|
except OSError:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('EX: unable to write ' + destination_filename_muted)
|
2020-08-27 17:40:09 +00:00
|
|
|
|
2019-10-22 20:07:12 +00:00
|
|
|
# update the indexes for different timelines
|
2022-01-02 19:57:50 +00:00
|
|
|
for boxname in update_index_list:
|
2021-12-29 21:55:09 +00:00
|
|
|
if not inbox_update_index(boxname, base_dir, handle,
|
2022-01-02 19:57:50 +00:00
|
|
|
destination_filename, debug):
|
2020-04-03 16:27:34 +00:00
|
|
|
print('ERROR: unable to update ' + boxname + ' index')
|
2020-08-26 11:10:21 +00:00
|
|
|
else:
|
2021-03-01 15:24:12 +00:00
|
|
|
if boxname == 'inbox':
|
2021-12-26 20:43:03 +00:00
|
|
|
if is_recent_post(post_json_object, 3):
|
2021-12-26 12:45:03 +00:00
|
|
|
domain_full = get_full_domain(domain, port)
|
2021-12-29 21:55:09 +00:00
|
|
|
update_speaker(base_dir, http_prefix,
|
|
|
|
nickname, domain, domain_full,
|
|
|
|
post_json_object, person_cache,
|
|
|
|
translate, None, theme_name)
|
2021-12-25 21:32:15 +00:00
|
|
|
if not unit_test:
|
2020-08-26 11:19:32 +00:00
|
|
|
if debug:
|
|
|
|
print('Saving inbox post as html to cache')
|
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
html_cache_start_time = time.time()
|
|
|
|
handle_name = handle.split('@')[0]
|
2021-12-29 21:55:09 +00:00
|
|
|
allow_local_net_access = allow_local_network_access
|
|
|
|
show_pub_date_only = show_published_date_only
|
2022-02-25 19:12:40 +00:00
|
|
|
timezone = get_account_timezone(base_dir,
|
|
|
|
handle_name, domain)
|
2021-12-29 21:55:09 +00:00
|
|
|
_inbox_store_post_to_html_cache(recent_posts_cache,
|
|
|
|
max_recent_posts,
|
|
|
|
translate, base_dir,
|
|
|
|
http_prefix,
|
|
|
|
session,
|
|
|
|
cached_webfingers,
|
|
|
|
person_cache,
|
2022-01-02 19:57:50 +00:00
|
|
|
handle_name,
|
2021-12-29 21:55:09 +00:00
|
|
|
domain, port,
|
|
|
|
post_json_object,
|
|
|
|
allow_deletion,
|
|
|
|
boxname,
|
|
|
|
show_pub_date_only,
|
|
|
|
peertube_instances,
|
|
|
|
allow_local_net_access,
|
|
|
|
theme_name,
|
|
|
|
system_language,
|
|
|
|
max_like_count,
|
|
|
|
signing_priv_key_pem,
|
|
|
|
cw_lists,
|
2022-02-25 19:12:40 +00:00
|
|
|
lists_enabled,
|
|
|
|
timezone)
|
2020-08-26 11:19:32 +00:00
|
|
|
if debug:
|
2021-12-31 21:18:12 +00:00
|
|
|
time_diff = \
|
2022-01-02 19:57:50 +00:00
|
|
|
str(int((time.time() - html_cache_start_time) *
|
2020-08-26 11:19:32 +00:00
|
|
|
1000))
|
2022-01-02 19:57:50 +00:00
|
|
|
print('Saved ' +
|
|
|
|
boxname + ' post as html to cache in ' +
|
2021-12-31 21:18:12 +00:00
|
|
|
time_diff + ' mS')
|
2019-10-20 10:25:38 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
handle_name = handle.split('@')[0]
|
2021-10-14 15:12:35 +00:00
|
|
|
|
|
|
|
# is this an edit of a previous post?
|
|
|
|
# in Mastodon "delete and redraft"
|
2021-12-29 21:55:09 +00:00
|
|
|
# NOTE: this must be done before update_conversation is called
|
2022-01-02 19:57:50 +00:00
|
|
|
edited_filename = \
|
|
|
|
edited_post_filename(base_dir, handle_name, domain,
|
2021-12-29 21:55:09 +00:00
|
|
|
post_json_object, debug, 300)
|
2021-10-14 15:12:35 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
update_conversation(base_dir, handle_name, domain,
|
|
|
|
post_json_object)
|
2021-08-12 10:22:04 +00:00
|
|
|
|
2021-10-14 15:12:35 +00:00
|
|
|
# If this was an edit then delete the previous version of the post
|
2022-01-02 19:57:50 +00:00
|
|
|
if edited_filename:
|
2021-12-28 14:55:45 +00:00
|
|
|
delete_post(base_dir, http_prefix,
|
2022-01-02 19:57:50 +00:00
|
|
|
nickname, domain, edited_filename,
|
2021-12-28 14:55:45 +00:00
|
|
|
debug, recent_posts_cache)
|
2021-10-14 15:12:35 +00:00
|
|
|
|
2021-10-18 15:20:22 +00:00
|
|
|
# store the id of the last post made by this actor
|
2021-12-29 21:55:09 +00:00
|
|
|
_store_last_post_id(base_dir, nickname, domain, post_json_object)
|
2021-10-18 15:20:22 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
_inbox_update_calendar(base_dir, handle, post_json_object)
|
2019-10-19 18:08:47 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
store_hash_tags(base_dir, handle_name, domain,
|
2021-12-29 21:55:09 +00:00
|
|
|
http_prefix, domain_full,
|
|
|
|
post_json_object, translate)
|
2019-12-12 17:34:31 +00:00
|
|
|
|
2019-10-19 13:00:46 +00:00
|
|
|
# send the post out to group members
|
2022-01-02 19:57:50 +00:00
|
|
|
if is_group:
|
2022-03-11 22:13:22 +00:00
|
|
|
_send_to_group_members(session, session_onion, session_i2p,
|
|
|
|
base_dir, handle, port,
|
2021-12-29 21:55:09 +00:00
|
|
|
post_json_object,
|
|
|
|
http_prefix, federation_list,
|
|
|
|
send_threads,
|
2022-01-02 19:57:50 +00:00
|
|
|
post_log, cached_webfingers,
|
2021-12-29 21:55:09 +00:00
|
|
|
person_cache,
|
|
|
|
debug, system_language,
|
|
|
|
onion_domain, i2p_domain,
|
|
|
|
signing_priv_key_pem)
|
2019-10-04 12:22:56 +00:00
|
|
|
|
2019-10-04 10:00:57 +00:00
|
|
|
# if the post wasn't saved
|
2022-01-02 19:57:50 +00:00
|
|
|
if not os.path.isfile(destination_filename):
|
2019-08-17 12:26:09 +00:00
|
|
|
return False
|
|
|
|
|
2019-07-10 09:59:22 +00:00
|
|
|
return True
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-28 19:33:29 +00:00
|
|
|
def clear_queue_items(base_dir: str, queue: []) -> None:
|
2020-05-22 12:57:15 +00:00
|
|
|
"""Clears the queue for each account
|
2020-05-22 11:48:13 +00:00
|
|
|
"""
|
|
|
|
ctr = 0
|
|
|
|
queue.clear()
|
2022-01-02 19:57:50 +00:00
|
|
|
for _, dirs, _ in os.walk(base_dir + '/accounts'):
|
2020-05-22 11:48:13 +00:00
|
|
|
for account in dirs:
|
2022-01-02 19:57:50 +00:00
|
|
|
queue_dir = base_dir + '/accounts/' + account + '/queue'
|
|
|
|
if not os.path.isdir(queue_dir):
|
2020-06-02 09:05:55 +00:00
|
|
|
continue
|
2022-01-02 19:57:50 +00:00
|
|
|
for _, _, queuefiles in os.walk(queue_dir):
|
2020-06-02 09:05:55 +00:00
|
|
|
for qfile in queuefiles:
|
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
os.remove(os.path.join(queue_dir, qfile))
|
2020-06-02 09:05:55 +00:00
|
|
|
ctr += 1
|
2021-11-25 18:42:38 +00:00
|
|
|
except OSError:
|
2021-12-28 19:33:29 +00:00
|
|
|
print('EX: clear_queue_items unable to delete ' +
|
|
|
|
qfile)
|
2021-06-05 21:09:11 +00:00
|
|
|
break
|
2020-12-13 22:13:45 +00:00
|
|
|
break
|
2020-05-22 11:48:13 +00:00
|
|
|
if ctr > 0:
|
|
|
|
print('Removed ' + str(ctr) + ' inbox queue items')
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _restore_queue_items(base_dir: str, queue: []) -> None:
|
2019-07-12 21:09:23 +00:00
|
|
|
"""Checks the queue for each account and appends filenames
|
|
|
|
"""
|
2019-08-15 16:45:07 +00:00
|
|
|
queue.clear()
|
2022-01-02 19:57:50 +00:00
|
|
|
for _, dirs, _ in os.walk(base_dir + '/accounts'):
|
2019-07-12 21:09:23 +00:00
|
|
|
for account in dirs:
|
2022-01-02 19:57:50 +00:00
|
|
|
queue_dir = base_dir + '/accounts/' + account + '/queue'
|
|
|
|
if not os.path.isdir(queue_dir):
|
2020-06-02 09:05:55 +00:00
|
|
|
continue
|
2022-01-02 19:57:50 +00:00
|
|
|
for _, _, queuefiles in os.walk(queue_dir):
|
2020-06-02 09:05:55 +00:00
|
|
|
for qfile in queuefiles:
|
2022-01-02 19:57:50 +00:00
|
|
|
queue.append(os.path.join(queue_dir, qfile))
|
2021-06-05 21:09:11 +00:00
|
|
|
break
|
2020-12-13 22:13:45 +00:00
|
|
|
break
|
2020-04-03 16:27:34 +00:00
|
|
|
if len(queue) > 0:
|
|
|
|
print('Restored ' + str(len(queue)) + ' inbox queue items')
|
2019-09-02 21:52:43 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2021-12-28 20:32:11 +00:00
|
|
|
def run_inbox_queue_watchdog(project_version: str, httpd) -> None:
|
2019-09-02 21:52:43 +00:00
|
|
|
"""This tries to keep the inbox thread running even if it dies
|
|
|
|
"""
|
|
|
|
print('Starting inbox queue watchdog')
|
2022-01-02 19:57:50 +00:00
|
|
|
inbox_queue_original = httpd.thrInboxQueue.clone(run_inbox_queue)
|
2019-09-02 21:52:43 +00:00
|
|
|
httpd.thrInboxQueue.start()
|
|
|
|
while True:
|
2020-03-22 21:16:02 +00:00
|
|
|
time.sleep(20)
|
2022-01-01 15:11:42 +00:00
|
|
|
if not httpd.thrInboxQueue.is_alive() or httpd.restart_inbox_queue:
|
|
|
|
httpd.restart_inbox_queue_in_progress = True
|
2019-09-02 21:52:43 +00:00
|
|
|
httpd.thrInboxQueue.kill()
|
2022-01-02 19:57:50 +00:00
|
|
|
httpd.thrInboxQueue = inbox_queue_original.clone(run_inbox_queue)
|
2021-12-25 23:23:29 +00:00
|
|
|
httpd.inbox_queue.clear()
|
2019-09-02 21:52:43 +00:00
|
|
|
httpd.thrInboxQueue.start()
|
|
|
|
print('Restarting inbox queue...')
|
2022-01-01 15:11:42 +00:00
|
|
|
httpd.restart_inbox_queue_in_progress = False
|
|
|
|
httpd.restart_inbox_queue = False
|
2019-09-02 21:52:43 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
def _inbox_quota_exceeded(queue: {}, queue_filename: str,
|
|
|
|
queue_json: {}, quotas_daily: {}, quotas_per_min: {},
|
2021-12-29 21:55:09 +00:00
|
|
|
domain_max_posts_per_day: int,
|
|
|
|
account_max_posts_per_day: int,
|
|
|
|
debug: bool) -> bool:
|
2021-06-07 13:48:29 +00:00
|
|
|
"""limit the number of posts which can arrive per domain per day
|
|
|
|
"""
|
2022-01-02 19:57:50 +00:00
|
|
|
post_domain = queue_json['postDomain']
|
|
|
|
if not post_domain:
|
2021-06-07 13:48:29 +00:00
|
|
|
return False
|
|
|
|
|
2021-12-25 21:13:55 +00:00
|
|
|
if domain_max_posts_per_day > 0:
|
2022-01-02 19:57:50 +00:00
|
|
|
if quotas_daily['domains'].get(post_domain):
|
|
|
|
if quotas_daily['domains'][post_domain] > \
|
2021-12-25 21:13:55 +00:00
|
|
|
domain_max_posts_per_day:
|
2021-06-07 13:48:29 +00:00
|
|
|
print('Queue: Quota per day - Maximum posts for ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
post_domain + ' reached (' +
|
2021-12-25 21:13:55 +00:00
|
|
|
str(domain_max_posts_per_day) + ')')
|
2021-06-07 13:48:29 +00:00
|
|
|
if len(queue) > 0:
|
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
os.remove(queue_filename)
|
2021-11-25 18:42:38 +00:00
|
|
|
except OSError:
|
2021-12-29 21:55:09 +00:00
|
|
|
print('EX: _inbox_quota_exceeded unable to delete ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
str(queue_filename))
|
2021-06-07 13:48:29 +00:00
|
|
|
queue.pop(0)
|
|
|
|
return True
|
2022-01-02 19:57:50 +00:00
|
|
|
quotas_daily['domains'][post_domain] += 1
|
2021-06-07 13:48:29 +00:00
|
|
|
else:
|
2022-01-02 19:57:50 +00:00
|
|
|
quotas_daily['domains'][post_domain] = 1
|
2021-06-07 13:48:29 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
if quotas_per_min['domains'].get(post_domain):
|
|
|
|
domain_max_posts_per_min = \
|
2021-12-25 21:13:55 +00:00
|
|
|
int(domain_max_posts_per_day / (24 * 60))
|
2022-01-02 19:57:50 +00:00
|
|
|
if domain_max_posts_per_min < 5:
|
|
|
|
domain_max_posts_per_min = 5
|
|
|
|
if quotas_per_min['domains'][post_domain] > \
|
|
|
|
domain_max_posts_per_min:
|
2021-06-07 13:48:29 +00:00
|
|
|
print('Queue: Quota per min - Maximum posts for ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
post_domain + ' reached (' +
|
|
|
|
str(domain_max_posts_per_min) + ')')
|
2021-06-07 13:48:29 +00:00
|
|
|
if len(queue) > 0:
|
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
os.remove(queue_filename)
|
2021-11-25 18:42:38 +00:00
|
|
|
except OSError:
|
2021-12-29 21:55:09 +00:00
|
|
|
print('EX: _inbox_quota_exceeded unable to delete ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
str(queue_filename))
|
2021-06-07 13:48:29 +00:00
|
|
|
queue.pop(0)
|
|
|
|
return True
|
2022-01-02 19:57:50 +00:00
|
|
|
quotas_per_min['domains'][post_domain] += 1
|
2021-06-07 13:48:29 +00:00
|
|
|
else:
|
2022-01-02 19:57:50 +00:00
|
|
|
quotas_per_min['domains'][post_domain] = 1
|
2021-06-07 13:48:29 +00:00
|
|
|
|
2021-12-25 21:18:07 +00:00
|
|
|
if account_max_posts_per_day > 0:
|
2022-01-02 19:57:50 +00:00
|
|
|
post_handle = queue_json['postNickname'] + '@' + post_domain
|
|
|
|
if quotas_daily['accounts'].get(post_handle):
|
|
|
|
if quotas_daily['accounts'][post_handle] > \
|
2021-12-25 21:18:07 +00:00
|
|
|
account_max_posts_per_day:
|
2021-06-07 13:48:29 +00:00
|
|
|
print('Queue: Quota account posts per day -' +
|
|
|
|
' Maximum posts for ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
post_handle + ' reached (' +
|
2021-12-25 21:18:07 +00:00
|
|
|
str(account_max_posts_per_day) + ')')
|
2021-06-07 13:48:29 +00:00
|
|
|
if len(queue) > 0:
|
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
os.remove(queue_filename)
|
2021-11-25 18:42:38 +00:00
|
|
|
except OSError:
|
2021-12-29 21:55:09 +00:00
|
|
|
print('EX: _inbox_quota_exceeded unable to delete ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
str(queue_filename))
|
2021-06-07 13:48:29 +00:00
|
|
|
queue.pop(0)
|
|
|
|
return True
|
2022-01-02 19:57:50 +00:00
|
|
|
quotas_daily['accounts'][post_handle] += 1
|
2021-06-07 13:48:29 +00:00
|
|
|
else:
|
2022-01-02 19:57:50 +00:00
|
|
|
quotas_daily['accounts'][post_handle] = 1
|
2021-06-07 13:48:29 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
if quotas_per_min['accounts'].get(post_handle):
|
|
|
|
account_max_posts_per_min = \
|
2021-12-25 21:18:07 +00:00
|
|
|
int(account_max_posts_per_day / (24 * 60))
|
2022-01-02 19:57:50 +00:00
|
|
|
if account_max_posts_per_min < 5:
|
|
|
|
account_max_posts_per_min = 5
|
|
|
|
if quotas_per_min['accounts'][post_handle] > \
|
|
|
|
account_max_posts_per_min:
|
2021-06-07 13:48:29 +00:00
|
|
|
print('Queue: Quota account posts per min -' +
|
|
|
|
' Maximum posts for ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
post_handle + ' reached (' +
|
|
|
|
str(account_max_posts_per_min) + ')')
|
2021-06-07 13:48:29 +00:00
|
|
|
if len(queue) > 0:
|
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
os.remove(queue_filename)
|
2021-11-25 18:42:38 +00:00
|
|
|
except OSError:
|
2021-12-29 21:55:09 +00:00
|
|
|
print('EX: _inbox_quota_exceeded unable to delete ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
str(queue_filename))
|
2021-06-07 13:48:29 +00:00
|
|
|
queue.pop(0)
|
|
|
|
return True
|
2022-01-02 19:57:50 +00:00
|
|
|
quotas_per_min['accounts'][post_handle] += 1
|
2021-06-07 13:48:29 +00:00
|
|
|
else:
|
2022-01-02 19:57:50 +00:00
|
|
|
quotas_per_min['accounts'][post_handle] = 1
|
2021-06-07 13:48:29 +00:00
|
|
|
|
|
|
|
if debug:
|
2021-12-25 21:18:07 +00:00
|
|
|
if account_max_posts_per_day > 0 or domain_max_posts_per_day > 0:
|
2022-01-02 19:57:50 +00:00
|
|
|
pprint(quotas_daily)
|
2021-06-07 13:48:29 +00:00
|
|
|
return False
|
|
|
|
|
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
def _check_json_signature(base_dir: str, queue_json: {}) -> (bool, bool):
|
2021-06-07 14:07:15 +00:00
|
|
|
"""check if a json signature exists on this post
|
|
|
|
"""
|
2022-01-02 19:57:50 +00:00
|
|
|
has_json_signature = False
|
|
|
|
jwebsig_type = None
|
|
|
|
original_json = queue_json['original']
|
|
|
|
if not original_json.get('@context') or \
|
|
|
|
not original_json.get('signature'):
|
|
|
|
return has_json_signature, jwebsig_type
|
|
|
|
if not isinstance(original_json['signature'], dict):
|
|
|
|
return has_json_signature, jwebsig_type
|
2021-06-07 14:07:15 +00:00
|
|
|
# see https://tools.ietf.org/html/rfc7515
|
2022-01-02 19:57:50 +00:00
|
|
|
jwebsig = original_json['signature']
|
2021-06-07 14:07:15 +00:00
|
|
|
# signature exists and is of the expected type
|
|
|
|
if not jwebsig.get('type') or \
|
|
|
|
not jwebsig.get('signatureValue'):
|
2022-01-02 19:57:50 +00:00
|
|
|
return has_json_signature, jwebsig_type
|
|
|
|
jwebsig_type = jwebsig['type']
|
|
|
|
if jwebsig_type == 'RsaSignature2017':
|
|
|
|
if has_valid_context(original_json):
|
|
|
|
has_json_signature = True
|
2021-06-07 14:07:15 +00:00
|
|
|
else:
|
2022-01-02 19:57:50 +00:00
|
|
|
unknown_contexts_file = \
|
2021-12-25 16:17:53 +00:00
|
|
|
base_dir + '/accounts/unknownContexts.txt'
|
2022-01-02 19:57:50 +00:00
|
|
|
unknown_context = str(original_json['@context'])
|
2021-06-07 14:07:15 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
print('unrecognized @context: ' + unknown_context)
|
2021-06-07 14:07:15 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
already_unknown = False
|
|
|
|
if os.path.isfile(unknown_contexts_file):
|
|
|
|
if unknown_context in \
|
|
|
|
open(unknown_contexts_file).read():
|
|
|
|
already_unknown = True
|
2021-06-07 14:07:15 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
if not already_unknown:
|
2021-11-25 21:18:53 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(unknown_contexts_file, 'a+') as unknown_file:
|
|
|
|
unknown_file.write(unknown_context + '\n')
|
2021-11-25 21:18:53 +00:00
|
|
|
except OSError:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('EX: unable to append ' + unknown_contexts_file)
|
2021-06-07 14:07:15 +00:00
|
|
|
else:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('Unrecognized jsonld signature type: ' + jwebsig_type)
|
2021-06-07 14:07:15 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
unknown_signatures_file = \
|
2021-12-25 16:17:53 +00:00
|
|
|
base_dir + '/accounts/unknownJsonSignatures.txt'
|
2021-06-07 14:07:15 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
already_unknown = False
|
|
|
|
if os.path.isfile(unknown_signatures_file):
|
|
|
|
if jwebsig_type in \
|
|
|
|
open(unknown_signatures_file).read():
|
|
|
|
already_unknown = True
|
2021-06-07 14:07:15 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
if not already_unknown:
|
2021-11-25 21:18:53 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(unknown_signatures_file, 'a+') as unknown_file:
|
|
|
|
unknown_file.write(jwebsig_type + '\n')
|
2021-11-25 21:18:53 +00:00
|
|
|
except OSError:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('EX: unable to append ' + unknown_signatures_file)
|
|
|
|
return has_json_signature, jwebsig_type
|
2021-06-07 14:07:15 +00:00
|
|
|
|
|
|
|
|
2022-03-12 10:41:36 +00:00
|
|
|
def _receive_follow_request(session, session_onion, session_i2p,
|
|
|
|
base_dir: str, http_prefix: str,
|
2022-01-02 19:57:50 +00:00
|
|
|
port: int, send_threads: [], post_log: [],
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers: {}, person_cache: {},
|
|
|
|
message_json: {}, federation_list: [],
|
|
|
|
debug: bool, project_version: str,
|
|
|
|
max_followers: int, onion_domain: str,
|
2022-03-11 13:27:54 +00:00
|
|
|
i2p_domain: str, signing_priv_key_pem: str,
|
2021-12-29 21:55:09 +00:00
|
|
|
unit_test: bool) -> bool:
|
2021-12-14 13:59:42 +00:00
|
|
|
"""Receives a follow request within the POST section of HTTPServer
|
|
|
|
"""
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json['type'].startswith('Follow'):
|
|
|
|
if not message_json['type'].startswith('Join'):
|
2021-12-14 13:59:42 +00:00
|
|
|
return False
|
|
|
|
print('Receiving follow request')
|
2021-12-26 17:15:04 +00:00
|
|
|
if not has_actor(message_json, debug):
|
2021-12-14 13:59:42 +00:00
|
|
|
return False
|
2021-12-26 12:19:00 +00:00
|
|
|
if not has_users_path(message_json['actor']):
|
2021-12-14 13:59:42 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: users/profile/accounts/channel missing from actor')
|
|
|
|
return False
|
2022-01-02 19:57:50 +00:00
|
|
|
domain, temp_port = get_domain_from_actor(message_json['actor'])
|
|
|
|
from_port = port
|
|
|
|
domain_full = get_full_domain(domain, temp_port)
|
|
|
|
if temp_port:
|
|
|
|
from_port = temp_port
|
2021-12-27 18:28:26 +00:00
|
|
|
if not domain_permitted(domain, federation_list):
|
2021-12-14 13:59:42 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: follower from domain not permitted - ' + domain)
|
|
|
|
return False
|
2021-12-27 22:19:18 +00:00
|
|
|
nickname = get_nickname_from_actor(message_json['actor'])
|
2021-12-14 13:59:42 +00:00
|
|
|
if not nickname:
|
|
|
|
# single user instance
|
|
|
|
nickname = 'dev'
|
|
|
|
if debug:
|
|
|
|
print('DEBUG: follow request does not contain a ' +
|
|
|
|
'nickname. Assuming single user instance.')
|
2021-12-25 23:51:19 +00:00
|
|
|
if not message_json.get('to'):
|
|
|
|
message_json['to'] = message_json['object']
|
2021-12-26 12:19:00 +00:00
|
|
|
if not has_users_path(message_json['object']):
|
2021-12-14 13:59:42 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: users/profile/channel/accounts ' +
|
|
|
|
'not found within object')
|
|
|
|
return False
|
2022-01-02 19:57:50 +00:00
|
|
|
domain_to_follow, temp_port = get_domain_from_actor(message_json['object'])
|
|
|
|
if not domain_permitted(domain_to_follow, federation_list):
|
2021-12-14 13:59:42 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('DEBUG: follow domain not permitted ' + domain_to_follow)
|
2021-12-14 13:59:42 +00:00
|
|
|
return True
|
2022-01-02 19:57:50 +00:00
|
|
|
domain_to_follow_full = get_full_domain(domain_to_follow, temp_port)
|
|
|
|
nickname_to_follow = get_nickname_from_actor(message_json['object'])
|
|
|
|
if not nickname_to_follow:
|
2021-12-14 13:59:42 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: follow request does not contain a ' +
|
|
|
|
'nickname for the account followed')
|
|
|
|
return True
|
2022-01-02 19:57:50 +00:00
|
|
|
if is_system_account(nickname_to_follow):
|
2021-12-14 13:59:42 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: Cannot follow system account - ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
nickname_to_follow)
|
2021-12-14 13:59:42 +00:00
|
|
|
return True
|
2021-12-25 19:37:10 +00:00
|
|
|
if max_followers > 0:
|
2021-12-29 21:55:09 +00:00
|
|
|
if get_no_of_followers(base_dir,
|
2022-01-02 19:57:50 +00:00
|
|
|
nickname_to_follow, domain_to_follow,
|
2021-12-29 21:55:09 +00:00
|
|
|
True) > max_followers:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('WARN: ' + nickname_to_follow +
|
2021-12-14 13:59:42 +00:00
|
|
|
' has reached their maximum number of followers')
|
|
|
|
return True
|
2022-01-02 19:57:50 +00:00
|
|
|
handle_to_follow = nickname_to_follow + '@' + domain_to_follow
|
|
|
|
if domain_to_follow == domain:
|
|
|
|
if not os.path.isdir(base_dir + '/accounts/' + handle_to_follow):
|
2021-12-14 13:59:42 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: followed account not found - ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
base_dir + '/accounts/' + handle_to_follow)
|
2021-12-14 13:59:42 +00:00
|
|
|
return True
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
if is_follower_of_person(base_dir,
|
2022-01-02 19:57:50 +00:00
|
|
|
nickname_to_follow, domain_to_follow_full,
|
2021-12-29 21:55:09 +00:00
|
|
|
nickname, domain_full):
|
2021-12-14 13:59:42 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: ' + nickname + '@' + domain +
|
|
|
|
' is already a follower of ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
nickname_to_follow + '@' + domain_to_follow)
|
2021-12-14 13:59:42 +00:00
|
|
|
return True
|
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
approve_handle = nickname + '@' + domain_full
|
2021-12-14 13:59:42 +00:00
|
|
|
|
2022-03-12 10:41:36 +00:00
|
|
|
curr_session = session
|
|
|
|
curr_http_prefix = http_prefix
|
|
|
|
curr_domain = domain
|
|
|
|
curr_port = from_port
|
|
|
|
if onion_domain and domain_to_follow.endswith('.onion'):
|
|
|
|
curr_session = session_onion
|
|
|
|
curr_http_prefix = 'http'
|
|
|
|
curr_domain = onion_domain
|
|
|
|
curr_port = 80
|
|
|
|
elif i2p_domain and domain_to_follow.endswith('.i2p'):
|
|
|
|
curr_session = session_i2p
|
|
|
|
curr_http_prefix = 'http'
|
|
|
|
curr_domain = i2p_domain
|
|
|
|
curr_port = 80
|
|
|
|
|
2021-12-14 13:59:42 +00:00
|
|
|
# is the actor sending the request valid?
|
2022-03-12 10:41:36 +00:00
|
|
|
if not valid_sending_actor(curr_session, base_dir,
|
2022-01-02 19:57:50 +00:00
|
|
|
nickname_to_follow, domain_to_follow,
|
2021-12-29 21:55:09 +00:00
|
|
|
person_cache, message_json,
|
|
|
|
signing_priv_key_pem, debug, unit_test):
|
2022-01-02 19:57:50 +00:00
|
|
|
print('REJECT spam follow request ' + approve_handle)
|
2021-12-14 13:59:42 +00:00
|
|
|
return False
|
|
|
|
|
|
|
|
# what is the followers policy?
|
2022-01-02 19:57:50 +00:00
|
|
|
if follow_approval_required(base_dir, nickname_to_follow,
|
|
|
|
domain_to_follow, debug, approve_handle):
|
2021-12-14 13:59:42 +00:00
|
|
|
print('Follow approval is required')
|
|
|
|
if domain.endswith('.onion'):
|
2021-12-29 21:55:09 +00:00
|
|
|
if no_of_follow_requests(base_dir,
|
2022-01-02 19:57:50 +00:00
|
|
|
nickname_to_follow, domain_to_follow,
|
|
|
|
nickname, domain, from_port,
|
2021-12-29 21:55:09 +00:00
|
|
|
'onion') > 5:
|
2021-12-14 13:59:42 +00:00
|
|
|
print('Too many follow requests from onion addresses')
|
|
|
|
return False
|
|
|
|
elif domain.endswith('.i2p'):
|
2021-12-29 21:55:09 +00:00
|
|
|
if no_of_follow_requests(base_dir,
|
2022-01-02 19:57:50 +00:00
|
|
|
nickname_to_follow, domain_to_follow,
|
|
|
|
nickname, domain, from_port,
|
2021-12-29 21:55:09 +00:00
|
|
|
'i2p') > 5:
|
2021-12-14 13:59:42 +00:00
|
|
|
print('Too many follow requests from i2p addresses')
|
|
|
|
return False
|
|
|
|
else:
|
2021-12-29 21:55:09 +00:00
|
|
|
if no_of_follow_requests(base_dir,
|
2022-01-02 19:57:50 +00:00
|
|
|
nickname_to_follow, domain_to_follow,
|
|
|
|
nickname, domain, from_port,
|
2021-12-29 21:55:09 +00:00
|
|
|
'') > 10:
|
2021-12-14 13:59:42 +00:00
|
|
|
print('Too many follow requests')
|
|
|
|
return False
|
|
|
|
|
|
|
|
# Get the actor for the follower and add it to the cache.
|
|
|
|
# Getting their public key has the same result
|
|
|
|
if debug:
|
2021-12-25 23:51:19 +00:00
|
|
|
print('Obtaining the following actor: ' + message_json['actor'])
|
2022-03-12 10:41:36 +00:00
|
|
|
if not get_person_pub_key(base_dir, curr_session,
|
|
|
|
message_json['actor'],
|
2021-12-29 21:55:09 +00:00
|
|
|
person_cache, debug, project_version,
|
2022-03-12 10:41:36 +00:00
|
|
|
curr_http_prefix,
|
|
|
|
domain_to_follow, onion_domain,
|
2022-03-11 13:27:54 +00:00
|
|
|
i2p_domain, signing_priv_key_pem):
|
2021-12-14 13:59:42 +00:00
|
|
|
if debug:
|
|
|
|
print('Unable to obtain following actor: ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json['actor'])
|
2021-12-14 13:59:42 +00:00
|
|
|
|
2021-12-26 00:07:44 +00:00
|
|
|
group_account = \
|
2021-12-26 17:53:07 +00:00
|
|
|
has_group_type(base_dir, message_json['actor'], person_cache)
|
2021-12-26 17:33:24 +00:00
|
|
|
if group_account and is_group_account(base_dir, nickname, domain):
|
2021-12-14 13:59:42 +00:00
|
|
|
print('Group cannot follow a group')
|
|
|
|
return False
|
|
|
|
|
|
|
|
print('Storing follow request for approval')
|
2021-12-29 21:55:09 +00:00
|
|
|
return store_follow_request(base_dir,
|
2022-01-02 19:57:50 +00:00
|
|
|
nickname_to_follow, domain_to_follow, port,
|
|
|
|
nickname, domain, from_port,
|
2021-12-29 21:55:09 +00:00
|
|
|
message_json, debug, message_json['actor'],
|
|
|
|
group_account)
|
2021-12-14 13:59:42 +00:00
|
|
|
else:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('Follow request does not require approval ' + approve_handle)
|
2021-12-14 13:59:42 +00:00
|
|
|
# update the followers
|
2022-01-02 19:57:50 +00:00
|
|
|
account_to_be_followed = \
|
|
|
|
acct_dir(base_dir, nickname_to_follow, domain_to_follow)
|
|
|
|
if os.path.isdir(account_to_be_followed):
|
|
|
|
followers_filename = account_to_be_followed + '/followers.txt'
|
2021-12-14 13:59:42 +00:00
|
|
|
|
|
|
|
# for actors which don't follow the mastodon
|
|
|
|
# /users/ path convention store the full actor
|
2021-12-25 23:51:19 +00:00
|
|
|
if '/users/' not in message_json['actor']:
|
2022-01-02 19:57:50 +00:00
|
|
|
approve_handle = message_json['actor']
|
2021-12-14 13:59:42 +00:00
|
|
|
|
|
|
|
# Get the actor for the follower and add it to the cache.
|
|
|
|
# Getting their public key has the same result
|
|
|
|
if debug:
|
2021-12-25 23:51:19 +00:00
|
|
|
print('Obtaining the following actor: ' +
|
|
|
|
message_json['actor'])
|
2022-03-12 10:41:36 +00:00
|
|
|
if not get_person_pub_key(base_dir, curr_session,
|
|
|
|
message_json['actor'],
|
2021-12-29 21:55:09 +00:00
|
|
|
person_cache, debug, project_version,
|
2022-03-12 10:41:36 +00:00
|
|
|
curr_http_prefix, domain_to_follow,
|
2022-03-11 13:27:54 +00:00
|
|
|
onion_domain, i2p_domain,
|
|
|
|
signing_priv_key_pem):
|
2021-12-14 13:59:42 +00:00
|
|
|
if debug:
|
|
|
|
print('Unable to obtain following actor: ' +
|
2021-12-25 23:51:19 +00:00
|
|
|
message_json['actor'])
|
2021-12-14 13:59:42 +00:00
|
|
|
|
|
|
|
print('Updating followers file: ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
followers_filename + ' adding ' + approve_handle)
|
|
|
|
if os.path.isfile(followers_filename):
|
|
|
|
if approve_handle not in open(followers_filename).read():
|
2021-12-26 00:07:44 +00:00
|
|
|
group_account = \
|
2021-12-26 17:53:07 +00:00
|
|
|
has_group_type(base_dir,
|
|
|
|
message_json['actor'], person_cache)
|
2021-12-14 13:59:42 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
print(approve_handle + ' / ' + message_json['actor'] +
|
2021-12-26 00:07:44 +00:00
|
|
|
' is Group: ' + str(group_account))
|
|
|
|
if group_account and \
|
2021-12-26 17:33:24 +00:00
|
|
|
is_group_account(base_dir, nickname, domain):
|
2021-12-14 13:59:42 +00:00
|
|
|
print('Group cannot follow a group')
|
|
|
|
return False
|
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(followers_filename, 'r+') as followers_file:
|
|
|
|
content = followers_file.read()
|
|
|
|
if approve_handle + '\n' not in content:
|
|
|
|
followers_file.seek(0, 0)
|
2021-12-26 00:07:44 +00:00
|
|
|
if not group_account:
|
2022-01-02 19:57:50 +00:00
|
|
|
followers_file.write(approve_handle +
|
|
|
|
'\n' + content)
|
2021-12-14 13:59:42 +00:00
|
|
|
else:
|
2022-01-02 19:57:50 +00:00
|
|
|
followers_file.write('!' + approve_handle +
|
|
|
|
'\n' + content)
|
2021-12-25 15:28:52 +00:00
|
|
|
except Exception as ex:
|
2021-12-14 13:59:42 +00:00
|
|
|
print('WARN: ' +
|
|
|
|
'Failed to write entry to followers file ' +
|
2021-12-25 15:28:52 +00:00
|
|
|
str(ex))
|
2021-12-14 13:59:42 +00:00
|
|
|
else:
|
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
with open(followers_filename, 'w+') as followers_file:
|
|
|
|
followers_file.write(approve_handle + '\n')
|
2021-12-14 13:59:42 +00:00
|
|
|
except OSError:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('EX: unable to write ' + followers_filename)
|
2021-12-14 13:59:42 +00:00
|
|
|
|
|
|
|
print('Beginning follow accept')
|
2022-03-12 10:41:36 +00:00
|
|
|
return followed_account_accepts(curr_session, base_dir, curr_http_prefix,
|
2022-01-02 19:57:50 +00:00
|
|
|
nickname_to_follow, domain_to_follow, port,
|
2022-03-12 10:41:36 +00:00
|
|
|
nickname, curr_domain, curr_port,
|
2021-12-29 21:55:09 +00:00
|
|
|
message_json['actor'], federation_list,
|
2022-01-02 19:57:50 +00:00
|
|
|
message_json, send_threads, post_log,
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers, person_cache,
|
|
|
|
debug, project_version, True,
|
|
|
|
signing_priv_key_pem)
|
2021-12-14 13:59:42 +00:00
|
|
|
|
|
|
|
|
2021-12-28 20:32:11 +00:00
|
|
|
def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int,
|
|
|
|
project_version: str,
|
|
|
|
base_dir: str, http_prefix: str,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads: [], post_log: [],
|
2021-12-28 20:32:11 +00:00
|
|
|
cached_webfingers: {}, person_cache: {}, queue: [],
|
|
|
|
domain: str,
|
|
|
|
onion_domain: str, i2p_domain: str,
|
|
|
|
port: int, proxy_type: str,
|
|
|
|
federation_list: [], max_replies: int,
|
|
|
|
domain_max_posts_per_day: int,
|
|
|
|
account_max_posts_per_day: int,
|
|
|
|
allow_deletion: bool, debug: bool, max_mentions: int,
|
|
|
|
max_emoji: int, translate: {}, unit_test: bool,
|
|
|
|
yt_replace_domain: str,
|
|
|
|
twitter_replacement_domain: str,
|
|
|
|
show_published_date_only: bool,
|
|
|
|
max_followers: int,
|
|
|
|
allow_local_network_access: bool,
|
|
|
|
peertube_instances: [],
|
|
|
|
verify_all_signatures: bool,
|
|
|
|
theme_name: str, system_language: str,
|
|
|
|
max_like_count: int, signing_priv_key_pem: str,
|
|
|
|
default_reply_interval_hrs: int,
|
|
|
|
cw_lists: {}) -> None:
|
2020-08-02 09:51:20 +00:00
|
|
|
"""Processes received items and moves them to the appropriate
|
|
|
|
directories
|
2019-07-04 12:23:53 +00:00
|
|
|
"""
|
2022-03-11 13:27:54 +00:00
|
|
|
print('Starting new session when starting inbox queue')
|
2022-01-02 19:57:50 +00:00
|
|
|
curr_session_time = int(time.time())
|
2022-03-11 22:13:22 +00:00
|
|
|
session_last_update = 0
|
2021-12-28 16:56:57 +00:00
|
|
|
session = create_session(proxy_type)
|
2022-03-11 22:13:22 +00:00
|
|
|
if session:
|
|
|
|
session_last_update = curr_session_time
|
2022-03-11 13:27:54 +00:00
|
|
|
|
|
|
|
# is this is a clearnet instance then optionally start sessions
|
|
|
|
# for onion and i2p domains
|
|
|
|
session_onion = None
|
|
|
|
session_i2p = None
|
2022-03-11 22:13:22 +00:00
|
|
|
session_last_update_onion = 0
|
|
|
|
session_last_update_i2p = 0
|
2022-03-11 13:27:54 +00:00
|
|
|
if proxy_type != 'tor' and onion_domain:
|
|
|
|
print('Starting onion session when starting inbox queue')
|
|
|
|
session_onion = create_session('tor')
|
2022-03-11 22:13:22 +00:00
|
|
|
if session_onion:
|
|
|
|
session_onion = curr_session_time
|
2022-03-11 13:27:54 +00:00
|
|
|
if proxy_type != 'i2p' and i2p_domain:
|
|
|
|
print('Starting i2p session when starting inbox queue')
|
|
|
|
session_i2p = create_session('i2p')
|
2022-03-11 22:13:22 +00:00
|
|
|
if session_i2p:
|
|
|
|
session_i2p = curr_session_time
|
2022-03-11 13:27:54 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
inbox_handle = 'inbox@' + domain
|
2019-07-04 12:23:53 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: Inbox queue running')
|
|
|
|
|
2019-07-12 21:09:23 +00:00
|
|
|
# if queue processing was interrupted (eg server crash)
|
|
|
|
# then this loads any outstanding items back into the queue
|
2021-12-29 21:55:09 +00:00
|
|
|
_restore_queue_items(base_dir, queue)
|
2019-07-15 10:22:19 +00:00
|
|
|
|
2020-03-25 10:21:25 +00:00
|
|
|
# keep track of numbers of incoming posts per day
|
2022-01-02 19:57:50 +00:00
|
|
|
quotas_last_update_daily = int(time.time())
|
|
|
|
quotas_daily = {
|
2019-07-15 10:22:19 +00:00
|
|
|
'domains': {},
|
|
|
|
'accounts': {}
|
|
|
|
}
|
2022-01-02 19:57:50 +00:00
|
|
|
quotas_last_update_per_min = int(time.time())
|
|
|
|
quotas_per_min = {
|
2020-03-25 10:36:37 +00:00
|
|
|
'domains': {},
|
|
|
|
'accounts': {}
|
|
|
|
}
|
2019-08-05 22:38:38 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
heart_beat_ctr = 0
|
|
|
|
queue_restore_ctr = 0
|
2019-09-03 08:46:26 +00:00
|
|
|
|
2021-02-24 11:43:48 +00:00
|
|
|
# time when the last DM bounce message was sent
|
|
|
|
# This is in a list so that it can be changed by reference
|
2021-12-29 21:55:09 +00:00
|
|
|
# within _bounce_dm
|
2022-01-02 19:57:50 +00:00
|
|
|
last_bounce_message = [int(time.time())]
|
2021-02-24 11:43:48 +00:00
|
|
|
|
2021-06-05 13:36:03 +00:00
|
|
|
# how long it takes for broch mode to lapse
|
2022-01-02 19:57:50 +00:00
|
|
|
broch_lapse_days = random.randrange(7, 14)
|
2021-06-05 13:36:03 +00:00
|
|
|
|
2019-07-04 12:23:53 +00:00
|
|
|
while True:
|
2020-04-16 18:25:59 +00:00
|
|
|
time.sleep(1)
|
2019-09-03 08:46:26 +00:00
|
|
|
|
|
|
|
# heartbeat to monitor whether the inbox queue is running
|
2022-01-02 19:57:50 +00:00
|
|
|
heart_beat_ctr += 1
|
|
|
|
if heart_beat_ctr >= 10:
|
2021-02-15 22:26:25 +00:00
|
|
|
# turn off broch mode after it has timed out
|
2022-01-02 19:57:50 +00:00
|
|
|
if broch_modeLapses(base_dir, broch_lapse_days):
|
|
|
|
broch_lapse_days = random.randrange(7, 14)
|
2020-04-16 10:14:05 +00:00
|
|
|
print('>>> Heartbeat Q:' + str(len(queue)) + ' ' +
|
2020-04-03 16:27:34 +00:00
|
|
|
'{:%F %T}'.format(datetime.datetime.now()))
|
2022-01-02 19:57:50 +00:00
|
|
|
heart_beat_ctr = 0
|
2020-04-03 16:27:34 +00:00
|
|
|
|
|
|
|
if len(queue) == 0:
|
2019-09-03 09:11:33 +00:00
|
|
|
# restore any remaining queue items
|
2022-01-02 19:57:50 +00:00
|
|
|
queue_restore_ctr += 1
|
|
|
|
if queue_restore_ctr >= 30:
|
|
|
|
queue_restore_ctr = 0
|
2021-12-29 21:55:09 +00:00
|
|
|
_restore_queue_items(base_dir, queue)
|
2020-04-16 09:49:57 +00:00
|
|
|
continue
|
2020-04-16 10:14:05 +00:00
|
|
|
|
2020-04-16 09:49:57 +00:00
|
|
|
# oldest item first
|
|
|
|
queue.sort()
|
2022-01-02 19:57:50 +00:00
|
|
|
queue_filename = queue[0]
|
|
|
|
if not os.path.isfile(queue_filename):
|
2020-04-16 09:49:57 +00:00
|
|
|
print("Queue: queue item rejected because it has no file: " +
|
2022-01-02 19:57:50 +00:00
|
|
|
queue_filename)
|
2020-04-16 09:49:57 +00:00
|
|
|
if len(queue) > 0:
|
|
|
|
queue.pop(0)
|
|
|
|
continue
|
2019-07-04 12:23:53 +00:00
|
|
|
|
2021-03-14 19:22:58 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('Loading queue item ' + queue_filename)
|
2020-03-22 21:16:02 +00:00
|
|
|
|
2020-04-16 09:49:57 +00:00
|
|
|
# Load the queue json
|
2022-01-02 19:57:50 +00:00
|
|
|
queue_json = load_json(queue_filename, 1)
|
|
|
|
if not queue_json:
|
2021-12-28 20:32:11 +00:00
|
|
|
print('Queue: run_inbox_queue failed to load inbox queue item ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
queue_filename)
|
2020-04-16 09:49:57 +00:00
|
|
|
# Assume that the file is probably corrupt/unreadable
|
|
|
|
if len(queue) > 0:
|
|
|
|
queue.pop(0)
|
|
|
|
# delete the queue file
|
2022-01-02 19:57:50 +00:00
|
|
|
if os.path.isfile(queue_filename):
|
2020-04-16 09:49:57 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
os.remove(queue_filename)
|
2021-11-25 18:42:38 +00:00
|
|
|
except OSError:
|
2021-12-28 20:32:11 +00:00
|
|
|
print('EX: run_inbox_queue 1 unable to delete ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
str(queue_filename))
|
2020-04-16 09:49:57 +00:00
|
|
|
continue
|
2020-03-22 21:16:02 +00:00
|
|
|
|
2022-03-11 13:27:54 +00:00
|
|
|
curr_time = int(time.time())
|
|
|
|
|
2020-04-16 09:49:57 +00:00
|
|
|
# clear the daily quotas for maximum numbers of received posts
|
2022-01-02 19:57:50 +00:00
|
|
|
if curr_time - quotas_last_update_daily > 60 * 60 * 24:
|
|
|
|
quotas_daily = {
|
2020-04-16 09:49:57 +00:00
|
|
|
'domains': {},
|
|
|
|
'accounts': {}
|
|
|
|
}
|
2022-01-02 19:57:50 +00:00
|
|
|
quotas_last_update_daily = curr_time
|
2020-04-16 09:49:57 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
if curr_time - quotas_last_update_per_min > 60:
|
2021-01-05 10:48:22 +00:00
|
|
|
# clear the per minute quotas for maximum numbers of received posts
|
2022-01-02 19:57:50 +00:00
|
|
|
quotas_per_min = {
|
2020-04-16 09:49:57 +00:00
|
|
|
'domains': {},
|
|
|
|
'accounts': {}
|
|
|
|
}
|
2021-01-05 10:48:22 +00:00
|
|
|
# also check if the json signature enforcement has changed
|
2022-01-02 19:57:50 +00:00
|
|
|
verify_all_sigs = get_config_param(base_dir, "verifyAllSignatures")
|
|
|
|
if verify_all_sigs is not None:
|
|
|
|
verify_all_signatures = verify_all_sigs
|
2021-01-05 10:48:22 +00:00
|
|
|
# change the last time that this was done
|
2022-01-02 19:57:50 +00:00
|
|
|
quotas_last_update_per_min = curr_time
|
2020-04-16 09:49:57 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
if _inbox_quota_exceeded(queue, queue_filename,
|
|
|
|
queue_json, quotas_daily, quotas_per_min,
|
2021-12-29 21:55:09 +00:00
|
|
|
domain_max_posts_per_day,
|
|
|
|
account_max_posts_per_day, debug):
|
2021-06-07 13:48:29 +00:00
|
|
|
continue
|
2019-07-15 10:22:19 +00:00
|
|
|
|
2022-03-11 22:13:22 +00:00
|
|
|
# recreate the session periodically
|
|
|
|
if not session or curr_time - session_last_update > 21600:
|
|
|
|
print('Regenerating inbox queue session at 6hr interval')
|
|
|
|
session = create_session(proxy_type)
|
|
|
|
if session:
|
|
|
|
session_last_update = curr_time
|
|
|
|
else:
|
|
|
|
print('WARN: inbox session not created')
|
|
|
|
continue
|
|
|
|
if onion_domain:
|
|
|
|
if not session_onion or \
|
|
|
|
curr_time - session_last_update_onion > 21600:
|
|
|
|
print('Regenerating inbox queue onion session at 6hr interval')
|
|
|
|
session_onion = create_session('tor')
|
|
|
|
if session_onion:
|
|
|
|
session_last_update_onion = curr_time
|
|
|
|
else:
|
|
|
|
print('WARN: inbox onion session not created')
|
|
|
|
continue
|
|
|
|
if i2p_domain:
|
|
|
|
if not session_i2p or curr_time - session_last_update_i2p > 21600:
|
|
|
|
print('Regenerating inbox queue i2p session at 6hr interval')
|
|
|
|
session_i2p = create_session('i2p')
|
|
|
|
if session_i2p:
|
|
|
|
session_last_update_i2p = curr_time
|
|
|
|
else:
|
|
|
|
print('WARN: inbox i2p session not created')
|
|
|
|
continue
|
|
|
|
|
2022-03-11 13:27:54 +00:00
|
|
|
curr_session = session
|
|
|
|
curr_proxy_type = proxy_type
|
|
|
|
if queue_json.get('actor'):
|
|
|
|
if isinstance(queue_json['actor'], str):
|
|
|
|
sender_domain, _ = get_domain_from_actor(queue_json['actor'])
|
|
|
|
if sender_domain.endswith('.onion') and \
|
|
|
|
session_onion and proxy_type != 'tor':
|
|
|
|
curr_proxy_type = 'tor'
|
|
|
|
curr_session = session_onion
|
|
|
|
elif (sender_domain.endswith('.i2p') and
|
|
|
|
session_i2p and proxy_type != 'i2p'):
|
|
|
|
curr_proxy_type = 'i2p'
|
|
|
|
curr_session = session_i2p
|
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
if debug and queue_json.get('actor'):
|
|
|
|
print('Obtaining public key for actor ' + queue_json['actor'])
|
2020-03-22 21:16:02 +00:00
|
|
|
|
2020-04-16 09:49:57 +00:00
|
|
|
# Try a few times to obtain the public key
|
2022-01-02 19:57:50 +00:00
|
|
|
pub_key = None
|
|
|
|
key_id = None
|
2020-04-16 09:49:57 +00:00
|
|
|
for tries in range(8):
|
2022-01-02 19:57:50 +00:00
|
|
|
key_id = None
|
|
|
|
signature_params = \
|
|
|
|
queue_json['httpHeaders']['signature'].split(',')
|
|
|
|
for signature_item in signature_params:
|
|
|
|
if signature_item.startswith('keyId='):
|
|
|
|
if '"' in signature_item:
|
|
|
|
key_id = signature_item.split('"')[1]
|
2020-04-16 09:49:57 +00:00
|
|
|
break
|
2022-01-02 19:57:50 +00:00
|
|
|
if not key_id:
|
2020-04-16 09:49:57 +00:00
|
|
|
print('Queue: No keyId in signature: ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
queue_json['httpHeaders']['signature'])
|
|
|
|
pub_key = None
|
2020-04-16 09:49:57 +00:00
|
|
|
break
|
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
pub_key = \
|
2022-03-11 13:27:54 +00:00
|
|
|
get_person_pub_key(base_dir, curr_session, key_id,
|
2021-12-29 21:55:09 +00:00
|
|
|
person_cache, debug,
|
|
|
|
project_version, http_prefix,
|
2022-03-11 13:27:54 +00:00
|
|
|
domain, onion_domain, i2p_domain,
|
|
|
|
signing_priv_key_pem)
|
2022-01-02 19:57:50 +00:00
|
|
|
if pub_key:
|
2019-07-04 17:31:41 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('DEBUG: public key: ' + str(pub_key))
|
2020-04-16 09:49:57 +00:00
|
|
|
break
|
2019-07-04 17:31:41 +00:00
|
|
|
|
2019-08-15 08:36:49 +00:00
|
|
|
if debug:
|
2020-04-16 09:49:57 +00:00
|
|
|
print('DEBUG: Retry ' + str(tries+1) +
|
2022-01-02 19:57:50 +00:00
|
|
|
' obtaining public key for ' + key_id)
|
2020-04-16 18:25:59 +00:00
|
|
|
time.sleep(1)
|
2019-07-04 12:23:53 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
if not pub_key:
|
2021-03-14 19:39:00 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('Queue: public key could not be obtained from ' + key_id)
|
|
|
|
if os.path.isfile(queue_filename):
|
2021-09-05 10:17:43 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
os.remove(queue_filename)
|
2021-11-25 18:42:38 +00:00
|
|
|
except OSError:
|
2021-12-28 20:32:11 +00:00
|
|
|
print('EX: run_inbox_queue 2 unable to delete ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
str(queue_filename))
|
2020-04-16 09:49:57 +00:00
|
|
|
if len(queue) > 0:
|
|
|
|
queue.pop(0)
|
|
|
|
continue
|
|
|
|
|
2021-01-03 09:44:33 +00:00
|
|
|
# check the http header signature
|
2020-04-16 09:49:57 +00:00
|
|
|
if debug:
|
2021-01-03 09:44:33 +00:00
|
|
|
print('DEBUG: checking http header signature')
|
2022-01-02 19:57:50 +00:00
|
|
|
pprint(queue_json['httpHeaders'])
|
|
|
|
post_str = json.dumps(queue_json['post'])
|
|
|
|
http_signature_failed = False
|
|
|
|
if not verify_post_headers(http_prefix, pub_key,
|
|
|
|
queue_json['httpHeaders'],
|
|
|
|
queue_json['path'], False,
|
|
|
|
queue_json['digest'],
|
|
|
|
post_str, debug):
|
|
|
|
http_signature_failed = True
|
2020-04-16 09:49:57 +00:00
|
|
|
print('Queue: Header signature check failed')
|
2022-01-02 19:57:50 +00:00
|
|
|
pprint(queue_json['httpHeaders'])
|
2021-02-14 15:22:03 +00:00
|
|
|
else:
|
|
|
|
if debug:
|
|
|
|
print('DEBUG: http header signature check success')
|
2020-03-22 21:16:02 +00:00
|
|
|
|
2021-01-04 19:02:24 +00:00
|
|
|
# check if a json signature exists on this post
|
2022-01-02 19:57:50 +00:00
|
|
|
has_json_signature, jwebsig_type = \
|
|
|
|
_check_json_signature(base_dir, queue_json)
|
2021-01-05 10:29:37 +00:00
|
|
|
|
2021-01-05 10:54:50 +00:00
|
|
|
# strict enforcement of json signatures
|
2022-01-02 19:57:50 +00:00
|
|
|
if not has_json_signature:
|
|
|
|
if http_signature_failed:
|
|
|
|
if jwebsig_type:
|
2021-02-14 15:45:42 +00:00
|
|
|
print('Queue: Header signature check failed and does ' +
|
|
|
|
'not have a recognised jsonld signature type ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
jwebsig_type)
|
2021-02-14 15:45:42 +00:00
|
|
|
else:
|
|
|
|
print('Queue: Header signature check failed and ' +
|
2021-02-14 20:30:01 +00:00
|
|
|
'does not have jsonld signature')
|
2021-02-14 20:41:11 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
pprint(queue_json['httpHeaders'])
|
2021-01-05 10:29:37 +00:00
|
|
|
|
2021-12-25 18:40:32 +00:00
|
|
|
if verify_all_signatures:
|
2022-01-02 19:57:50 +00:00
|
|
|
original_json = queue_json['original']
|
2021-02-14 15:22:03 +00:00
|
|
|
print('Queue: inbox post does not have a jsonld signature ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
key_id + ' ' + str(original_json))
|
2021-02-14 15:22:03 +00:00
|
|
|
|
2022-01-02 19:57:50 +00:00
|
|
|
if http_signature_failed or verify_all_signatures:
|
|
|
|
if os.path.isfile(queue_filename):
|
2021-09-05 10:17:43 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
os.remove(queue_filename)
|
2021-11-25 18:42:38 +00:00
|
|
|
except OSError:
|
2021-12-28 20:32:11 +00:00
|
|
|
print('EX: run_inbox_queue 3 unable to delete ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
str(queue_filename))
|
2021-01-05 20:55:11 +00:00
|
|
|
if len(queue) > 0:
|
|
|
|
queue.pop(0)
|
|
|
|
continue
|
2021-02-14 15:22:03 +00:00
|
|
|
else:
|
2022-01-02 19:57:50 +00:00
|
|
|
if http_signature_failed or verify_all_signatures:
|
2021-02-14 15:22:03 +00:00
|
|
|
# use the original json message received, not one which
|
|
|
|
# may have been modified along the way
|
2022-01-02 19:57:50 +00:00
|
|
|
original_json = queue_json['original']
|
|
|
|
if not verify_json_signature(original_json, pub_key):
|
2021-02-14 15:22:03 +00:00
|
|
|
if debug:
|
|
|
|
print('WARN: jsonld inbox signature check failed ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
key_id + ' ' + pub_key + ' ' +
|
|
|
|
str(original_json))
|
2021-02-14 15:22:03 +00:00
|
|
|
else:
|
|
|
|
print('WARN: jsonld inbox signature check failed ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
key_id)
|
|
|
|
if os.path.isfile(queue_filename):
|
2021-09-05 10:17:43 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
os.remove(queue_filename)
|
2021-11-25 18:42:38 +00:00
|
|
|
except OSError:
|
2021-12-28 20:32:11 +00:00
|
|
|
print('EX: run_inbox_queue 4 unable to delete ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
str(queue_filename))
|
2021-02-14 15:22:03 +00:00
|
|
|
if len(queue) > 0:
|
|
|
|
queue.pop(0)
|
|
|
|
continue
|
|
|
|
else:
|
2022-01-02 19:57:50 +00:00
|
|
|
if http_signature_failed:
|
2021-02-14 15:22:03 +00:00
|
|
|
print('jsonld inbox signature check success ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
'via relay ' + key_id)
|
2021-02-14 15:22:03 +00:00
|
|
|
else:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('jsonld inbox signature check success ' + key_id)
|
2021-01-03 14:25:20 +00:00
|
|
|
|
2020-04-16 09:49:57 +00:00
|
|
|
# set the id to the same as the post filename
|
|
|
|
# This makes the filename and the id consistent
|
2022-01-02 19:57:50 +00:00
|
|
|
# if queue_json['post'].get('id'):
|
|
|
|
# queue_json['post']['id'] = queue_json['id']
|
2020-04-16 09:49:57 +00:00
|
|
|
|
2022-03-11 13:27:54 +00:00
|
|
|
if _receive_undo(curr_session,
|
2021-12-29 21:55:09 +00:00
|
|
|
base_dir, http_prefix, port,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads, post_log,
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers,
|
|
|
|
person_cache,
|
2022-01-02 19:57:50 +00:00
|
|
|
queue_json['post'],
|
2021-12-29 21:55:09 +00:00
|
|
|
federation_list,
|
|
|
|
debug):
|
2022-01-02 19:57:50 +00:00
|
|
|
print('Queue: Undo accepted from ' + key_id)
|
|
|
|
if os.path.isfile(queue_filename):
|
2021-09-05 10:17:43 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
os.remove(queue_filename)
|
2021-11-25 18:42:38 +00:00
|
|
|
except OSError:
|
2021-12-28 20:32:11 +00:00
|
|
|
print('EX: run_inbox_queue 5 unable to delete ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
str(queue_filename))
|
2020-04-16 09:49:57 +00:00
|
|
|
if len(queue) > 0:
|
|
|
|
queue.pop(0)
|
|
|
|
continue
|
2019-07-17 10:34:00 +00:00
|
|
|
|
2020-04-16 09:49:57 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: checking for follow requests')
|
2022-03-12 10:41:36 +00:00
|
|
|
if _receive_follow_request(curr_session, session_onion, session_i2p,
|
2021-12-29 21:55:09 +00:00
|
|
|
base_dir, http_prefix, port,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads, post_log,
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers,
|
|
|
|
person_cache,
|
2022-01-02 19:57:50 +00:00
|
|
|
queue_json['post'],
|
2021-12-29 21:55:09 +00:00
|
|
|
federation_list,
|
|
|
|
debug, project_version,
|
2022-03-11 13:27:54 +00:00
|
|
|
max_followers, onion_domain, i2p_domain,
|
2021-12-29 21:55:09 +00:00
|
|
|
signing_priv_key_pem, unit_test):
|
2022-01-02 19:57:50 +00:00
|
|
|
if os.path.isfile(queue_filename):
|
2021-09-05 10:17:43 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
os.remove(queue_filename)
|
2021-11-25 18:42:38 +00:00
|
|
|
except OSError:
|
2021-12-28 20:32:11 +00:00
|
|
|
print('EX: run_inbox_queue 6 unable to delete ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
str(queue_filename))
|
2020-04-16 09:49:57 +00:00
|
|
|
if len(queue) > 0:
|
|
|
|
queue.pop(0)
|
2022-01-02 19:57:50 +00:00
|
|
|
print('Queue: Follow activity for ' + key_id +
|
2020-06-28 19:04:43 +00:00
|
|
|
' removed from queue')
|
2020-04-16 09:49:57 +00:00
|
|
|
continue
|
|
|
|
else:
|
2019-08-15 16:05:28 +00:00
|
|
|
if debug:
|
2020-04-16 09:49:57 +00:00
|
|
|
print('DEBUG: No follow requests')
|
|
|
|
|
2022-03-11 13:27:54 +00:00
|
|
|
if receive_accept_reject(curr_session,
|
2021-12-29 21:55:09 +00:00
|
|
|
base_dir, http_prefix, domain, port,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads, post_log,
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers, person_cache,
|
2022-01-02 19:57:50 +00:00
|
|
|
queue_json['post'],
|
2021-12-29 21:55:09 +00:00
|
|
|
federation_list, debug):
|
2022-01-02 19:57:50 +00:00
|
|
|
print('Queue: Accept/Reject received from ' + key_id)
|
|
|
|
if os.path.isfile(queue_filename):
|
2021-09-05 10:17:43 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
os.remove(queue_filename)
|
2021-11-25 18:42:38 +00:00
|
|
|
except OSError:
|
2021-12-28 20:32:11 +00:00
|
|
|
print('EX: run_inbox_queue 7 unable to delete ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
str(queue_filename))
|
2020-04-16 09:49:57 +00:00
|
|
|
if len(queue) > 0:
|
|
|
|
queue.pop(0)
|
|
|
|
continue
|
2019-07-06 15:17:21 +00:00
|
|
|
|
2022-03-11 13:27:54 +00:00
|
|
|
if _receive_update_activity(recent_posts_cache, curr_session,
|
2021-12-29 21:55:09 +00:00
|
|
|
base_dir, http_prefix,
|
|
|
|
domain, port,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads, post_log,
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers,
|
|
|
|
person_cache,
|
2022-01-02 19:57:50 +00:00
|
|
|
queue_json['post'],
|
2021-12-29 21:55:09 +00:00
|
|
|
federation_list,
|
2022-01-02 19:57:50 +00:00
|
|
|
queue_json['postNickname'],
|
2021-12-29 21:55:09 +00:00
|
|
|
debug):
|
2021-03-14 19:46:46 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('Queue: Update accepted from ' + key_id)
|
|
|
|
if os.path.isfile(queue_filename):
|
2021-09-05 10:17:43 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
os.remove(queue_filename)
|
2021-11-25 18:42:38 +00:00
|
|
|
except OSError:
|
2021-12-28 20:32:11 +00:00
|
|
|
print('EX: run_inbox_queue 8 unable to delete ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
str(queue_filename))
|
2020-04-16 09:49:57 +00:00
|
|
|
if len(queue) > 0:
|
|
|
|
queue.pop(0)
|
|
|
|
continue
|
|
|
|
|
|
|
|
# get recipients list
|
2022-01-02 19:57:50 +00:00
|
|
|
recipients_dict, recipients_dict_followers = \
|
|
|
|
_inbox_post_recipients(base_dir, queue_json['post'],
|
2021-12-29 21:55:09 +00:00
|
|
|
http_prefix, domain, port, debug)
|
2022-01-02 19:57:50 +00:00
|
|
|
if len(recipients_dict.items()) == 0 and \
|
|
|
|
len(recipients_dict_followers.items()) == 0:
|
2021-03-14 19:22:58 +00:00
|
|
|
if debug:
|
|
|
|
print('Queue: no recipients were resolved ' +
|
|
|
|
'for post arriving in inbox')
|
2022-01-02 19:57:50 +00:00
|
|
|
if os.path.isfile(queue_filename):
|
2021-09-05 10:17:43 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
os.remove(queue_filename)
|
2021-11-25 18:42:38 +00:00
|
|
|
except OSError:
|
2021-12-28 20:32:11 +00:00
|
|
|
print('EX: run_inbox_queue 9 unable to delete ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
str(queue_filename))
|
2020-04-16 09:49:57 +00:00
|
|
|
if len(queue) > 0:
|
|
|
|
queue.pop(0)
|
|
|
|
continue
|
2019-07-09 14:20:23 +00:00
|
|
|
|
2020-04-16 09:49:57 +00:00
|
|
|
# if there are only a small number of followers then
|
|
|
|
# process them as if they were specifically
|
|
|
|
# addresses to particular accounts
|
2022-01-02 19:57:50 +00:00
|
|
|
no_of_follow_items = len(recipients_dict_followers.items())
|
|
|
|
if no_of_follow_items > 0:
|
2020-04-16 09:49:57 +00:00
|
|
|
# always deliver to individual inboxes
|
2022-01-02 19:57:50 +00:00
|
|
|
if no_of_follow_items < 999999:
|
2020-04-16 09:49:57 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
print('DEBUG: moving ' + str(no_of_follow_items) +
|
2020-04-16 09:49:57 +00:00
|
|
|
' inbox posts addressed to followers')
|
2022-01-02 19:57:50 +00:00
|
|
|
for handle, post_item in recipients_dict_followers.items():
|
|
|
|
recipients_dict[handle] = post_item
|
|
|
|
recipients_dict_followers = {}
|
|
|
|
# recipients_list = [recipients_dict, recipients_dict_followers]
|
2020-04-16 09:49:57 +00:00
|
|
|
|
|
|
|
if debug:
|
|
|
|
print('*************************************')
|
|
|
|
print('Resolved recipients list:')
|
2022-01-02 19:57:50 +00:00
|
|
|
pprint(recipients_dict)
|
2020-04-16 09:49:57 +00:00
|
|
|
print('Resolved followers list:')
|
2022-01-02 19:57:50 +00:00
|
|
|
pprint(recipients_dict_followers)
|
2020-04-16 09:49:57 +00:00
|
|
|
print('*************************************')
|
|
|
|
|
|
|
|
# Copy any posts addressed to followers into the shared inbox
|
|
|
|
# this avoid copying file multiple times to potentially many
|
|
|
|
# individual inboxes
|
2022-01-02 19:57:50 +00:00
|
|
|
if len(recipients_dict_followers) > 0:
|
|
|
|
shared_inbox_post_filename = \
|
|
|
|
queue_json['destination'].replace(inbox_handle, inbox_handle)
|
|
|
|
if not os.path.isfile(shared_inbox_post_filename):
|
|
|
|
save_json(queue_json['post'], shared_inbox_post_filename)
|
2020-04-16 09:49:57 +00:00
|
|
|
|
2021-12-30 13:56:38 +00:00
|
|
|
lists_enabled = get_config_param(base_dir, "listsEnabled")
|
|
|
|
content_license_url = get_config_param(base_dir, "contentLicenseUrl")
|
2021-10-21 19:00:25 +00:00
|
|
|
|
2020-04-16 09:49:57 +00:00
|
|
|
# for posts addressed to specific accounts
|
2022-01-02 19:57:50 +00:00
|
|
|
for handle, _ in recipients_dict.items():
|
2020-04-16 09:49:57 +00:00
|
|
|
destination = \
|
2022-01-02 19:57:50 +00:00
|
|
|
queue_json['destination'].replace(inbox_handle, handle)
|
2022-02-25 15:04:28 +00:00
|
|
|
languages_understood = []
|
2021-12-29 21:55:09 +00:00
|
|
|
_inbox_after_initial(recent_posts_cache,
|
|
|
|
max_recent_posts,
|
2022-03-11 22:13:22 +00:00
|
|
|
session, session_onion, session_i2p,
|
|
|
|
key_id, handle,
|
2022-01-02 19:57:50 +00:00
|
|
|
queue_json['post'],
|
2021-12-29 21:55:09 +00:00
|
|
|
base_dir, http_prefix,
|
2022-01-02 19:57:50 +00:00
|
|
|
send_threads, post_log,
|
2021-12-29 21:55:09 +00:00
|
|
|
cached_webfingers,
|
|
|
|
person_cache, queue,
|
|
|
|
domain,
|
|
|
|
onion_domain, i2p_domain,
|
2022-03-11 13:27:54 +00:00
|
|
|
port, curr_proxy_type,
|
2021-12-29 21:55:09 +00:00
|
|
|
federation_list,
|
|
|
|
debug,
|
2022-01-02 19:57:50 +00:00
|
|
|
queue_filename, destination,
|
2021-12-29 21:55:09 +00:00
|
|
|
max_replies, allow_deletion,
|
|
|
|
max_mentions, max_emoji,
|
|
|
|
translate, unit_test,
|
|
|
|
yt_replace_domain,
|
|
|
|
twitter_replacement_domain,
|
|
|
|
show_published_date_only,
|
|
|
|
allow_local_network_access,
|
|
|
|
peertube_instances,
|
2022-01-02 19:57:50 +00:00
|
|
|
last_bounce_message,
|
2021-12-29 21:55:09 +00:00
|
|
|
theme_name, system_language,
|
|
|
|
max_like_count,
|
|
|
|
signing_priv_key_pem,
|
|
|
|
default_reply_interval_hrs,
|
|
|
|
cw_lists, lists_enabled,
|
2022-01-28 10:54:53 +00:00
|
|
|
content_license_url,
|
|
|
|
languages_understood)
|
2020-09-27 18:35:35 +00:00
|
|
|
if debug:
|
2022-01-02 19:57:50 +00:00
|
|
|
pprint(queue_json['post'])
|
2021-03-14 19:22:58 +00:00
|
|
|
print('Queue: Queue post accepted')
|
2022-01-02 19:57:50 +00:00
|
|
|
if os.path.isfile(queue_filename):
|
2021-09-05 10:17:43 +00:00
|
|
|
try:
|
2022-01-02 19:57:50 +00:00
|
|
|
os.remove(queue_filename)
|
2021-11-25 18:42:38 +00:00
|
|
|
except OSError:
|
2021-12-28 20:32:11 +00:00
|
|
|
print('EX: run_inbox_queue 10 unable to delete ' +
|
2022-01-02 19:57:50 +00:00
|
|
|
str(queue_filename))
|
2020-04-16 09:49:57 +00:00
|
|
|
if len(queue) > 0:
|
|
|
|
queue.pop(0)
|