# epicyon/inbox.py (4565 lines, 191 KiB, Python)
__filename__ = "inbox.py"
__author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Timeline"

import json
import os
import datetime
import time
import random
from linked_data_sig import verify_json_signature
from languages import understood_post_language
from like import update_likes_collection
from reaction import update_reaction_collection
from reaction import valid_emoji_content
from utils import domain_permitted
from utils import is_group_account
from utils import is_system_account
from utils import invalid_ciphertext
from utils import remove_html
from utils import file_last_modified
from utils import has_object_string
from utils import has_object_string_object
from utils import get_reply_interval_hours
from utils import can_reply_to
from utils import get_user_paths
from utils import get_base_content_from_post
from utils import acct_dir
from utils import remove_domain_port
from utils import get_port_from_domain
from utils import has_object_dict
from utils import dm_allowed_from_domain
from utils import is_recent_post
from utils import get_config_param
from utils import has_users_path
from utils import valid_post_date
from utils import get_full_domain
from utils import remove_id_ending
from utils import get_protocol_prefixes
from utils import is_blog_post
from utils import remove_avatar_from_cache
from utils import is_public_post
from utils import get_cached_post_filename
from utils import remove_post_from_cache
from utils import url_permitted
from utils import create_inbox_queue_dir
from utils import get_status_number
from utils import get_domain_from_actor
from utils import get_nickname_from_actor
from utils import locate_post
from utils import delete_post
from utils import remove_moderation_post_from_index
from utils import load_json
from utils import save_json
from utils import undo_likes_collection_entry
from utils import undo_reaction_collection_entry
from utils import has_group_type
from utils import local_actor_url
# NOTE(review): source had "has_object_stringType" — looks like a garbled
# remnant of the camelCase->snake_case migration; every other utils import
# here is snake_case. Confirm against utils.py.
from utils import has_object_string_type
from utils import update_announce_collection
from utils import undo_announce_collection_entry
from utils import dangerous_markup
from utils import is_dm
from utils import is_reply
from utils import has_actor
from categories import get_hashtag_categories
from categories import set_hashtag_category
from categories import guess_hashtag_category
from httpsig import get_digest_algorithm_from_headers
from httpsig import verify_post_headers
from httpsig import message_content_digest
from session import create_session
from follow import follower_approval_active
from follow import is_following_actor
from follow import get_followers_of_actor
from follow import unfollower_of_account
from follow import is_follower_of_person
from follow import followed_account_accepts
from follow import store_follow_request
from follow import no_of_follow_requests
from follow import get_no_of_followers
from follow import follow_approval_required
from pprint import pprint
from cache import store_person_in_cache
from cache import get_person_pub_key
from acceptreject import receive_accept_reject
from bookmarks import update_bookmarks_collection
from bookmarks import undo_bookmarks_collection_entry
from blocking import is_blocked
from blocking import is_blocked_domain
# NOTE(review): source had "broch_modeLapses" — garbled; matches the
# snake_case broch_mode_lapses in blocking.py. Confirm against blocking.py.
from blocking import broch_mode_lapses
from filters import is_filtered
from posts import edited_post_filename
from posts import save_post_to_box
from posts import is_create_inside_announce
from posts import create_direct_message_post
from posts import valid_content_warning
from posts import download_announce
from posts import is_muted_conv
from posts import is_image_media
from posts import send_signed_json
from posts import send_to_followers_thread
from webapp_post import individual_post_as_html
from question import question_update_votes
from media import replace_you_tube
from media import replace_twitter
from git import is_git_patch
from git import receive_git_patch
from followingCalendar import receiving_calendar_events
from happening import save_event_post
from delete import remove_old_hashtags
from context import has_valid_context
from speaker import update_speaker
from announce import is_self_announce
from announce import create_announce
from notifyOnPost import notify_when_person_posts
from conversation import update_conversation
from content import valid_hash_tag
from webapp_hashtagswarm import html_hash_tag_swarm
from person import valid_sending_actor


def _store_last_post_id(base_dir: str, nickname: str, domain: str,
                        post_json_object: {}) -> None:
    """Stores the id of the last post made by an actor

    When a new post arrives this allows it to be compared against the last
    to see if it is an edited post.
    It would be great if edited posts contained a back reference id to the
    source but we don't live in that ideal world.
    """
    actor = post_id = None
    # prefer the object's attributedTo as the author of the post
    if has_object_dict(post_json_object):
        if post_json_object['object'].get('attributedTo'):
            if isinstance(post_json_object['object']['attributedTo'], str):
                actor = post_json_object['object']['attributedTo']
                post_id = remove_id_ending(post_json_object['object']['id'])
    # fall back to the activity's actor/id
    if not actor:
        actor = post_json_object['actor']
        post_id = remove_id_ending(post_json_object['id'])
    if not actor:
        return
    lastpost_dir = acct_dir(base_dir, nickname, domain) + '/lastpost'
    if not os.path.isdir(lastpost_dir):
        os.mkdir(lastpost_dir)
    # one file per actor, with '/' made filesystem-safe
    actor_filename = lastpost_dir + '/' + actor.replace('/', '#')
    try:
        with open(actor_filename, 'w+') as fp:
            fp.write(post_id)
    except OSError:
        print('EX: Unable to write last post id to ' + actor_filename)


def _update_cached_hashtag_swarm(base_dir: str, nickname: str, domain: str,
                                 http_prefix: str, domain_full: str,
                                 translate: {}) -> bool:
    """Updates the hashtag swarm stored as a file

    Returns True if the cached swarm html was regenerated and saved.
    """
    cached_swarm_filename = \
        acct_dir(base_dir, nickname, domain) + '/.hashtagSwarm'
    save_swarm = True
    if os.path.isfile(cached_swarm_filename):
        last_modified = file_last_modified(cached_swarm_filename)
        modified_date = None
        try:
            modified_date = \
                datetime.datetime.strptime(last_modified,
                                           "%Y-%m-%dT%H:%M:%SZ")
        except (ValueError, TypeError):
            # strptime raises ValueError on a bad format and
            # TypeError when last_modified is not a string
            print('EX: unable to parse last modified cache date ' +
                  str(last_modified))
        if modified_date:
            curr_date = datetime.datetime.utcnow()
            time_diff = curr_date - modified_date
            diff_mins = int(time_diff.total_seconds() / 60)
            if diff_mins < 10:
                # was saved recently, so don't save again
                # This avoids too much disk I/O
                save_swarm = False
            else:
                print('Updating cached hashtag swarm, last changed ' +
                      str(diff_mins) + ' minutes ago')
        else:
            print('WARN: no modified date for ' + str(last_modified))
    if save_swarm:
        actor = local_actor_url(http_prefix, nickname, domain_full)
        new_swarm_str = html_hash_tag_swarm(base_dir, actor, translate)
        if new_swarm_str:
            try:
                with open(cached_swarm_filename, 'w+') as fp:
                    fp.write(new_swarm_str)
                return True
            except OSError:
                print('EX: unable to write cached hashtag swarm ' +
                      cached_swarm_filename)
    return False


def store_hash_tags(base_dir: str, nickname: str, domain: str,
                    http_prefix: str, domain_full: str,
                    post_json_object: {}, translate: {}) -> None:
    """Extracts hashtags from an incoming post and updates the
    relevant tags files.
    """
    if not is_public_post(post_json_object):
        return
    if not has_object_dict(post_json_object):
        return
    if not post_json_object['object'].get('tag'):
        return
    if not post_json_object.get('id'):
        return
    if not isinstance(post_json_object['object']['tag'], list):
        return
    tags_dir = base_dir + '/tags'

    # add tags directory if it doesn't exist
    if not os.path.isdir(tags_dir):
        print('Creating tags directory')
        os.mkdir(tags_dir)

    hashtag_categories = get_hashtag_categories(base_dir)

    hashtags_ctr = 0
    for tag in post_json_object['object']['tag']:
        if not tag.get('type'):
            continue
        if not isinstance(tag['type'], str):
            continue
        if tag['type'] != 'Hashtag':
            continue
        if not tag.get('name'):
            continue
        tag_name = tag['name'].replace('#', '').strip()
        if not valid_hash_tag(tag_name):
            continue
        tags_filename = tags_dir + '/' + tag_name + '.txt'
        # post url with '/' made filesystem-safe
        post_url = remove_id_ending(post_json_object['id'])
        post_url = post_url.replace('/', '#')
        days_diff = \
            datetime.datetime.utcnow() - datetime.datetime(1970, 1, 1)
        days_since_epoch = days_diff.days
        tag_line = \
            str(days_since_epoch) + ' ' + nickname + ' ' + post_url + '\n'
        hashtags_ctr += 1
        if not os.path.isfile(tags_filename):
            try:
                with open(tags_filename, 'w+') as tags_file:
                    tags_file.write(tag_line)
            except OSError:
                print('EX: unable to write ' + tags_filename)
        else:
            # prepend the new entry, but only if this post is not
            # already recorded in the tag file
            try:
                with open(tags_filename, 'r+') as tags_file:
                    content = tags_file.read()
                    if post_url not in content:
                        tags_file.seek(0, 0)
                        tags_file.write(tag_line + content)
            except OSError as ex:
                print('EX: Failed to write entry to tags file ' +
                      tags_filename + ' ' + str(ex))
        remove_old_hashtags(base_dir, 3)

        # automatically assign a category to the tag if possible
        category_filename = tags_dir + '/' + tag_name + '.category'
        if not os.path.isfile(category_filename):
            category_str = \
                guess_hashtag_category(tag_name, hashtag_categories)
            if category_str:
                set_hashtag_category(base_dir, tag_name, category_str, False)

    # if some hashtags were found then recalculate the swarm
    # ready for later display
    if hashtags_ctr > 0:
        _update_cached_hashtag_swarm(base_dir, nickname, domain,
                                     http_prefix, domain_full, translate)


def _inbox_store_post_to_html_cache(recent_posts_cache: {},
                                    max_recent_posts: int,
                                    translate: {},
                                    base_dir: str, http_prefix: str,
                                    session, cached_webfingers: {},
                                    person_cache: {},
                                    nickname: str, domain: str, port: int,
                                    post_json_object: {},
                                    allow_deletion: bool, boxname: str,
                                    show_published_date_only: bool,
                                    peertube_instances: [],
                                    allow_local_network_access: bool,
                                    theme_name: str, system_language: str,
                                    max_like_count: int,
                                    signing_priv_key_pem: str,
                                    cw_lists: {},
                                    lists_enabled: str) -> None:
    """Converts the json post into html and stores it in a cache
    This enables the post to be quickly displayed later
    """
    page_number = -999
    avatar_url = None
    # anything which is not the outbox is cached as an inbox timeline post
    if boxname != 'outbox':
        boxname = 'inbox'

    not_dm = not is_dm(post_json_object)
    yt_replace_domain = get_config_param(base_dir, 'youtubedomain')
    twitter_replacement_domain = get_config_param(base_dir, 'twitterdomain')
    individual_post_as_html(signing_priv_key_pem,
                            True, recent_posts_cache, max_recent_posts,
                            translate, page_number,
                            base_dir, session, cached_webfingers,
                            person_cache,
                            nickname, domain, port, post_json_object,
                            avatar_url, True, allow_deletion,
                            http_prefix, __version__, boxname,
                            yt_replace_domain, twitter_replacement_domain,
                            show_published_date_only,
                            peertube_instances, allow_local_network_access,
                            theme_name, system_language, max_like_count,
                            not_dm, True, True, False, True, False,
                            cw_lists, lists_enabled)


def valid_inbox(base_dir: str, nickname: str, domain: str) -> bool:
    """Checks whether files were correctly saved to the inbox

    Returns False if any inbox file is missing or still contains
    raw queue data ('postNickname') rather than a stored post.
    """
    domain = remove_domain_port(domain)
    inbox_dir = acct_dir(base_dir, nickname, domain) + '/inbox'
    if not os.path.isdir(inbox_dir):
        return True
    for subdir, _, files in os.walk(inbox_dir):
        for fname in files:
            filename = os.path.join(subdir, fname)
            if not os.path.isfile(filename):
                print('filename: ' + filename)
                return False
            # queue items contain a 'postNickname' field; a correctly
            # saved inbox post should not
            with open(filename) as inbox_file:
                if 'postNickname' in inbox_file.read():
                    print('queue file incorrectly saved to ' + filename)
                    return False
        # only check the top level of the inbox
        break
    return True


def valid_inbox_filenames(base_dir: str, nickname: str, domain: str,
                          expectedDomain: str, expectedPort: int) -> bool:
    """Used by unit tests to check that the port number gets appended to
    domain names within saved post filenames
    """
    domain = remove_domain_port(domain)
    inbox_dir = acct_dir(base_dir, nickname, domain) + '/inbox'
    if not os.path.isdir(inbox_dir):
        print('Not an inbox directory: ' + inbox_dir)
        return True
    expected_str = expectedDomain + ':' + str(expectedPort)
    expected_found = False
    ctr = 0
    for subdir, _, files in os.walk(inbox_dir):
        for fname in files:
            filename = os.path.join(subdir, fname)
            ctr += 1
            if not os.path.isfile(filename):
                print('filename: ' + filename)
                return False
            if expected_str in filename:
                expected_found = True
        # only check the top level of the inbox
        break
    if ctr == 0:
        # an empty inbox is considered valid
        return True
    if not expected_found:
        print('Expected file was not found: ' + expected_str)
        # list the files which were present, to help debugging
        for subdir, _, files in os.walk(inbox_dir):
            for fname in files:
                print(os.path.join(subdir, fname))
            break
        return False
    return True


def inbox_message_has_params(message_json: {}) -> bool:
    """Checks whether an incoming message contains expected parameters

    Requires 'actor' (str), 'type' (str) and 'object' (dict or str);
    'to' may be absent only for certain activity types.
    """
    expected_params = ['actor', 'type', 'object']
    for param in expected_params:
        if not message_json.get(param):
            return False
    # actor should be a string
    if not isinstance(message_json['actor'], str):
        print('WARN: actor should be a string, but is actually: ' +
              str(message_json['actor']))
        pprint(message_json)
        return False
    # type should be a string
    if not isinstance(message_json['type'], str):
        print('WARN: type from ' + str(message_json['actor']) +
              ' should be a string, but is actually: ' +
              str(message_json['type']))
        return False
    # object should be a dict or a string
    if not has_object_dict(message_json):
        if not isinstance(message_json['object'], str):
            print('WARN: object from ' + str(message_json['actor']) +
                  ' should be a dict or string, but is actually: ' +
                  str(message_json['object']))
            return False
    if not message_json.get('to'):
        # these activity types are valid without a 'to' field
        allowed_without_to_param = ['Like', 'EmojiReact',
                                    'Follow', 'Join', 'Request',
                                    'Accept', 'Capability', 'Undo']
        if message_json['type'] not in allowed_without_to_param:
            return False
    return True


def inbox_permitted_message(domain: str, message_json: {},
                            federation_list: []) -> bool:
    """ check that we are receiving from a permitted domain
    """
    if not has_actor(message_json, False):
        return False

    actor = message_json['actor']
    # always allow the local domain
    if domain in actor:
        return True

    if not url_permitted(actor, federation_list):
        return False

    always_allowed_types = (
        'Follow', 'Join', 'Like', 'EmojiReact', 'Delete', 'Announce'
    )
    if message_json['type'] not in always_allowed_types:
        if not has_object_dict(message_json):
            return True
        # for other types, also check that any post being replied to
        # comes from a permitted domain
        if message_json['object'].get('inReplyTo'):
            in_reply_to = message_json['object']['inReplyTo']
            if not isinstance(in_reply_to, str):
                return False
            if not url_permitted(in_reply_to, federation_list):
                return False

    return True


def save_post_to_inbox_queue(base_dir: str, http_prefix: str,
                             nickname: str, domain: str,
                             post_json_object: {},
                             originalPostJsonObject: {},
                             messageBytes: str,
                             httpHeaders: {},
                             postPath: str, debug: bool,
                             blocked_cache: [], system_language: str) -> str:
    """Saves the given json to the inbox queue for the person
    keyId specifies the actor sending the post

    Returns the queue item filename, or None if the post was rejected
    (too long, from a blocked actor/domain, or filtered).
    """
    if len(messageBytes) > 10240:
        print('WARN: inbox message too long ' +
              str(len(messageBytes)) + ' bytes')
        return None
    original_domain = domain
    domain = remove_domain_port(domain)

    # block at the earliest stage possible, which means the data
    # isn't written to file
    post_nickname = None
    post_domain = None
    actor = None
    if post_json_object.get('actor'):
        if not isinstance(post_json_object['actor'], str):
            return None
        actor = post_json_object['actor']
        post_nickname = get_nickname_from_actor(post_json_object['actor'])
        if not post_nickname:
            print('No post Nickname in actor ' + post_json_object['actor'])
            return None
        post_domain, post_port = \
            get_domain_from_actor(post_json_object['actor'])
        if not post_domain:
            if debug:
                pprint(post_json_object)
            print('No post Domain in actor')
            return None
        if is_blocked(base_dir, nickname, domain,
                      post_nickname, post_domain, blocked_cache):
            if debug:
                print('DEBUG: post from ' + post_nickname + ' blocked')
            return None
        post_domain = get_full_domain(post_domain, post_port)

    if has_object_dict(post_json_object):
        if post_json_object['object'].get('inReplyTo'):
            if isinstance(post_json_object['object']['inReplyTo'], str):
                in_reply_to = \
                    post_json_object['object']['inReplyTo']
                reply_domain, _ = \
                    get_domain_from_actor(in_reply_to)
                if is_blocked_domain(base_dir, reply_domain, blocked_cache):
                    if debug:
                        print('WARN: post contains reply from ' +
                              str(actor) +
                              ' to a blocked domain: ' + reply_domain)
                    return None
                else:
                    reply_nickname = \
                        get_nickname_from_actor(in_reply_to)
                    if reply_nickname and reply_domain:
                        if is_blocked(base_dir, nickname, domain,
                                      reply_nickname, reply_domain,
                                      blocked_cache):
                            if debug:
                                print('WARN: post contains reply from ' +
                                      str(actor) +
                                      ' to a blocked account: ' +
                                      reply_nickname + '@' + reply_domain)
                            return None
        if post_json_object['object'].get('content'):
            content_str = \
                get_base_content_from_post(post_json_object, system_language)
            if content_str:
                if is_filtered(base_dir, nickname, domain, content_str):
                    if debug:
                        print('WARN: post was filtered out due to content')
                    return None
    original_post_id = None
    if post_json_object.get('id'):
        if not isinstance(post_json_object['id'], str):
            return None
        original_post_id = remove_id_ending(post_json_object['id'])

    curr_time = datetime.datetime.utcnow()

    post_id = None
    if post_json_object.get('id'):
        post_id = remove_id_ending(post_json_object['id'])
    published = curr_time.strftime("%Y-%m-%dT%H:%M:%SZ")
    if not post_id:
        # no id was given, so generate a status number for it
        status_number, published = get_status_number()
        if actor:
            post_id = actor + '/statuses/' + status_number
        else:
            post_id = \
                local_actor_url(http_prefix, nickname, original_domain) + \
                '/statuses/' + status_number

    # NOTE: don't change post_json_object['id'] before signature check

    inbox_queue_dir = create_inbox_queue_dir(nickname, domain, base_dir)

    handle = nickname + '@' + domain
    destination = base_dir + '/accounts/' + \
        handle + '/inbox/' + post_id.replace('/', '#') + '.json'
    filename = inbox_queue_dir + '/' + post_id.replace('/', '#') + '.json'

    shared_inbox_item = False
    if nickname == 'inbox':
        # this is an item sent to the shared inbox
        nickname = original_domain
        shared_inbox_item = True

    digest_start_time = time.time()
    digest_algorithm = get_digest_algorithm_from_headers(httpHeaders)
    digest = message_content_digest(messageBytes, digest_algorithm)
    time_diff_str = str(int((time.time() - digest_start_time) * 1000))
    if debug:
        while len(time_diff_str) < 6:
            time_diff_str = '0' + time_diff_str
        print('DIGEST|' + time_diff_str + '|' + filename)

    new_queue_item = {
        'originalId': original_post_id,
        'id': post_id,
        'actor': actor,
        'nickname': nickname,
        'domain': domain,
        'postNickname': post_nickname,
        'postDomain': post_domain,
        'sharedInbox': shared_inbox_item,
        'published': published,
        'httpHeaders': httpHeaders,
        'path': postPath,
        'post': post_json_object,
        'original': originalPostJsonObject,
        'digest': digest,
        'filename': filename,
        'destination': destination
    }

    if debug:
        print('Inbox queue item created')
    save_json(new_queue_item, filename)
    return filename


def _inbox_post_recipients_add(base_dir: str, http_prefix: str, toList: [],
recipientsDict: {},
domainMatch: str, domain: str,
actor: str, debug: bool) -> bool:
2019-07-08 22:12:24 +00:00
"""Given a list of post recipients (toList) from 'to' or 'cc' parameters
2020-09-27 18:35:35 +00:00
populate a recipientsDict with the handle for each
2019-07-08 22:12:24 +00:00
"""
2020-04-03 16:27:34 +00:00
followerRecipients = False
2019-07-08 22:12:24 +00:00
for recipient in toList:
2019-09-03 19:53:22 +00:00
if not recipient:
continue
2019-07-08 22:12:24 +00:00
# is this a to a local account?
if domainMatch in recipient:
# get the handle for the local account
2020-04-03 16:27:34 +00:00
nickname = recipient.split(domainMatch)[1]
2021-06-22 12:42:52 +00:00
handle = nickname + '@' + domain
2021-12-25 16:17:53 +00:00
if os.path.isdir(base_dir + '/accounts/' + handle):
2020-09-27 18:35:35 +00:00
recipientsDict[handle] = None
2019-07-11 12:29:31 +00:00
else:
if debug:
2021-12-25 16:17:53 +00:00
print('DEBUG: ' + base_dir + '/accounts/' +
2020-04-03 16:27:34 +00:00
handle + ' does not exist')
2019-07-11 12:29:31 +00:00
else:
if debug:
2020-04-03 16:27:34 +00:00
print('DEBUG: ' + recipient + ' is not local to ' +
domainMatch)
2019-07-11 12:29:31 +00:00
print(str(toList))
2019-07-08 22:12:24 +00:00
if recipient.endswith('followers'):
2019-07-11 12:29:31 +00:00
if debug:
print('DEBUG: followers detected as post recipients')
2020-04-03 16:27:34 +00:00
followerRecipients = True
return followerRecipients, recipientsDict
2019-07-08 22:12:24 +00:00
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def _inbox_post_recipients(base_dir: str, post_json_object: {},
                           http_prefix: str, domain: str, port: int,
                           debug: bool) -> ([], []):
    """Returns dictionaries containing the recipients of the given post
    The shared dictionary contains followers
    """
    recipients_dict = {}
    recipients_dict_followers = {}

    if not post_json_object.get('actor'):
        if debug:
            pprint(post_json_object)
            print('WARNING: inbox post has no actor')
        return recipients_dict, recipients_dict_followers

    domain = remove_domain_port(domain)
    domain_base = domain
    domain = get_full_domain(domain, port)
    # recipients local to this instance have this within their actor url
    domain_match = '/' + domain + '/users/'

    actor = post_json_object['actor']
    # first get any specific people which the post is addressed to

    follower_recipients = False
    if has_object_dict(post_json_object):
        if post_json_object['object'].get('to'):
            if isinstance(post_json_object['object']['to'], list):
                recipients_list = post_json_object['object']['to']
            else:
                recipients_list = [post_json_object['object']['to']]
            if debug:
                print('DEBUG: resolving "to"')
            includes_followers, recipients_dict = \
                _inbox_post_recipients_add(base_dir, http_prefix,
                                           recipients_list,
                                           recipients_dict,
                                           domain_match, domain_base,
                                           actor, debug)
            if includes_followers:
                follower_recipients = True
        else:
            if debug:
                print('DEBUG: inbox post has no "to"')

        if post_json_object['object'].get('cc'):
            if isinstance(post_json_object['object']['cc'], list):
                recipients_list = post_json_object['object']['cc']
            else:
                recipients_list = [post_json_object['object']['cc']]
            includes_followers, recipients_dict = \
                _inbox_post_recipients_add(base_dir, http_prefix,
                                           recipients_list,
                                           recipients_dict,
                                           domain_match, domain_base,
                                           actor, debug)
            if includes_followers:
                follower_recipients = True
        else:
            if debug:
                print('DEBUG: inbox post has no cc')
    else:
        if debug and post_json_object.get('object'):
            if isinstance(post_json_object['object'], str):
                if '/statuses/' in post_json_object['object']:
                    print('DEBUG: inbox item is a link to a post')
                else:
                    if '/users/' in post_json_object['object']:
                        print('DEBUG: inbox item is a link to an actor')

    # also check the top level 'to' and 'cc' of the activity
    if post_json_object.get('to'):
        if isinstance(post_json_object['to'], list):
            recipients_list = post_json_object['to']
        else:
            recipients_list = [post_json_object['to']]
        includes_followers, recipients_dict = \
            _inbox_post_recipients_add(base_dir, http_prefix,
                                       recipients_list,
                                       recipients_dict,
                                       domain_match, domain_base,
                                       actor, debug)
        if includes_followers:
            follower_recipients = True

    if post_json_object.get('cc'):
        if isinstance(post_json_object['cc'], list):
            recipients_list = post_json_object['cc']
        else:
            recipients_list = [post_json_object['cc']]
        includes_followers, recipients_dict = \
            _inbox_post_recipients_add(base_dir, http_prefix,
                                       recipients_list,
                                       recipients_dict,
                                       domain_match, domain_base,
                                       actor, debug)
        if includes_followers:
            follower_recipients = True

    if not follower_recipients:
        if debug:
            print('DEBUG: no followers were resolved')
        return recipients_dict, recipients_dict_followers

    # now resolve the followers
    recipients_dict_followers = \
        get_followers_of_actor(base_dir, actor, debug)

    return recipients_dict, recipients_dict_followers


def _receive_undo_follow(session, base_dir: str, http_prefix: str,
port: int, message_json: {},
federation_list: [],
debug: bool) -> bool:
2021-12-25 23:51:19 +00:00
if not message_json['object'].get('actor'):
2019-07-17 10:34:00 +00:00
if debug:
print('DEBUG: follow request has no actor within object')
return False
2021-12-26 12:19:00 +00:00
if not has_users_path(message_json['object']['actor']):
2019-07-17 10:34:00 +00:00
if debug:
2020-04-03 16:27:34 +00:00
print('DEBUG: "users" or "profile" missing ' +
'from actor within object')
2019-07-17 10:34:00 +00:00
return False
2021-12-25 23:51:19 +00:00
if message_json['object']['actor'] != message_json['actor']:
2019-07-17 10:34:00 +00:00
if debug:
print('DEBUG: actors do not match')
return False
2020-04-03 16:27:34 +00:00
nicknameFollower = \
2021-12-27 22:19:18 +00:00
get_nickname_from_actor(message_json['object']['actor'])
2019-09-02 09:43:43 +00:00
if not nicknameFollower:
2020-04-03 16:27:34 +00:00
print('WARN: unable to find nickname in ' +
2021-12-25 23:51:19 +00:00
message_json['object']['actor'])
2019-09-02 09:43:43 +00:00
return False
2020-04-03 16:27:34 +00:00
domainFollower, portFollower = \
2021-12-27 19:05:25 +00:00
get_domain_from_actor(message_json['object']['actor'])
2021-12-26 12:45:03 +00:00
domainFollowerFull = get_full_domain(domainFollower, portFollower)
2020-03-22 21:16:02 +00:00
2020-04-03 16:27:34 +00:00
nicknameFollowing = \
2021-12-27 22:19:18 +00:00
get_nickname_from_actor(message_json['object']['object'])
2019-09-02 09:43:43 +00:00
if not nicknameFollowing:
2020-04-03 16:27:34 +00:00
print('WARN: unable to find nickname in ' +
2021-12-25 23:51:19 +00:00
message_json['object']['object'])
2019-09-02 09:43:43 +00:00
return False
2020-04-03 16:27:34 +00:00
domainFollowing, portFollowing = \
2021-12-27 19:05:25 +00:00
get_domain_from_actor(message_json['object']['object'])
2021-12-26 12:45:03 +00:00
domainFollowingFull = get_full_domain(domainFollowing, portFollowing)
2019-07-17 10:34:00 +00:00
2021-12-26 00:07:44 +00:00
group_account = \
2021-12-26 17:53:07 +00:00
has_group_type(base_dir, message_json['object']['actor'], None)
2021-12-29 21:55:09 +00:00
if unfollower_of_account(base_dir,
nicknameFollowing, domainFollowingFull,
nicknameFollower, domainFollowerFull,
debug, group_account):
2020-08-20 12:11:07 +00:00
print(nicknameFollowing + '@' + domainFollowingFull + ': '
'Follower ' + nicknameFollower + '@' + domainFollowerFull +
' was removed')
2019-07-17 11:54:13 +00:00
return True
2020-03-22 21:16:02 +00:00
2019-07-17 11:54:13 +00:00
if debug:
2020-04-03 16:27:34 +00:00
print('DEBUG: Follower ' +
nicknameFollower + '@' + domainFollowerFull +
' was not removed')
2019-07-17 11:54:13 +00:00
return False
2019-07-17 10:34:00 +00:00
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def _receive_undo(session, base_dir: str, http_prefix: str,
port: int, send_threads: [], postLog: [],
cached_webfingers: {}, person_cache: {},
message_json: {}, federation_list: [],
debug: bool) -> bool:
2019-07-17 10:34:00 +00:00
"""Receives an undo request within the POST section of HTTPServer
"""
2021-12-25 23:51:19 +00:00
if not message_json['type'].startswith('Undo'):
2019-07-17 10:34:00 +00:00
return False
2019-07-17 11:24:11 +00:00
if debug:
print('DEBUG: Undo activity received')
2021-12-26 17:15:04 +00:00
if not has_actor(message_json, debug):
2019-07-17 10:34:00 +00:00
return False
2021-12-26 12:19:00 +00:00
if not has_users_path(message_json['actor']):
2019-07-17 10:34:00 +00:00
if debug:
2020-03-22 21:16:02 +00:00
print('DEBUG: "users" or "profile" missing from actor')
2019-07-17 10:34:00 +00:00
return False
2021-12-26 17:12:07 +00:00
if not has_object_stringType(message_json, debug):
2021-07-19 09:03:56 +00:00
return False
2021-12-26 15:54:46 +00:00
if not has_object_string_object(message_json, debug):
2019-07-17 10:34:00 +00:00
return False
2021-12-25 23:51:19 +00:00
if message_json['object']['type'] == 'Follow' or \
message_json['object']['type'] == 'Join':
2021-12-29 21:55:09 +00:00
return _receive_undo_follow(session, base_dir, http_prefix,
port, message_json,
federation_list, debug)
2019-07-17 10:34:00 +00:00
return False
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def _person_receive_update(base_dir: str,
                           domain: str, port: int,
                           updateNickname: str, updateDomain: str,
                           updatePort: int,
                           personJson: {}, person_cache: {},
                           debug: bool) -> bool:
    """Changes an actor. eg: avatar or display name change

    The update is only accepted when the actor id matches the expected
    remote actor, the domain differs from this instance's own domain,
    and the supplied public key matches the one already cached (either
    in memory or on disk). A key mismatch may indicate an attempt to
    hijack an account, so the update is rejected.
    Returns True if the actor update was accepted and stored.
    """
    if debug:
        print('Receiving actor update for ' + personJson['url'] +
              ' ' + str(personJson))
    domain_full = get_full_domain(domain, port)
    updateDomainFull = get_full_domain(updateDomain, updatePort)
    usersPaths = get_user_paths()
    usersStrFound = False
    # guard so the debug print below can't hit an unbound name
    # if usersPaths were ever empty
    actor = ''
    for usersStr in usersPaths:
        actor = updateDomainFull + usersStr + updateNickname
        if actor in personJson['id']:
            usersStrFound = True
            break
    if not usersStrFound:
        if debug:
            print('actor: ' + actor)
            print('id: ' + personJson['id'])
            print('DEBUG: Actor does not match id')
        return False
    if updateDomainFull == domain_full:
        if debug:
            print('DEBUG: You can only receive actor updates ' +
                  'for domains other than your own')
        return False
    if not personJson.get('publicKey'):
        if debug:
            print('DEBUG: actor update does not contain a public key')
        return False
    if not personJson['publicKey'].get('publicKeyPem'):
        if debug:
            print('DEBUG: actor update does not contain a public key Pem')
        return False
    actorFilename = base_dir + '/cache/actors/' + \
        personJson['id'].replace('/', '#') + '.json'
    # check that the public keys match.
    # If they don't then this may be a nefarious attempt to hack an account
    idx = personJson['id']
    if person_cache.get(idx):
        if person_cache[idx]['actor']['publicKey']['publicKeyPem'] != \
           personJson['publicKey']['publicKeyPem']:
            if debug:
                print('WARN: Public key does not match when updating actor')
            return False
    else:
        if os.path.isfile(actorFilename):
            existingPersonJson = load_json(actorFilename)
            if existingPersonJson:
                if existingPersonJson['publicKey']['publicKeyPem'] != \
                   personJson['publicKey']['publicKeyPem']:
                    if debug:
                        print('WARN: Public key does not match ' +
                              'cached actor when updating')
                    return False
    # save to cache in memory
    store_person_in_cache(base_dir, personJson['id'], personJson,
                          person_cache, True)
    # save to cache on file
    if save_json(personJson, actorFilename):
        if debug:
            print('actor updated for ' + personJson['id'])
    # remove avatar if it exists so that it will be refreshed later
    # when a timeline is constructed
    actorStr = personJson['id'].replace('/', '-')
    remove_avatar_from_cache(base_dir, actorStr)
    return True
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def _receive_update_to_question(recent_posts_cache: {}, message_json: {},
base_dir: str,
nickname: str, domain: str) -> None:
2019-11-26 10:43:37 +00:00
"""Updating a question as new votes arrive
"""
# message url of the question
2021-12-25 23:51:19 +00:00
if not message_json.get('id'):
2019-11-26 10:43:37 +00:00
return
2021-12-26 17:15:04 +00:00
if not has_actor(message_json, False):
2019-11-26 10:43:37 +00:00
return
2021-12-27 11:20:57 +00:00
messageId = remove_id_ending(message_json['id'])
2019-11-26 10:43:37 +00:00
if '#' in messageId:
2020-04-03 16:27:34 +00:00
messageId = messageId.split('#', 1)[0]
2019-11-26 10:43:37 +00:00
# find the question post
2021-12-26 23:41:34 +00:00
post_filename = locate_post(base_dir, nickname, domain, messageId)
if not post_filename:
2019-11-26 10:43:37 +00:00
return
# load the json for the question
2021-12-26 23:41:34 +00:00
post_json_object = load_json(post_filename, 1)
2021-12-25 22:09:19 +00:00
if not post_json_object:
2019-11-26 10:43:37 +00:00
return
2021-12-25 22:09:19 +00:00
if not post_json_object.get('actor'):
2019-11-26 10:43:37 +00:00
return
# does the actor match?
2021-12-25 23:51:19 +00:00
if post_json_object['actor'] != message_json['actor']:
2019-11-26 10:43:37 +00:00
return
2021-12-26 23:41:34 +00:00
save_json(message_json, post_filename)
2019-11-26 10:43:37 +00:00
# ensure that the cached post is removed if it exists, so
# that it then will be recreated
2020-04-03 16:27:34 +00:00
cachedPostFilename = \
2021-12-26 23:41:34 +00:00
get_cached_post_filename(base_dir, nickname, domain, message_json)
2019-11-26 10:43:37 +00:00
if cachedPostFilename:
if os.path.isfile(cachedPostFilename):
try:
os.remove(cachedPostFilename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-29 21:55:09 +00:00
print('EX: _receive_update_to_question unable to delete ' +
2021-10-29 18:48:15 +00:00
cachedPostFilename)
2019-11-26 10:43:37 +00:00
# remove from memory cache
2021-12-27 11:05:24 +00:00
remove_post_from_cache(message_json, recent_posts_cache)
2020-03-22 21:16:02 +00:00
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def _receive_update_activity(recent_posts_cache: {}, session, base_dir: str,
http_prefix: str, domain: str, port: int,
send_threads: [], postLog: [],
cached_webfingers: {},
person_cache: {}, message_json: {},
federation_list: [],
nickname: str, debug: bool) -> bool:
2019-07-09 14:20:23 +00:00
"""Receives an Update activity within the POST section of HTTPServer
"""
2021-12-25 23:51:19 +00:00
if message_json['type'] != 'Update':
2019-07-09 14:20:23 +00:00
return False
2021-12-26 17:15:04 +00:00
if not has_actor(message_json, debug):
2019-07-09 14:20:23 +00:00
return False
2021-12-26 17:12:07 +00:00
if not has_object_stringType(message_json, debug):
2021-07-19 09:07:20 +00:00
return False
2021-12-26 12:19:00 +00:00
if not has_users_path(message_json['actor']):
2019-07-09 14:20:23 +00:00
if debug:
2020-04-03 16:27:34 +00:00
print('DEBUG: "users" or "profile" missing from actor in ' +
2021-12-25 23:51:19 +00:00
message_json['type'])
2019-07-09 14:20:23 +00:00
return False
2019-08-22 17:25:12 +00:00
2021-12-25 23:51:19 +00:00
if message_json['object']['type'] == 'Question':
2021-12-29 21:55:09 +00:00
_receive_update_to_question(recent_posts_cache, message_json,
base_dir, nickname, domain)
if debug:
print('DEBUG: Question update was received')
return True
2021-12-25 23:51:19 +00:00
if message_json['object']['type'] == 'Person' or \
message_json['object']['type'] == 'Application' or \
message_json['object']['type'] == 'Group' or \
message_json['object']['type'] == 'Service':
if message_json['object'].get('url') and \
message_json['object'].get('id'):
2021-03-14 19:42:25 +00:00
if debug:
2021-12-25 23:51:19 +00:00
print('Request to update actor: ' + str(message_json))
2021-12-27 22:19:18 +00:00
updateNickname = get_nickname_from_actor(message_json['actor'])
2019-09-02 09:43:43 +00:00
if updateNickname:
2020-04-03 16:27:34 +00:00
updateDomain, updatePort = \
2021-12-27 19:05:25 +00:00
get_domain_from_actor(message_json['actor'])
2021-12-29 21:55:09 +00:00
if _person_receive_update(base_dir,
domain, port,
updateNickname, updateDomain,
updatePort,
message_json['object'],
person_cache, debug):
2021-12-25 23:51:19 +00:00
print('Person Update: ' + str(message_json))
2019-09-02 09:43:43 +00:00
if debug:
2020-04-03 16:27:34 +00:00
print('DEBUG: Profile update was received for ' +
2021-12-25 23:51:19 +00:00
message_json['object']['url'])
2019-09-02 09:43:43 +00:00
return True
2019-07-09 14:20:23 +00:00
return False
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def _receive_like(recent_posts_cache: {},
session, handle: str, isGroup: bool, base_dir: str,
http_prefix: str, domain: str, port: int,
onion_domain: str,
send_threads: [], postLog: [], cached_webfingers: {},
person_cache: {}, message_json: {}, federation_list: [],
debug: bool,
signing_priv_key_pem: str,
max_recent_posts: int, translate: {},
allow_deletion: bool,
yt_replace_domain: str,
twitter_replacement_domain: str,
peertube_instances: [],
allow_local_network_access: bool,
theme_name: str, system_language: str,
max_like_count: int, cw_lists: {},
lists_enabled: str) -> bool:
2019-07-10 12:40:31 +00:00
"""Receives a Like activity within the POST section of HTTPServer
"""
2021-12-25 23:51:19 +00:00
if message_json['type'] != 'Like':
2019-07-10 12:40:31 +00:00
return False
2021-12-26 17:15:04 +00:00
if not has_actor(message_json, debug):
2019-07-10 12:40:31 +00:00
return False
2021-12-26 17:12:07 +00:00
if not has_object_string(message_json, debug):
2019-07-10 12:40:31 +00:00
return False
2021-12-25 23:51:19 +00:00
if not message_json.get('to'):
2019-07-10 12:40:31 +00:00
if debug:
2021-12-25 23:51:19 +00:00
print('DEBUG: ' + message_json['type'] + ' has no "to" list')
2019-07-10 12:40:31 +00:00
return False
2021-12-26 12:19:00 +00:00
if not has_users_path(message_json['actor']):
2019-07-10 12:40:31 +00:00
if debug:
2020-04-03 16:27:34 +00:00
print('DEBUG: "users" or "profile" missing from actor in ' +
2021-12-25 23:51:19 +00:00
message_json['type'])
2019-07-10 12:40:31 +00:00
return False
2021-12-25 23:51:19 +00:00
if '/statuses/' not in message_json['object']:
2019-07-10 12:40:31 +00:00
if debug:
2020-04-03 16:27:34 +00:00
print('DEBUG: "statuses" missing from object in ' +
2021-12-25 23:51:19 +00:00
message_json['type'])
2019-07-10 12:40:31 +00:00
return False
2021-12-25 16:17:53 +00:00
if not os.path.isdir(base_dir + '/accounts/' + handle):
2020-04-03 16:27:34 +00:00
print('DEBUG: unknown recipient of like - ' + handle)
# if this post in the outbox of the person?
handleName = handle.split('@')[0]
handleDom = handle.split('@')[1]
2021-12-25 23:51:19 +00:00
postLikedId = message_json['object']
2021-12-26 23:41:34 +00:00
post_filename = locate_post(base_dir, handleName, handleDom, postLikedId)
if not post_filename:
2019-07-10 12:40:31 +00:00
if debug:
print('DEBUG: post not found in inbox or outbox')
2021-05-19 19:21:27 +00:00
print(postLikedId)
2019-07-10 12:40:31 +00:00
return True
if debug:
2019-07-11 12:59:00 +00:00
print('DEBUG: liked post found in inbox')
2019-10-19 17:50:05 +00:00
2021-12-25 23:51:19 +00:00
likeActor = message_json['actor']
handleName = handle.split('@')[0]
handleDom = handle.split('@')[1]
2021-12-29 21:55:09 +00:00
if not _already_liked(base_dir,
handleName, handleDom,
postLikedId,
likeActor):
_like_notify(base_dir, domain, onion_domain, handle,
likeActor, postLikedId)
update_likes_collection(recent_posts_cache, base_dir, post_filename,
postLikedId, likeActor,
handleName, domain, debug, None)
2021-09-03 22:04:50 +00:00
# regenerate the html
2021-12-26 23:41:34 +00:00
likedPostJson = load_json(post_filename, 0, 1)
2021-09-03 22:04:50 +00:00
if likedPostJson:
2021-10-14 22:29:51 +00:00
if likedPostJson.get('type'):
if likedPostJson['type'] == 'Announce' and \
likedPostJson.get('object'):
if isinstance(likedPostJson['object'], str):
announceLikeUrl = likedPostJson['object']
announceLikedFilename = \
2021-12-26 20:36:08 +00:00
locate_post(base_dir, handleName,
domain, announceLikeUrl)
2021-10-14 22:29:51 +00:00
if announceLikedFilename:
postLikedId = announceLikeUrl
2021-12-26 23:41:34 +00:00
post_filename = announceLikedFilename
2021-12-29 21:55:09 +00:00
update_likes_collection(recent_posts_cache,
base_dir,
post_filename,
postLikedId,
likeActor,
handleName,
domain, debug, None)
2021-10-14 22:29:51 +00:00
if likedPostJson:
if debug:
cachedPostFilename = \
2021-12-26 23:41:34 +00:00
get_cached_post_filename(base_dir, handleName, domain,
likedPostJson)
2021-10-14 22:29:51 +00:00
print('Liked post json: ' + str(likedPostJson))
print('Liked post nickname: ' + handleName + ' ' + domain)
print('Liked post cache: ' + str(cachedPostFilename))
pageNumber = 1
2021-12-25 20:06:27 +00:00
show_published_date_only = False
2021-10-14 22:29:51 +00:00
showIndividualPostIcons = True
manuallyApproveFollowers = \
2021-12-28 20:32:11 +00:00
follower_approval_active(base_dir, handleName, domain)
2021-12-26 20:12:18 +00:00
notDM = not is_dm(likedPostJson)
2021-12-29 21:55:09 +00:00
individual_post_as_html(signing_priv_key_pem, False,
recent_posts_cache, max_recent_posts,
translate, pageNumber, base_dir,
session, cached_webfingers, person_cache,
handleName, domain, port, likedPostJson,
None, True, allow_deletion,
http_prefix, __version__,
'inbox',
yt_replace_domain,
twitter_replacement_domain,
show_published_date_only,
peertube_instances,
allow_local_network_access,
theme_name, system_language,
max_like_count, notDM,
showIndividualPostIcons,
manuallyApproveFollowers,
False, True, False, cw_lists,
lists_enabled)
2019-07-10 12:40:31 +00:00
return True
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def _receive_undo_like(recent_posts_cache: {},
session, handle: str, isGroup: bool, base_dir: str,
http_prefix: str, domain: str, port: int,
send_threads: [], postLog: [], cached_webfingers: {},
person_cache: {}, message_json: {}, federation_list: [],
debug: bool,
signing_priv_key_pem: str,
max_recent_posts: int, translate: {},
allow_deletion: bool,
yt_replace_domain: str,
twitter_replacement_domain: str,
peertube_instances: [],
allow_local_network_access: bool,
theme_name: str, system_language: str,
max_like_count: int, cw_lists: {},
lists_enabled: str) -> bool:
2019-07-12 09:10:09 +00:00
"""Receives an undo like activity within the POST section of HTTPServer
"""
2021-12-25 23:51:19 +00:00
if message_json['type'] != 'Undo':
2019-07-12 09:10:09 +00:00
return False
2021-12-26 17:15:04 +00:00
if not has_actor(message_json, debug):
2019-07-12 09:10:09 +00:00
return False
2021-12-26 17:12:07 +00:00
if not has_object_stringType(message_json, debug):
2019-07-12 09:10:09 +00:00
return False
2021-12-25 23:51:19 +00:00
if message_json['object']['type'] != 'Like':
2019-07-12 09:10:09 +00:00
return False
2021-12-26 15:54:46 +00:00
if not has_object_string_object(message_json, debug):
2019-07-12 09:10:09 +00:00
return False
2021-12-26 12:19:00 +00:00
if not has_users_path(message_json['actor']):
2019-07-12 09:10:09 +00:00
if debug:
2020-04-03 16:27:34 +00:00
print('DEBUG: "users" or "profile" missing from actor in ' +
2021-12-25 23:51:19 +00:00
message_json['type'] + ' like')
2019-07-12 09:10:09 +00:00
return False
2021-12-25 23:51:19 +00:00
if '/statuses/' not in message_json['object']['object']:
2019-07-12 09:10:09 +00:00
if debug:
2020-04-03 16:27:34 +00:00
print('DEBUG: "statuses" missing from like object in ' +
2021-12-25 23:51:19 +00:00
message_json['type'])
2019-07-12 09:10:09 +00:00
return False
2021-12-25 16:17:53 +00:00
if not os.path.isdir(base_dir + '/accounts/' + handle):
2020-04-03 16:27:34 +00:00
print('DEBUG: unknown recipient of undo like - ' + handle)
2019-07-12 09:10:09 +00:00
# if this post in the outbox of the person?
handleName = handle.split('@')[0]
handleDom = handle.split('@')[1]
2021-12-26 23:41:34 +00:00
post_filename = \
2021-12-26 20:36:08 +00:00
locate_post(base_dir, handleName, handleDom,
message_json['object']['object'])
2021-12-26 23:41:34 +00:00
if not post_filename:
2019-07-12 09:10:09 +00:00
if debug:
2019-07-12 09:41:57 +00:00
print('DEBUG: unliked post not found in inbox or outbox')
2021-12-25 23:51:19 +00:00
print(message_json['object']['object'])
2019-07-12 09:10:09 +00:00
return True
if debug:
print('DEBUG: liked post found in inbox. Now undoing.')
2021-12-25 23:51:19 +00:00
likeActor = message_json['actor']
postLikedId = message_json['object']
2021-12-27 23:23:07 +00:00
undo_likes_collection_entry(recent_posts_cache, base_dir, post_filename,
postLikedId, likeActor, domain, debug, None)
2021-09-03 22:10:54 +00:00
# regenerate the html
2021-12-26 23:41:34 +00:00
likedPostJson = load_json(post_filename, 0, 1)
2021-09-03 22:10:54 +00:00
if likedPostJson:
2021-10-14 22:29:51 +00:00
if likedPostJson.get('type'):
if likedPostJson['type'] == 'Announce' and \
likedPostJson.get('object'):
if isinstance(likedPostJson['object'], str):
announceLikeUrl = likedPostJson['object']
announceLikedFilename = \
2021-12-26 20:36:08 +00:00
locate_post(base_dir, handleName,
domain, announceLikeUrl)
2021-10-14 22:29:51 +00:00
if announceLikedFilename:
postLikedId = announceLikeUrl
2021-12-26 23:41:34 +00:00
post_filename = announceLikedFilename
2021-12-27 23:23:07 +00:00
undo_likes_collection_entry(recent_posts_cache,
base_dir,
post_filename,
postLikedId,
likeActor, domain, debug,
None)
2021-10-14 22:29:51 +00:00
if likedPostJson:
if debug:
cachedPostFilename = \
2021-12-26 23:41:34 +00:00
get_cached_post_filename(base_dir, handleName, domain,
likedPostJson)
2021-10-14 22:29:51 +00:00
print('Unliked post json: ' + str(likedPostJson))
print('Unliked post nickname: ' + handleName + ' ' + domain)
print('Unliked post cache: ' + str(cachedPostFilename))
pageNumber = 1
2021-12-25 20:06:27 +00:00
show_published_date_only = False
2021-10-14 22:29:51 +00:00
showIndividualPostIcons = True
manuallyApproveFollowers = \
2021-12-28 20:32:11 +00:00
follower_approval_active(base_dir, handleName, domain)
2021-12-26 20:12:18 +00:00
notDM = not is_dm(likedPostJson)
2021-12-29 21:55:09 +00:00
individual_post_as_html(signing_priv_key_pem, False,
recent_posts_cache, max_recent_posts,
translate, pageNumber, base_dir,
session, cached_webfingers, person_cache,
handleName, domain, port, likedPostJson,
None, True, allow_deletion,
http_prefix, __version__,
'inbox',
yt_replace_domain,
twitter_replacement_domain,
show_published_date_only,
peertube_instances,
allow_local_network_access,
theme_name, system_language,
max_like_count, notDM,
showIndividualPostIcons,
manuallyApproveFollowers,
False, True, False, cw_lists,
lists_enabled)
2019-07-12 09:10:09 +00:00
return True
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def _receive_reaction(recent_posts_cache: {},
session, handle: str, isGroup: bool, base_dir: str,
http_prefix: str, domain: str, port: int,
onion_domain: str,
send_threads: [], postLog: [], cached_webfingers: {},
person_cache: {}, message_json: {}, federation_list: [],
debug: bool,
signing_priv_key_pem: str,
max_recent_posts: int, translate: {},
allow_deletion: bool,
yt_replace_domain: str,
twitter_replacement_domain: str,
peertube_instances: [],
allow_local_network_access: bool,
theme_name: str, system_language: str,
max_like_count: int, cw_lists: {},
lists_enabled: str) -> bool:
2021-11-10 12:16:03 +00:00
"""Receives an emoji reaction within the POST section of HTTPServer
"""
2021-12-25 23:51:19 +00:00
if message_json['type'] != 'EmojiReact':
2021-11-10 12:16:03 +00:00
return False
2021-12-26 17:15:04 +00:00
if not has_actor(message_json, debug):
2021-11-10 12:16:03 +00:00
return False
2021-12-26 17:12:07 +00:00
if not has_object_string(message_json, debug):
2021-11-10 12:16:03 +00:00
return False
2021-12-25 23:51:19 +00:00
if not message_json.get('to'):
2021-11-10 12:16:03 +00:00
if debug:
2021-12-25 23:51:19 +00:00
print('DEBUG: ' + message_json['type'] + ' has no "to" list')
2021-11-10 12:16:03 +00:00
return False
2021-12-25 23:51:19 +00:00
if not message_json.get('content'):
2021-11-10 12:16:03 +00:00
if debug:
2021-12-25 23:51:19 +00:00
print('DEBUG: ' + message_json['type'] + ' has no "content"')
2021-11-10 12:16:03 +00:00
return False
2021-12-25 23:51:19 +00:00
if not isinstance(message_json['content'], str):
2021-11-10 12:16:03 +00:00
if debug:
2021-12-25 23:51:19 +00:00
print('DEBUG: ' + message_json['type'] + ' content is not string')
2021-11-10 12:16:03 +00:00
return False
2021-12-29 21:55:09 +00:00
if not valid_emoji_content(message_json['content']):
print('_receive_reaction: Invalid emoji reaction: "' +
2021-12-25 23:51:19 +00:00
message_json['content'] + '" from ' + message_json['actor'])
2021-11-10 13:10:02 +00:00
return False
2021-12-26 12:19:00 +00:00
if not has_users_path(message_json['actor']):
2021-11-10 12:16:03 +00:00
if debug:
print('DEBUG: "users" or "profile" missing from actor in ' +
2021-12-25 23:51:19 +00:00
message_json['type'])
2021-11-10 12:16:03 +00:00
return False
2021-12-25 23:51:19 +00:00
if '/statuses/' not in message_json['object']:
2021-11-10 12:16:03 +00:00
if debug:
print('DEBUG: "statuses" missing from object in ' +
2021-12-25 23:51:19 +00:00
message_json['type'])
2021-11-10 12:16:03 +00:00
return False
2021-12-25 16:17:53 +00:00
if not os.path.isdir(base_dir + '/accounts/' + handle):
2021-11-10 12:16:03 +00:00
print('DEBUG: unknown recipient of emoji reaction - ' + handle)
2021-12-25 16:17:53 +00:00
if os.path.isfile(base_dir + '/accounts/' + handle +
2021-11-17 14:25:24 +00:00
'/.hideReactionButton'):
print('Emoji reaction rejected by ' + handle +
' due to their settings')
return True
2021-11-10 12:16:03 +00:00
# if this post in the outbox of the person?
handleName = handle.split('@')[0]
handleDom = handle.split('@')[1]
2021-11-17 14:25:24 +00:00
2021-12-25 23:51:19 +00:00
postReactionId = message_json['object']
2021-12-27 15:43:22 +00:00
emojiContent = remove_html(message_json['content'])
2021-11-10 12:16:03 +00:00
if not emojiContent:
if debug:
print('DEBUG: emoji reaction has no content')
return True
2021-12-26 23:41:34 +00:00
post_filename = locate_post(base_dir, handleName, handleDom,
postReactionId)
if not post_filename:
2021-11-10 12:16:03 +00:00
if debug:
print('DEBUG: emoji reaction post not found in inbox or outbox')
print(postReactionId)
return True
if debug:
print('DEBUG: emoji reaction post found in inbox')
2021-12-25 23:51:19 +00:00
reactionActor = message_json['actor']
2021-11-10 12:16:03 +00:00
handleName = handle.split('@')[0]
handleDom = handle.split('@')[1]
2021-12-29 21:55:09 +00:00
if not _already_reacted(base_dir,
handleName, handleDom,
postReactionId,
reactionActor,
emojiContent):
_reaction_notify(base_dir, domain, onion_domain, handle,
reactionActor, postReactionId, emojiContent)
update_reaction_collection(recent_posts_cache, base_dir, post_filename,
postReactionId, reactionActor,
handleName, domain, debug, None, emojiContent)
2021-11-10 12:16:03 +00:00
# regenerate the html
2021-12-29 21:55:09 +00:00
reaction_post_json = load_json(post_filename, 0, 1)
if reaction_post_json:
if reaction_post_json.get('type'):
if reaction_post_json['type'] == 'Announce' and \
reaction_post_json.get('object'):
if isinstance(reaction_post_json['object'], str):
announceReactionUrl = reaction_post_json['object']
2021-11-10 12:16:03 +00:00
announceReactionFilename = \
2021-12-26 20:36:08 +00:00
locate_post(base_dir, handleName,
domain, announceReactionUrl)
2021-11-10 12:16:03 +00:00
if announceReactionFilename:
postReactionId = announceReactionUrl
2021-12-26 23:41:34 +00:00
post_filename = announceReactionFilename
2021-12-29 21:55:09 +00:00
update_reaction_collection(recent_posts_cache,
base_dir,
post_filename,
postReactionId,
reactionActor,
handleName,
domain, debug, None,
emojiContent)
if reaction_post_json:
2021-11-10 12:16:03 +00:00
if debug:
cachedPostFilename = \
2021-12-26 23:41:34 +00:00
get_cached_post_filename(base_dir, handleName, domain,
2021-12-29 21:55:09 +00:00
reaction_post_json)
print('Reaction post json: ' + str(reaction_post_json))
2021-11-10 12:16:03 +00:00
print('Reaction post nickname: ' + handleName + ' ' + domain)
print('Reaction post cache: ' + str(cachedPostFilename))
pageNumber = 1
2021-12-25 20:06:27 +00:00
show_published_date_only = False
2021-11-10 12:16:03 +00:00
showIndividualPostIcons = True
manuallyApproveFollowers = \
2021-12-28 20:32:11 +00:00
follower_approval_active(base_dir, handleName, domain)
2021-12-29 21:55:09 +00:00
notDM = not is_dm(reaction_post_json)
individual_post_as_html(signing_priv_key_pem, False,
recent_posts_cache, max_recent_posts,
translate, pageNumber, base_dir,
session, cached_webfingers, person_cache,
handleName, domain, port,
reaction_post_json,
None, True, allow_deletion,
http_prefix, __version__,
'inbox',
yt_replace_domain,
twitter_replacement_domain,
show_published_date_only,
peertube_instances,
allow_local_network_access,
theme_name, system_language,
max_like_count, notDM,
showIndividualPostIcons,
manuallyApproveFollowers,
False, True, False, cw_lists,
lists_enabled)
2021-11-10 12:16:03 +00:00
return True
2021-12-29 21:55:09 +00:00
def _receive_undo_reaction(recent_posts_cache: {},
session, handle: str, isGroup: bool, base_dir: str,
http_prefix: str, domain: str, port: int,
send_threads: [], postLog: [],
cached_webfingers: {},
person_cache: {}, message_json: {},
federation_list: [],
debug: bool,
signing_priv_key_pem: str,
max_recent_posts: int, translate: {},
allow_deletion: bool,
yt_replace_domain: str,
twitter_replacement_domain: str,
peertube_instances: [],
allow_local_network_access: bool,
theme_name: str, system_language: str,
max_like_count: int, cw_lists: {},
lists_enabled: str) -> bool:
2021-11-10 12:16:03 +00:00
"""Receives an undo emoji reaction within the POST section of HTTPServer
"""
2021-12-25 23:51:19 +00:00
if message_json['type'] != 'Undo':
2021-11-10 12:16:03 +00:00
return False
2021-12-26 17:15:04 +00:00
if not has_actor(message_json, debug):
2021-11-10 12:16:03 +00:00
return False
2021-12-26 17:12:07 +00:00
if not has_object_stringType(message_json, debug):
2021-11-10 12:16:03 +00:00
return False
2021-12-25 23:51:19 +00:00
if message_json['object']['type'] != 'EmojiReact':
2021-11-10 12:16:03 +00:00
return False
2021-12-26 15:54:46 +00:00
if not has_object_string_object(message_json, debug):
2021-11-10 12:16:03 +00:00
return False
2021-12-25 23:51:19 +00:00
if not message_json['object'].get('content'):
2021-11-10 12:16:03 +00:00
if debug:
2021-12-25 23:51:19 +00:00
print('DEBUG: ' + message_json['type'] + ' has no "content"')
2021-11-10 12:16:03 +00:00
return False
2021-12-25 23:51:19 +00:00
if not isinstance(message_json['object']['content'], str):
2021-11-10 12:16:03 +00:00
if debug:
2021-12-25 23:51:19 +00:00
print('DEBUG: ' + message_json['type'] + ' content is not string')
2021-11-10 12:16:03 +00:00
return False
2021-12-26 12:19:00 +00:00
if not has_users_path(message_json['actor']):
2021-11-10 12:16:03 +00:00
if debug:
print('DEBUG: "users" or "profile" missing from actor in ' +
2021-12-25 23:51:19 +00:00
message_json['type'] + ' reaction')
2021-11-10 12:16:03 +00:00
return False
2021-12-25 23:51:19 +00:00
if '/statuses/' not in message_json['object']['object']:
2021-11-10 12:16:03 +00:00
if debug:
print('DEBUG: "statuses" missing from reaction object in ' +
2021-12-25 23:51:19 +00:00
message_json['type'])
2021-11-10 12:16:03 +00:00
return False
2021-12-25 16:17:53 +00:00
if not os.path.isdir(base_dir + '/accounts/' + handle):
2021-11-10 12:16:03 +00:00
print('DEBUG: unknown recipient of undo reaction - ' + handle)
# if this post in the outbox of the person?
handleName = handle.split('@')[0]
handleDom = handle.split('@')[1]
2021-12-26 23:41:34 +00:00
post_filename = \
2021-12-26 20:36:08 +00:00
locate_post(base_dir, handleName, handleDom,
message_json['object']['object'])
2021-12-26 23:41:34 +00:00
if not post_filename:
2021-11-10 12:16:03 +00:00
if debug:
print('DEBUG: unreaction post not found in inbox or outbox')
2021-12-25 23:51:19 +00:00
print(message_json['object']['object'])
2021-11-10 12:16:03 +00:00
return True
if debug:
print('DEBUG: reaction post found in inbox. Now undoing.')
2021-12-25 23:51:19 +00:00
reactionActor = message_json['actor']
postReactionId = message_json['object']
2021-12-27 15:43:22 +00:00
emojiContent = remove_html(message_json['object']['content'])
2021-11-10 12:16:03 +00:00
if not emojiContent:
if debug:
print('DEBUG: unreaction has no content')
return True
2021-12-27 23:02:50 +00:00
undo_reaction_collection_entry(recent_posts_cache, base_dir, post_filename,
postReactionId, reactionActor, domain,
debug, None, emojiContent)
2021-11-10 12:16:03 +00:00
# regenerate the html
2021-12-29 21:55:09 +00:00
reaction_post_json = load_json(post_filename, 0, 1)
if reaction_post_json:
if reaction_post_json.get('type'):
if reaction_post_json['type'] == 'Announce' and \
reaction_post_json.get('object'):
if isinstance(reaction_post_json['object'], str):
announceReactionUrl = reaction_post_json['object']
2021-11-10 12:16:03 +00:00
announceReactionFilename = \
2021-12-26 20:36:08 +00:00
locate_post(base_dir, handleName,
domain, announceReactionUrl)
2021-11-10 12:16:03 +00:00
if announceReactionFilename:
postReactionId = announceReactionUrl
2021-12-26 23:41:34 +00:00
post_filename = announceReactionFilename
2021-12-27 23:02:50 +00:00
undo_reaction_collection_entry(recent_posts_cache,
base_dir,
post_filename,
postReactionId,
reactionActor,
domain,
debug, None,
emojiContent)
2021-12-29 21:55:09 +00:00
if reaction_post_json:
2021-11-10 12:16:03 +00:00
if debug:
cachedPostFilename = \
2021-12-26 23:41:34 +00:00
get_cached_post_filename(base_dir, handleName, domain,
2021-12-29 21:55:09 +00:00
reaction_post_json)
print('Unreaction post json: ' + str(reaction_post_json))
2021-11-10 12:16:03 +00:00
print('Unreaction post nickname: ' + handleName + ' ' + domain)
print('Unreaction post cache: ' + str(cachedPostFilename))
pageNumber = 1
2021-12-25 20:06:27 +00:00
show_published_date_only = False
2021-11-10 12:16:03 +00:00
showIndividualPostIcons = True
manuallyApproveFollowers = \
2021-12-28 20:32:11 +00:00
follower_approval_active(base_dir, handleName, domain)
2021-12-29 21:55:09 +00:00
notDM = not is_dm(reaction_post_json)
individual_post_as_html(signing_priv_key_pem, False,
recent_posts_cache, max_recent_posts,
translate, pageNumber, base_dir,
session, cached_webfingers, person_cache,
handleName, domain, port,
reaction_post_json,
None, True, allow_deletion,
http_prefix, __version__,
'inbox',
yt_replace_domain,
twitter_replacement_domain,
show_published_date_only,
peertube_instances,
allow_local_network_access,
theme_name, system_language,
max_like_count, notDM,
showIndividualPostIcons,
manuallyApproveFollowers,
False, True, False, cw_lists,
lists_enabled)
2021-11-10 12:16:03 +00:00
return True
2021-12-29 21:55:09 +00:00
def _receive_bookmark(recent_posts_cache: {},
                      session, handle: str, isGroup: bool, base_dir: str,
                      http_prefix: str, domain: str, port: int,
                      send_threads: [], postLog: [], cached_webfingers: {},
                      person_cache: {}, message_json: {}, federation_list: [],
                      debug: bool, signing_priv_key_pem: str,
                      max_recent_posts: int, translate: {},
                      allow_deletion: bool,
                      yt_replace_domain: str,
                      twitter_replacement_domain: str,
                      peertube_instances: [],
                      allow_local_network_access: bool,
                      theme_name: str, system_language: str,
                      max_like_count: int, cw_lists: {},
                      lists_enabled: {}) -> bool:
    """Receives a bookmark activity within the POST section of HTTPServer.
    This is an 'Add' activity whose target is the account's /tlbookmarks
    timeline and whose object is a Document referencing a post url.
    Returns False if the activity is not a valid bookmark Add.
    Returns True once the activity has been handled, even if the
    bookmarked post could not be located locally.
    """
    # validate the basic shape of the activity
    if not message_json.get('type'):
        return False
    if message_json['type'] != 'Add':
        return False
    if not has_actor(message_json, debug):
        return False
    if not message_json.get('target'):
        if debug:
            print('DEBUG: no target in inbox bookmark Add')
        return False
    if not has_object_stringType(message_json, debug):
        return False
    if not isinstance(message_json['target'], str):
        if debug:
            print('DEBUG: inbox bookmark Add target is not string')
        return False
    domain_full = get_full_domain(domain, port)
    nickname = handle.split('@')[0]
    # only the account holder can bookmark their own timeline
    if not message_json['actor'].endswith(domain_full + '/users/' + nickname):
        if debug:
            print('DEBUG: inbox bookmark Add unexpected actor')
        return False
    if not message_json['target'].endswith(message_json['actor'] +
                                           '/tlbookmarks'):
        if debug:
            print('DEBUG: inbox bookmark Add target invalid ' +
                  message_json['target'])
        return False
    if message_json['object']['type'] != 'Document':
        if debug:
            print('DEBUG: inbox bookmark Add type is not Document')
        return False
    if not message_json['object'].get('url'):
        if debug:
            print('DEBUG: inbox bookmark Add missing url')
        return False
    if '/statuses/' not in message_json['object']['url']:
        if debug:
            print('DEBUG: inbox bookmark Add missing statuses un url')
        return False
    if debug:
        print('DEBUG: c2s inbox bookmark Add request arrived in outbox')

    # locate the post being bookmarked
    messageUrl = remove_id_ending(message_json['object']['url'])
    domain = remove_domain_port(domain)
    post_filename = locate_post(base_dir, nickname, domain, messageUrl)
    if not post_filename:
        if debug:
            # fixed: this debug message previously said "like" due to a
            # copy/paste error from the like handler
            print('DEBUG: c2s inbox bookmark post not found ' +
                  'in inbox or outbox')
            print(messageUrl)
        return True
    # add this actor to the post's bookmarks collection
    update_bookmarks_collection(recent_posts_cache, base_dir, post_filename,
                                message_json['object']['url'],
                                message_json['actor'], domain, debug)
    # regenerate the html
    bookmarkedPostJson = load_json(post_filename, 0, 1)
    if bookmarkedPostJson:
        if debug:
            cachedPostFilename = \
                get_cached_post_filename(base_dir, nickname, domain,
                                         bookmarkedPostJson)
            print('Bookmarked post json: ' + str(bookmarkedPostJson))
            print('Bookmarked post nickname: ' + nickname + ' ' + domain)
            print('Bookmarked post cache: ' + str(cachedPostFilename))
        pageNumber = 1
        show_published_date_only = False
        showIndividualPostIcons = True
        manuallyApproveFollowers = \
            follower_approval_active(base_dir, nickname, domain)
        notDM = not is_dm(bookmarkedPostJson)
        individual_post_as_html(signing_priv_key_pem, False,
                                recent_posts_cache, max_recent_posts,
                                translate, pageNumber, base_dir,
                                session, cached_webfingers, person_cache,
                                nickname, domain, port, bookmarkedPostJson,
                                None, True, allow_deletion,
                                http_prefix, __version__,
                                'inbox',
                                yt_replace_domain,
                                twitter_replacement_domain,
                                show_published_date_only,
                                peertube_instances,
                                allow_local_network_access,
                                theme_name, system_language,
                                max_like_count, notDM,
                                showIndividualPostIcons,
                                manuallyApproveFollowers,
                                False, True, False, cw_lists,
                                lists_enabled)
    return True
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def _receive_undo_bookmark(recent_posts_cache: {},
                           session, handle: str, isGroup: bool, base_dir: str,
                           http_prefix: str, domain: str, port: int,
                           send_threads: [], postLog: [],
                           cached_webfingers: {},
                           person_cache: {}, message_json: {},
                           federation_list: [],
                           debug: bool, signing_priv_key_pem: str,
                           max_recent_posts: int, translate: {},
                           allow_deletion: bool,
                           yt_replace_domain: str,
                           twitter_replacement_domain: str,
                           peertube_instances: [],
                           allow_local_network_access: bool,
                           theme_name: str, system_language: str,
                           max_like_count: int, cw_lists: {},
                           lists_enabled: str) -> bool:
    """Receives an undo bookmark activity within the POST section of
    HTTPServer. This is a 'Remove' activity whose target is the account's
    /tlbookmarks timeline and whose object is a Document referencing a
    post url. Returns False if the activity is not a valid bookmark
    Remove; True once it has been handled, even if the bookmarked post
    could not be located locally.
    """
    # validate the basic shape of the activity
    if not message_json.get('type'):
        return False
    if message_json['type'] != 'Remove':
        return False
    if not has_actor(message_json, debug):
        return False
    if not message_json.get('target'):
        if debug:
            print('DEBUG: no target in inbox undo bookmark Remove')
        return False
    if not has_object_stringType(message_json, debug):
        return False
    if not isinstance(message_json['target'], str):
        if debug:
            print('DEBUG: inbox Remove bookmark target is not string')
        return False
    domain_full = get_full_domain(domain, port)
    nickname = handle.split('@')[0]
    # only the account holder can unbookmark from their own timeline
    if not message_json['actor'].endswith(domain_full + '/users/' + nickname):
        if debug:
            print('DEBUG: inbox undo bookmark Remove unexpected actor')
        return False
    if not message_json['target'].endswith(message_json['actor'] +
                                           '/tlbookmarks'):
        if debug:
            print('DEBUG: inbox undo bookmark Remove target invalid ' +
                  message_json['target'])
        return False
    if message_json['object']['type'] != 'Document':
        if debug:
            print('DEBUG: inbox undo bookmark Remove type is not Document')
        return False
    if not message_json['object'].get('url'):
        if debug:
            print('DEBUG: inbox undo bookmark Remove missing url')
        return False
    if '/statuses/' not in message_json['object']['url']:
        if debug:
            print('DEBUG: inbox undo bookmark Remove missing statuses un url')
        return False
    if debug:
        print('DEBUG: c2s inbox Remove bookmark ' +
              'request arrived in outbox')

    # locate the post being unbookmarked
    messageUrl = remove_id_ending(message_json['object']['url'])
    domain = remove_domain_port(domain)
    post_filename = locate_post(base_dir, nickname, domain, messageUrl)
    if not post_filename:
        if debug:
            # fixed: this debug message previously said "like" due to a
            # copy/paste error from the like handler
            print('DEBUG: c2s inbox bookmark post not found ' +
                  'in inbox or outbox')
            print(messageUrl)
        return True

    # remove this actor from the post's bookmarks collection
    undo_bookmarks_collection_entry(recent_posts_cache, base_dir,
                                    post_filename,
                                    message_json['object']['url'],
                                    message_json['actor'], domain, debug)
    # regenerate the html
    bookmarkedPostJson = load_json(post_filename, 0, 1)
    if bookmarkedPostJson:
        if debug:
            cachedPostFilename = \
                get_cached_post_filename(base_dir, nickname, domain,
                                         bookmarkedPostJson)
            print('Unbookmarked post json: ' + str(bookmarkedPostJson))
            print('Unbookmarked post nickname: ' + nickname + ' ' + domain)
            print('Unbookmarked post cache: ' + str(cachedPostFilename))
        pageNumber = 1
        show_published_date_only = False
        showIndividualPostIcons = True
        manuallyApproveFollowers = \
            follower_approval_active(base_dir, nickname, domain)
        notDM = not is_dm(bookmarkedPostJson)
        individual_post_as_html(signing_priv_key_pem, False,
                                recent_posts_cache, max_recent_posts,
                                translate, pageNumber, base_dir,
                                session, cached_webfingers, person_cache,
                                nickname, domain, port, bookmarkedPostJson,
                                None, True, allow_deletion,
                                http_prefix, __version__,
                                'inbox',
                                yt_replace_domain,
                                twitter_replacement_domain,
                                show_published_date_only,
                                peertube_instances,
                                allow_local_network_access,
                                theme_name, system_language,
                                max_like_count, notDM,
                                showIndividualPostIcons,
                                manuallyApproveFollowers,
                                False, True, False, cw_lists, lists_enabled)
    return True
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def _receive_delete(session, handle: str, isGroup: bool, base_dir: str,
                    http_prefix: str, domain: str, port: int,
                    send_threads: [], postLog: [], cached_webfingers: {},
                    person_cache: {}, message_json: {}, federation_list: [],
                    debug: bool, allow_deletion: bool,
                    recent_posts_cache: {}) -> bool:
    """Receives a Delete activity within the POST section of HTTPServer.
    Returns False if the activity is not a valid/permitted delete.
    Returns True once the delete has been handled, even if the post to
    be deleted could not be located locally.
    """
    if message_json['type'] != 'Delete':
        return False
    if not has_actor(message_json, debug):
        return False
    if debug:
        print('DEBUG: Delete activity arrived')
    if not has_object_string(message_json, debug):
        return False
    domain_full = get_full_domain(domain, port)
    deletePrefix = http_prefix + '://' + domain_full + '/'
    # when remote deletion is not allowed, only posts originating from
    # this instance may be deleted
    if (not allow_deletion and
        (not message_json['object'].startswith(deletePrefix) or
         not message_json['actor'].startswith(deletePrefix))):
        if debug:
            print('DEBUG: delete not permitted from other instances')
        return False
    if not message_json.get('to'):
        if debug:
            print('DEBUG: ' + message_json['type'] + ' has no "to" list')
        return False
    if not has_users_path(message_json['actor']):
        if debug:
            print('DEBUG: ' +
                  '"users" or "profile" missing from actor in ' +
                  message_json['type'])
        return False
    if '/statuses/' not in message_json['object']:
        if debug:
            print('DEBUG: "statuses" missing from object in ' +
                  message_json['type'])
        return False
    # only the author of a post may delete it: the object id must be
    # prefixed by the actor's own id
    if message_json['actor'] not in message_json['object']:
        if debug:
            print('DEBUG: actor is not the owner of the post to be deleted')
        # bug fix: previously execution fell through here, so the delete
        # proceeded even when the actor did not own the post
        return False
    if not os.path.isdir(base_dir + '/accounts/' + handle):
        # fixed: message previously said "like" (copy/paste error)
        print('DEBUG: unknown recipient of delete - ' + handle)
    # is this post in the outbox of the person?
    messageId = remove_id_ending(message_json['object'])
    remove_moderation_post_from_index(base_dir, messageId, debug)
    handleNickname = handle.split('@')[0]
    handleDomain = handle.split('@')[1]
    post_filename = locate_post(base_dir, handleNickname,
                                handleDomain, messageId)
    if not post_filename:
        if debug:
            print('DEBUG: delete post not found in inbox or outbox')
            print(messageId)
        return True
    delete_post(base_dir, http_prefix, handleNickname,
                handleDomain, post_filename, debug,
                recent_posts_cache)
    if debug:
        print('DEBUG: post deleted - ' + post_filename)

    # also delete any local blogs saved to the news actor
    if handleNickname != 'news' and handleDomain == domain_full:
        post_filename = locate_post(base_dir, 'news',
                                    handleDomain, messageId)
        if post_filename:
            delete_post(base_dir, http_prefix, 'news',
                        handleDomain, post_filename, debug,
                        recent_posts_cache)
            if debug:
                print('DEBUG: blog post deleted - ' + post_filename)
    return True
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def _receive_announce(recent_posts_cache: {},
                      session, handle: str, isGroup: bool, base_dir: str,
                      http_prefix: str,
                      domain: str, onion_domain: str, port: int,
                      send_threads: [], postLog: [], cached_webfingers: {},
                      person_cache: {}, message_json: {}, federation_list: [],
                      debug: bool, translate: {},
                      yt_replace_domain: str,
                      twitter_replacement_domain: str,
                      allow_local_network_access: bool,
                      theme_name: str, system_language: str,
                      signing_priv_key_pem: str,
                      max_recent_posts: int,
                      allow_deletion: bool,
                      peertube_instances: [],
                      max_like_count: int, cw_lists: {},
                      lists_enabled: str) -> bool:
    """Receives an announce activity within the POST section of HTTPServer.
    Validates the announce, applies domain/actor block lists, records it
    in the local post's announce collection, generates html for it (which
    also downloads the announced post) and tries to obtain the announcing
    actor's public key so that their avatar can be shown.
    Returns False if the activity is not a valid/permitted announce;
    True once it has been handled.
    """
    if message_json['type'] != 'Announce':
        return False
    # handle is expected to be nickname@domain
    if '@' not in handle:
        if debug:
            print('DEBUG: bad handle ' + handle)
        return False
    if not has_actor(message_json, debug):
        return False
    if debug:
        print('DEBUG: receiving announce on ' + handle)
    if not has_object_string(message_json, debug):
        return False
    if not message_json.get('to'):
        if debug:
            print('DEBUG: ' + message_json['type'] + ' has no "to" list')
        return False
    if not has_users_path(message_json['actor']):
        if debug:
            print('DEBUG: ' +
                  '"users" or "profile" missing from actor in ' +
                  message_json['type'])
        return False
    # announcing your own post is not permitted
    if is_self_announce(message_json):
        if debug:
            print('DEBUG: self-boost rejected')
        return False
    if not has_users_path(message_json['object']):
        if debug:
            print('DEBUG: ' +
                  '"users", "channel" or "profile" missing in ' +
                  message_json['type'])
        return False

    blocked_cache = {}
    prefixes = get_protocol_prefixes()
    # is the domain of the announce actor blocked?
    # strip the protocol prefix and any path to get the bare domain
    objectDomain = message_json['object']
    for prefix in prefixes:
        objectDomain = objectDomain.replace(prefix, '')
    if '/' in objectDomain:
        objectDomain = objectDomain.split('/')[0]
    if is_blocked_domain(base_dir, objectDomain):
        if debug:
            print('DEBUG: announced domain is blocked')
        return False
    if not os.path.isdir(base_dir + '/accounts/' + handle):
        print('DEBUG: unknown recipient of announce - ' + handle)
    # is the announce actor blocked?
    nickname = handle.split('@')[0]
    actorNickname = get_nickname_from_actor(message_json['actor'])
    actorDomain, actorPort = get_domain_from_actor(message_json['actor'])
    if is_blocked(base_dir, nickname, domain, actorNickname, actorDomain):
        print('Receive announce blocked for actor: ' +
              actorNickname + '@' + actorDomain)
        return False
    # also check the actor for the url being announced
    announcedActorNickname = get_nickname_from_actor(message_json['object'])
    announcedActorDomain, announcedActorPort = \
        get_domain_from_actor(message_json['object'])
    if is_blocked(base_dir, nickname, domain,
                  announcedActorNickname, announcedActorDomain):
        print('Receive announce object blocked for actor: ' +
              announcedActorNickname + '@' + announcedActorDomain)
        return False
    # is this post in the outbox of the person?
    post_filename = locate_post(base_dir, nickname, domain,
                                message_json['object'])
    if not post_filename:
        if debug:
            print('DEBUG: announce post not found in inbox or outbox')
            print(message_json['object'])
        return True
    # record the announcing actor within the post's shares collection
    update_announce_collection(recent_posts_cache, base_dir, post_filename,
                               message_json['actor'], nickname, domain, debug)
    if debug:
        print('DEBUG: Downloading announce post ' + message_json['actor'] +
              ' -> ' + message_json['object'])
    domain_full = get_full_domain(domain, port)

    # Generate html. This also downloads the announced post.
    pageNumber = 1
    show_published_date_only = False
    showIndividualPostIcons = True
    manuallyApproveFollowers = \
        follower_approval_active(base_dir, nickname, domain)
    # announces always go to a timeline, never to the DM inbox
    notDM = True
    if debug:
        print('Generating html for announce ' + message_json['id'])
    announceHtml = \
        individual_post_as_html(signing_priv_key_pem, True,
                                recent_posts_cache, max_recent_posts,
                                translate, pageNumber, base_dir,
                                session, cached_webfingers, person_cache,
                                nickname, domain, port, message_json,
                                None, True, allow_deletion,
                                http_prefix, __version__,
                                'inbox',
                                yt_replace_domain,
                                twitter_replacement_domain,
                                show_published_date_only,
                                peertube_instances,
                                allow_local_network_access,
                                theme_name, system_language,
                                max_like_count, notDM,
                                showIndividualPostIcons,
                                manuallyApproveFollowers,
                                False, True, False, cw_lists,
                                lists_enabled)
    if not announceHtml:
        print('WARN: Unable to generate html for announce ' +
              str(message_json))
    else:
        if debug:
            print('Generated announce html ' + announceHtml.replace('\n', ''))

    # fetch the post which was announced
    post_json_object = download_announce(session, base_dir,
                                         http_prefix,
                                         nickname, domain,
                                         message_json,
                                         __version__, translate,
                                         yt_replace_domain,
                                         twitter_replacement_domain,
                                         allow_local_network_access,
                                         recent_posts_cache, debug,
                                         system_language,
                                         domain_full, person_cache,
                                         signing_priv_key_pem,
                                         blocked_cache)
    if not post_json_object:
        print('WARN: unable to download announce: ' + str(message_json))
        # only remove the stored announce when the announced url is
        # neither on this domain nor on this instance's onion address
        notInOnion = True
        if onion_domain:
            if onion_domain in message_json['object']:
                notInOnion = False
        if domain not in message_json['object'] and notInOnion:
            if os.path.isfile(post_filename):
                # if the announce can't be downloaded then remove it
                try:
                    os.remove(post_filename)
                except OSError:
                    print('EX: _receive_announce unable to delete ' +
                          str(post_filename))
    else:
        if debug:
            print('DEBUG: Announce post downloaded for ' +
                  message_json['actor'] + ' -> ' + message_json['object'])
        # index any hashtags within the downloaded post
        store_hash_tags(base_dir, nickname, domain,
                        http_prefix, domain_full,
                        post_json_object, translate)
        # Try to obtain the actor for this person
        # so that their avatar can be shown
        lookupActor = None
        if post_json_object.get('attributedTo'):
            if isinstance(post_json_object['attributedTo'], str):
                lookupActor = post_json_object['attributedTo']
        else:
            if has_object_dict(post_json_object):
                if post_json_object['object'].get('attributedTo'):
                    attrib = post_json_object['object']['attributedTo']
                    if isinstance(attrib, str):
                        lookupActor = attrib
        if lookupActor:
            if has_users_path(lookupActor):
                # a status id was given; reduce it to the actor url
                if '/statuses/' in lookupActor:
                    lookupActor = lookupActor.split('/statuses/')[0]

                # for recent posts (within the last 3 hours) update the
                # text-to-speech state, marking the post with a .tts file
                if is_recent_post(post_json_object, 3):
                    if not os.path.isfile(post_filename + '.tts'):
                        domain_full = get_full_domain(domain, port)
                        update_speaker(base_dir, http_prefix,
                                       nickname, domain, domain_full,
                                       post_json_object, person_cache,
                                       translate, lookupActor,
                                       theme_name)
                        try:
                            with open(post_filename + '.tts',
                                      'w+') as ttsFile:
                                ttsFile.write('\n')
                        except OSError:
                            print('EX: unable to write recent post ' +
                                  post_filename)

                if debug:
                    print('DEBUG: Obtaining actor for announce post ' +
                          lookupActor)
                # retry a few times, since the remote instance may be
                # slow to respond
                for tries in range(6):
                    pubKey = \
                        get_person_pub_key(base_dir, session, lookupActor,
                                           person_cache, debug,
                                           __version__, http_prefix,
                                           domain, onion_domain,
                                           signing_priv_key_pem)
                    if pubKey:
                        if debug:
                            print('DEBUG: public key obtained for ' +
                                  'announce: ' + lookupActor)
                        break
                    if debug:
                        print('DEBUG: Retry ' + str(tries + 1) +
                              ' obtaining actor for ' + lookupActor)
                    time.sleep(5)
    if debug:
        print('DEBUG: announced/repeated post arrived in inbox')
    return True
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def _receive_undo_announce(recent_posts_cache: {},
                           session, handle: str, isGroup: bool, base_dir: str,
                           http_prefix: str, domain: str, port: int,
                           send_threads: [], postLog: [],
                           cached_webfingers: {},
                           person_cache: {}, message_json: {},
                           federation_list: [],
                           debug: bool) -> bool:
    """Receives an undo announce activity within the POST section of
    HTTPServer. Removes the actor from the post's announce collection
    and deletes the locally stored announce post.
    Returns False if the activity is not a valid undo announce;
    True once it has been handled.
    """
    if message_json['type'] != 'Undo':
        return False
    if not has_actor(message_json, debug):
        return False
    if not has_object_dict(message_json):
        return False
    if not has_object_string_object(message_json, debug):
        return False
    if message_json['object']['type'] != 'Announce':
        return False
    if not has_users_path(message_json['actor']):
        if debug:
            print('DEBUG: "users" or "profile" missing from actor in ' +
                  message_json['type'] + ' announce')
        return False
    if not os.path.isdir(base_dir + '/accounts/' + handle):
        print('DEBUG: unknown recipient of undo announce - ' + handle)
    # if this post in the outbox of the person?
    handleName = handle.split('@')[0]
    handleDom = handle.split('@')[1]
    post_filename = locate_post(base_dir, handleName, handleDom,
                                message_json['object']['object'])
    if not post_filename:
        if debug:
            print('DEBUG: undo announce post not found in inbox or outbox')
            print(message_json['object']['object'])
        return True
    if debug:
        print('DEBUG: announced/repeated post to be undone found in inbox')

    post_json_object = load_json(post_filename)
    if post_json_object:
        # bug fix: the original guard was
        # "if not post_json_object.get('type'):" followed by indexing
        # post_json_object['type'], which raised KeyError when the type
        # was absent and never rejected non-Announce posts
        if post_json_object.get('type') != 'Announce':
            if debug:
                print("DEBUG: Attempt to undo something " +
                      "which isn't an announcement")
            return False
    # remove the actor from the post's announce collection
    undo_announce_collection_entry(recent_posts_cache, base_dir,
                                   post_filename,
                                   message_json['actor'], domain, debug)
    # the stored announce itself is no longer needed
    if os.path.isfile(post_filename):
        try:
            os.remove(post_filename)
        except OSError:
            print('EX: _receive_undo_announce unable to delete ' +
                  str(post_filename))
    return True
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def json_post_allows_comments(post_json_object: {}) -> bool:
    """Returns true if the given post allows comments/replies
    """
    # flags on the top level of the post take precedence
    if 'commentsEnabled' in post_json_object:
        return post_json_object['commentsEnabled']
    if 'rejectReplies' in post_json_object:
        return not post_json_object['rejectReplies']
    # otherwise look inside the wrapped object, if there is one
    if post_json_object.get('object'):
        if not has_object_dict(post_json_object):
            return False
        wrapped = post_json_object['object']
        if 'commentsEnabled' in wrapped:
            return wrapped['commentsEnabled']
        if 'rejectReplies' in wrapped:
            return not wrapped['rejectReplies']
    # no flags found, so replies are permitted by default
    return True
2021-12-29 21:55:09 +00:00
def _post_allow_comments(post_filename: str) -> bool:
    """Returns true if the post stored in the given file
    allows comments/replies
    """
    loaded_post = load_json(post_filename)
    if loaded_post:
        return json_post_allows_comments(loaded_post)
    # unreadable or missing post, so don't permit replies
    return False
2020-08-21 18:32:16 +00:00
2021-12-28 20:32:11 +00:00
def populate_replies(base_dir: str, http_prefix: str, domain: str,
                     message_json: {}, max_replies: int, debug: bool) -> bool:
    """Updates the list of replies for a post on this domain if
    a reply to it arrives.
    Returns True when the reply id was recorded in the replied-to
    post's .replies file, False otherwise.
    """
    if not message_json.get('id'):
        return False
    if not has_object_dict(message_json):
        return False
    if not message_json['object'].get('inReplyTo'):
        return False
    if not message_json['object'].get('to'):
        return False
    replyTo = message_json['object']['inReplyTo']
    if not isinstance(replyTo, str):
        return False
    if debug:
        print('DEBUG: post contains a reply')
    # is this a reply to a post on this domain?
    if not replyTo.startswith(http_prefix + '://' + domain + '/'):
        if debug:
            print('DEBUG: post is a reply to another not on this domain')
            print(replyTo)
            print('Expected: ' + http_prefix + '://' + domain + '/')
        return False
    replyToNickname = get_nickname_from_actor(replyTo)
    if not replyToNickname:
        print('DEBUG: no nickname found for ' + replyTo)
        return False
    replyToDomain, replyToPort = get_domain_from_actor(replyTo)
    if not replyToDomain:
        if debug:
            print('DEBUG: no domain found for ' + replyTo)
        return False

    post_filename = locate_post(base_dir, replyToNickname,
                                replyToDomain, replyTo)
    if not post_filename:
        if debug:
            print('DEBUG: post may have expired - ' + replyTo)
        return False

    if not _post_allow_comments(post_filename):
        if debug:
            print('DEBUG: post does not allow comments - ' + replyTo)
        return False
    # populate a text file containing the ids of replies
    postRepliesFilename = post_filename.replace('.json', '.replies')
    messageId = remove_id_ending(message_json['id'])
    if os.path.isfile(postRepliesFilename):
        # bug fix: the file handles opened here were previously never
        # closed; read the file once within a context manager instead
        with open(postRepliesFilename, 'r') as repliesFile:
            repliesContent = repliesFile.read()
        numLines = len(repliesContent.splitlines())
        if numLines > max_replies:
            # don't allow the replies file to grow without limit
            return False
        if messageId not in repliesContent:
            try:
                with open(postRepliesFilename, 'a+') as repliesFile:
                    repliesFile.write(messageId + '\n')
            except OSError:
                print('EX: unable to append ' + postRepliesFilename)
    else:
        try:
            with open(postRepliesFilename, 'w+') as repliesFile:
                repliesFile.write(messageId + '\n')
        except OSError:
            print('EX: unable to write ' + postRepliesFilename)
    return True
2019-09-30 09:43:46 +00:00
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def _estimate_number_of_mentions(content: str) -> int:
2019-09-30 10:15:20 +00:00
"""Returns a rough estimate of the number of mentions
"""
2020-04-03 16:27:34 +00:00
return int(content.count('@') / 2)
2019-11-16 14:49:21 +00:00
2021-12-29 21:55:09 +00:00
def _estimate_number_of_emoji(content: str) -> int:
2019-11-16 14:49:21 +00:00
"""Returns a rough estimate of the number of emoji
"""
2020-04-03 16:27:34 +00:00
return int(content.count(':') / 2)
2019-11-16 14:49:21 +00:00
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def _valid_post_content(base_dir: str, nickname: str, domain: str,
                        message_json: {}, max_mentions: int, max_emoji: int,
                        allow_local_network_access: bool, debug: bool,
                        system_language: str,
                        http_prefix: str, domain_full: str,
                        person_cache: {}) -> bool:
    """Is the content of a received post valid?
    Check for bad html
    Check for hellthreads
    Check that the language is understood
    Check if it's a git patch
    Check number of tags and mentions is reasonable
    Returns True if the post is acceptable
    """
    if not has_object_dict(message_json):
        # no object dict, so nothing to check
        return True
    if not message_json['object'].get('content'):
        # no content to check
        return True

    # the published date must exist and be in UTC "Z" ISO format
    if not message_json['object'].get('published'):
        return False
    if 'T' not in message_json['object']['published']:
        return False
    if 'Z' not in message_json['object']['published']:
        return False
    if not valid_post_date(message_json['object']['published'], 90, debug):
        return False

    # if there is a content warning then it must be a valid string
    summary = None
    if message_json['object'].get('summary'):
        summary = message_json['object']['summary']
        if not isinstance(summary, str):
            print('WARN: content warning is not a string')
            return False
        if summary != valid_content_warning(summary):
            print('WARN: invalid content warning ' + summary)
            return False

    # check for patches before dangeousMarkup, which excludes code
    if is_git_patch(base_dir, nickname, domain,
                    message_json['object']['type'],
                    summary,
                    message_json['object']['content']):
        return True

    contentStr = get_base_content_from_post(message_json, system_language)
    if dangerous_markup(contentStr, allow_local_network_access):
        if message_json['object'].get('id'):
            print('REJECT ARBITRARY HTML: ' + message_json['object']['id'])
        print('REJECT ARBITRARY HTML: bad string in post - ' +
              contentStr)
        return False

    # check (rough) number of mentions
    mentionsEst = _estimate_number_of_mentions(contentStr)
    if mentionsEst > max_mentions:
        if message_json['object'].get('id'):
            print('REJECT HELLTHREAD: ' + message_json['object']['id'])
        print('REJECT HELLTHREAD: Too many mentions in post - ' +
              contentStr)
        return False
    if _estimate_number_of_emoji(contentStr) > max_emoji:
        if message_json['object'].get('id'):
            print('REJECT EMOJI OVERLOAD: ' + message_json['object']['id'])
        print('REJECT EMOJI OVERLOAD: Too many emoji in post - ' +
              contentStr)
        return False

    # check number of tags
    if message_json['object'].get('tag'):
        if not isinstance(message_json['object']['tag'], list):
            message_json['object']['tag'] = []
        else:
            if len(message_json['object']['tag']) > int(max_mentions * 2):
                if message_json['object'].get('id'):
                    print('REJECT: ' + message_json['object']['id'])
                # BUGFIX: the tag list must be converted with str()
                # before concatenation, otherwise this line raised
                # TypeError instead of logging the rejection
                print('REJECT: Too many tags in post - ' +
                      str(message_json['object']['tag']))
                return False

    # check that the post is in a language suitable for this account
    if not understood_post_language(base_dir, nickname, domain,
                                    message_json, system_language,
                                    http_prefix, domain_full,
                                    person_cache):
        return False

    # check for filtered content
    if is_filtered(base_dir, nickname, domain, contentStr):
        print('REJECT: content filtered')
        return False

    # if this is a reply then check that the original post allows comments
    if message_json['object'].get('inReplyTo'):
        if isinstance(message_json['object']['inReplyTo'], str):
            originalPostId = message_json['object']['inReplyTo']
            postPostFilename = locate_post(base_dir, nickname, domain,
                                           originalPostId)
            if postPostFilename:
                if not _post_allow_comments(postPostFilename):
                    print('REJECT: reply to post which does not ' +
                          'allow comments: ' + originalPostId)
                    return False

    # reject broken encrypted content
    if invalid_ciphertext(message_json['object']['content']):
        print('REJECT: malformed ciphertext in content')
        return False

    if debug:
        print('ACCEPT: post content is valid')
    return True
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def _obtain_avatar_for_reply_post(session, base_dir: str, http_prefix: str,
                                  domain: str, onion_domain: str,
                                  person_cache: {},
                                  post_json_object: {}, debug: bool,
                                  signing_priv_key_pem: str) -> None:
    """Tries to obtain the actor for the person being replied to
    so that their avatar can later be shown
    """
    if not has_object_dict(post_json_object):
        return
    if not post_json_object['object'].get('inReplyTo'):
        return

    lookupActor = post_json_object['object']['inReplyTo']
    if not lookupActor:
        return
    if not isinstance(lookupActor, str):
        return
    if not has_users_path(lookupActor):
        return
    # reduce a status url down to its actor url
    if '/statuses/' in lookupActor:
        lookupActor = lookupActor.split('/statuses/')[0]

    if debug:
        print('DEBUG: Obtaining actor for reply post ' + lookupActor)

    # make several attempts, pausing between each one
    for tries in range(6):
        pubKey = get_person_pub_key(base_dir, session, lookupActor,
                                    person_cache, debug,
                                    __version__, http_prefix,
                                    domain, onion_domain,
                                    signing_priv_key_pem)
        if pubKey:
            if debug:
                print('DEBUG: public key obtained for reply: ' +
                      lookupActor)
            break
        if debug:
            print('DEBUG: Retry ' + str(tries + 1) +
                  ' obtaining actor for ' + lookupActor)
        time.sleep(5)
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def _dm_notify(base_dir: str, handle: str, url: str) -> None:
    """Creates a notification that a new DM has arrived
    """
    account_path = base_dir + '/accounts/' + handle
    if not os.path.isdir(account_path):
        # no such account
        return
    dm_notify_filename = account_path + '/.newDM'
    if os.path.isfile(dm_notify_filename):
        # a DM notification is already pending
        return
    try:
        with open(dm_notify_filename, 'w+') as notify_file:
            notify_file.write(url)
    except OSError:
        print('EX: unable to write ' + dm_notify_filename)
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def _already_liked(base_dir: str, nickname: str, domain: str,
                   postUrl: str, likerActor: str) -> bool:
    """Is the given post already liked by the given handle?
    """
    post_filename = locate_post(base_dir, nickname, domain, postUrl)
    if not post_filename:
        return False
    post_json_object = load_json(post_filename, 1)
    if not post_json_object:
        return False
    if not has_object_dict(post_json_object):
        return False
    likes = post_json_object['object'].get('likes')
    if not likes:
        return False
    if not likes.get('items'):
        return False
    # search the likes collection for the given actor
    for like_item in likes['items']:
        if not like_item.get('type'):
            continue
        if not like_item.get('actor'):
            continue
        if like_item['type'] != 'Like':
            continue
        if like_item['actor'] == likerActor:
            return True
    return False
2021-12-29 21:55:09 +00:00
def _already_reacted(base_dir: str, nickname: str, domain: str,
                     postUrl: str, reactionActor: str,
                     emojiContent: str) -> bool:
    """Is the given post already emoji reacted by the given handle?
    """
    post_filename = locate_post(base_dir, nickname, domain, postUrl)
    if not post_filename:
        return False
    post_json_object = load_json(post_filename, 1)
    if not post_json_object:
        return False
    if not has_object_dict(post_json_object):
        return False
    reactions = post_json_object['object'].get('reactions')
    if not reactions:
        return False
    if not reactions.get('items'):
        return False
    # search the reactions collection for the given actor and emoji
    for reaction_item in reactions['items']:
        if not reaction_item.get('type'):
            continue
        if not reaction_item.get('content'):
            continue
        if not reaction_item.get('actor'):
            continue
        if reaction_item['type'] != 'EmojiReact':
            continue
        if reaction_item['content'] != emojiContent:
            continue
        if reaction_item['actor'] == reactionActor:
            return True
    return False
2021-12-29 21:55:09 +00:00
def _like_notify(base_dir: str, domain: str, onion_domain: str,
                 handle: str, actor: str, url: str) -> None:
    """Creates a notification that a like has arrived
    """
    # This is not you liking your own post
    if actor in url:
        return
    # check that the liked post was by this handle
    nickname = handle.split('@')[0]
    if '/' + domain + '/users/' + nickname not in url:
        if not onion_domain:
            return
        if '/' + onion_domain + '/users/' + nickname not in url:
            return
    accountDir = base_dir + '/accounts/' + handle
    # are like notifications enabled?
    notifyLikesEnabledFilename = accountDir + '/.notifyLikes'
    if not os.path.isfile(notifyLikesEnabledFilename):
        return
    likeFile = accountDir + '/.newLike'
    if os.path.isfile(likeFile):
        # BUGFIX: close the file after reading, rather than leaking the
        # handle via open(likeFile).read()
        with open(likeFile, 'r') as fp:
            if '##sent##' not in fp.read():
                # a previous like notification has not yet been sent
                return

    likerNickname = get_nickname_from_actor(actor)
    likerDomain, _ = get_domain_from_actor(actor)
    if likerNickname and likerDomain:
        likerHandle = likerNickname + '@' + likerDomain
    else:
        # fall back to using the raw actor url as the handle
        print('_like_notify likerHandle: ' +
              str(likerNickname) + '@' + str(likerDomain))
        likerHandle = actor
    if likerHandle != handle:
        likeStr = likerHandle + ' ' + url + '?likedBy=' + actor
        prevLikeFile = accountDir + '/.prevLike'
        # was there a previous like notification?
        if os.path.isfile(prevLikeFile):
            # is it the same as the current notification ?
            with open(prevLikeFile, 'r') as fp:
                prevLikeStr = fp.read()
                if prevLikeStr == likeStr:
                    return
        try:
            with open(prevLikeFile, 'w+') as fp:
                fp.write(likeStr)
        except OSError:
            print('EX: ERROR: unable to save previous like notification ' +
                  prevLikeFile)
        try:
            with open(likeFile, 'w+') as fp:
                fp.write(likeStr)
        except OSError:
            print('EX: ERROR: unable to write like notification file ' +
                  likeFile)
2020-07-08 19:49:15 +00:00
2021-12-29 21:55:09 +00:00
def _reaction_notify(base_dir: str, domain: str, onion_domain: str,
                     handle: str, actor: str,
                     url: str, emojiContent: str) -> None:
    """Creates a notification that an emoji reaction has arrived
    """
    # This is not you reacting to your own post
    if actor in url:
        return
    # check that the reaction post was by this handle
    nickname = handle.split('@')[0]
    if '/' + domain + '/users/' + nickname not in url:
        if not onion_domain:
            return
        if '/' + onion_domain + '/users/' + nickname not in url:
            return
    accountDir = base_dir + '/accounts/' + handle
    # are reaction notifications enabled?
    notifyReactionEnabledFilename = accountDir + '/.notifyReactions'
    if not os.path.isfile(notifyReactionEnabledFilename):
        return
    reactionFile = accountDir + '/.newReaction'
    if os.path.isfile(reactionFile):
        # BUGFIX: close the file after reading, rather than leaking the
        # handle via open(reactionFile).read()
        with open(reactionFile, 'r') as fp:
            if '##sent##' not in fp.read():
                # a previous reaction notification has not yet been sent
                return

    reactionNickname = get_nickname_from_actor(actor)
    reactionDomain, _ = get_domain_from_actor(actor)
    if reactionNickname and reactionDomain:
        reactionHandle = reactionNickname + '@' + reactionDomain
    else:
        # fall back to using the raw actor url as the handle
        print('_reaction_notify reactionHandle: ' +
              str(reactionNickname) + '@' + str(reactionDomain))
        reactionHandle = actor
    if reactionHandle != handle:
        reactionStr = \
            reactionHandle + ' ' + url + '?reactBy=' + actor + \
            ';emoj=' + emojiContent
        prevReactionFile = accountDir + '/.prevReaction'
        # was there a previous reaction notification?
        if os.path.isfile(prevReactionFile):
            # is it the same as the current notification ?
            with open(prevReactionFile, 'r') as fp:
                prevReactionStr = fp.read()
                if prevReactionStr == reactionStr:
                    return
        try:
            with open(prevReactionFile, 'w+') as fp:
                fp.write(reactionStr)
        except OSError:
            print('EX: ERROR: unable to save previous reaction ' +
                  'notification ' + prevReactionFile)
        try:
            with open(reactionFile, 'w+') as fp:
                fp.write(reactionStr)
        except OSError:
            print('EX: ERROR: unable to write reaction notification file ' +
                  reactionFile)
2021-12-29 21:55:09 +00:00
def _notify_post_arrival(base_dir: str, handle: str, url: str) -> None:
    """Creates a notification that a new post has arrived.
    This is for followed accounts with the notify checkbox enabled
    on the person options screen
    """
    account_path = base_dir + '/accounts/' + handle
    if not os.path.isdir(account_path):
        return
    notify_filename = account_path + '/.newNotifiedPost'
    if os.path.isfile(notify_filename):
        # check that the same notification is not repeatedly sent
        with open(notify_filename, 'r') as notify_file:
            if url in notify_file.read():
                return
    try:
        with open(notify_filename, 'w+') as notify_file:
            notify_file.write(url)
    except OSError:
        print('EX: unable to write ' + notify_filename)
2021-12-29 21:55:09 +00:00
def _reply_notify(base_dir: str, handle: str, url: str) -> None:
    """Creates a notification that a new reply has arrived
    """
    account_path = base_dir + '/accounts/' + handle
    if not os.path.isdir(account_path):
        # no such account
        return
    reply_notify_filename = account_path + '/.newReply'
    if os.path.isfile(reply_notify_filename):
        # a reply notification is already pending
        return
    try:
        with open(reply_notify_filename, 'w+') as notify_file:
            notify_file.write(url)
    except OSError:
        print('EX: unable to write ' + reply_notify_filename)
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def _git_patch_notify(base_dir: str, handle: str,
                      subject: str, content: str,
                      fromNickname: str, fromDomain: str) -> None:
    """Creates a notification that a new git patch has arrived
    """
    account_path = base_dir + '/accounts/' + handle
    if not os.path.isdir(account_path):
        return
    patch_notify_filename = account_path + '/.newPatch'
    cleaned_subject = subject.replace('[PATCH]', '').strip()
    # the sender of the patch is shown within the notification
    sender_handle = '@' + fromNickname + '@' + fromDomain
    notification = 'git ' + sender_handle + ' ' + cleaned_subject
    try:
        with open(patch_notify_filename, 'w+') as notify_file:
            notify_file.write(notification)
    except OSError:
        print('EX: unable to write ' + patch_notify_filename)
2020-05-02 17:16:24 +00:00
2021-12-29 21:55:09 +00:00
def _group_handle(base_dir: str, handle: str) -> bool:
    """Is the given account handle a group?
    """
    actorFile = base_dir + '/accounts/' + handle + '.json'
    if not os.path.isfile(actorFile):
        return False
    actor_json = load_json(actorFile)
    if not actor_json:
        return False
    # use get() so that an actor file without a 'type' field
    # returns False rather than raising KeyError
    return actor_json.get('type') == 'Group'
2020-04-03 16:27:34 +00:00
2019-10-04 12:22:56 +00:00
2021-12-29 21:55:09 +00:00
def _send_to_group_members(session, base_dir: str, handle: str, port: int,
                           post_json_object: {},
                           http_prefix: str, federation_list: [],
                           send_threads: [], postLog: [],
                           cached_webfingers: {},
                           person_cache: {}, debug: bool,
                           system_language: str,
                           onion_domain: str, i2p_domain: str,
                           signing_priv_key_pem: str) -> None:
    """When a post arrives for a group send it out to the group members
    The post is saved to the group's outbox, announced (boosted) by the
    group actor and then the announce is sent to the group's followers.
    """
    if debug:
        print('\n\n=========================================================')
        print(handle + ' sending to group members')

    # build the list of domains participating in shared item federation
    # from the comma separated config value
    shared_item_federation_tokens = {}
    shared_items_federated_domains = []
    shared_items_federated_domainsStr = \
        get_config_param(base_dir, 'shared_items_federated_domains')
    if shared_items_federated_domainsStr:
        siFederatedDomainsList = \
            shared_items_federated_domainsStr.split(',')
        for sharedFederatedDomain in siFederatedDomainsList:
            domainStr = sharedFederatedDomain.strip()
            shared_items_federated_domains.append(domainStr)

    # the group must have followers to send to
    followersFile = base_dir + '/accounts/' + handle + '/followers.txt'
    if not os.path.isfile(followersFile):
        return
    # the post must be addressed and have an object dict
    if not post_json_object.get('to'):
        return
    if not post_json_object.get('object'):
        return
    if not has_object_dict(post_json_object):
        return
    # strip any leading '!' group indicator from the nickname
    nickname = handle.split('@')[0].replace('!', '')
    domain = handle.split('@')[1]
    domain_full = get_full_domain(domain, port)
    # only handle posts which are addressed to this group actor
    groupActor = local_actor_url(http_prefix, nickname, domain_full)
    if groupActor not in post_json_object['to']:
        return
    cc = ''
    nickname = handle.split('@')[0].replace('!', '')
    # save to the group outbox so that replies will be to the group
    # rather than the original sender
    save_post_to_box(base_dir, http_prefix, None,
                     nickname, domain, post_json_object, 'outbox')

    post_id = remove_id_ending(post_json_object['object']['id'])
    if debug:
        print('Group announce: ' + post_id)
    # create an announce (boost) of the post by the group actor,
    # addressed to the group's followers
    announceJson = \
        create_announce(session, base_dir, federation_list,
                        nickname, domain, port,
                        groupActor + '/followers', cc,
                        http_prefix, post_id, False, False,
                        send_threads, postLog,
                        person_cache, cached_webfingers,
                        debug, __version__, signing_priv_key_pem)

    # deliver the announce to all of the group's followers
    send_to_followers_thread(session, base_dir, nickname, domain,
                             onion_domain, i2p_domain, port,
                             http_prefix, federation_list,
                             send_threads, postLog,
                             cached_webfingers, person_cache,
                             announceJson, debug, __version__,
                             shared_items_federated_domains,
                             shared_item_federation_tokens,
                             signing_priv_key_pem)
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def _inbox_update_calendar(base_dir: str, handle: str,
                           post_json_object: {}) -> None:
    """Detects whether the tag list on a post contains calendar events
    and if so saves the post id to a file in the calendar directory
    for the account
    """
    if not post_json_object.get('actor'):
        return
    if not has_object_dict(post_json_object):
        return
    if not post_json_object['object'].get('tag'):
        return
    if not isinstance(post_json_object['object']['tag'], list):
        return

    actor = post_json_object['actor']
    actorNickname = get_nickname_from_actor(actor)
    actorDomain, actorPort = get_domain_from_actor(actor)
    handleNickname = handle.split('@')[0]
    handleDomain = handle.split('@')[1]
    # only store events if this account has opted in to receiving
    # calendar events from the sending actor
    if not receiving_calendar_events(base_dir,
                                     handleNickname, handleDomain,
                                     actorNickname, actorDomain):
        return

    post_id = remove_id_ending(post_json_object['id']).replace('/', '#')

    # look for events within the tags list
    for tagDict in post_json_object['object']['tag']:
        if not tagDict.get('type'):
            continue
        if tagDict['type'] != 'Event':
            continue
        if not tagDict.get('startTime'):
            # an event without a start time cannot go on the calendar
            continue
        save_event_post(base_dir, handle, post_id, tagDict)
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def inbox_update_index(boxname: str, base_dir: str, handle: str,
                       destinationFilename: str, debug: bool) -> bool:
    """Updates the index of received posts
    The new entry is added to the top of the file
    """
    indexFilename = \
        base_dir + '/accounts/' + handle + '/' + boxname + '.index'
    if debug:
        print('DEBUG: Updating index ' + indexFilename)

    # reduce the destination down to its base filename
    if '/' + boxname + '/' in destinationFilename:
        destinationFilename = \
            destinationFilename.split('/' + boxname + '/')[1]
    if '/' in destinationFilename:
        destinationFilename = destinationFilename.split('/')[-1]

    written = False
    if not os.path.isfile(indexFilename):
        # no index exists yet, so create one with this single entry
        try:
            with open(indexFilename, 'w+') as indexFile:
                indexFile.write(destinationFilename + '\n')
            written = True
        except OSError as ex:
            print('EX: Failed to write initial entry to index ' + str(ex))
        return written

    # prepend the new entry to the existing index, avoiding duplicates
    try:
        with open(indexFilename, 'r+') as indexFile:
            content = indexFile.read()
            if destinationFilename + '\n' not in content:
                indexFile.seek(0, 0)
                indexFile.write(destinationFilename + '\n' + content)
                written = True
            return True
    except OSError as ex:
        print('EX: Failed to write entry to index ' + str(ex))
    return written
2019-10-20 10:25:38 +00:00
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def _update_last_seen(base_dir: str, handle: str, actor: str) -> None:
    """Updates the time when the given handle last saw the given actor
    This can later be used to indicate if accounts are dormant/abandoned/moved
    """
    if '@' not in handle:
        return
    nickname = handle.split('@')[0]
    domain = remove_domain_port(handle.split('@')[1])
    accountPath = acct_dir(base_dir, nickname, domain)
    if not os.path.isdir(accountPath):
        return
    # only track actors which this account follows
    if not is_following_actor(base_dir, nickname, domain, actor):
        return
    lastSeenPath = accountPath + '/lastseen'
    if not os.path.isdir(lastSeenPath):
        os.mkdir(lastSeenPath)
    lastSeenFilename = \
        lastSeenPath + '/' + actor.replace('/', '#') + '.txt'
    curr_time = datetime.datetime.utcnow()
    daysSinceEpoch = (curr_time - datetime.datetime(1970, 1, 1)).days
    # has the value changed?
    if os.path.isfile(lastSeenFilename):
        with open(lastSeenFilename, 'r') as lastSeenFile:
            if int(lastSeenFile.read()) == daysSinceEpoch:
                # value hasn't changed, so we can save writing
                # anything to file
                return
    try:
        with open(lastSeenFilename, 'w+') as lastSeenFile:
            lastSeenFile.write(str(daysSinceEpoch))
    except OSError:
        print('EX: unable to write ' + lastSeenFilename)
2021-12-29 21:55:09 +00:00
def _bounce_dm(senderPostId: str, session, http_prefix: str,
               base_dir: str, nickname: str, domain: str, port: int,
               sendingHandle: str, federation_list: [],
               send_threads: [], postLog: [],
               cached_webfingers: {}, person_cache: {},
               translate: {}, debug: bool,
               lastBounceMessage: [], system_language: str,
               signing_priv_key_pem: str,
               content_license_url: str) -> bool:
    """Sends a bounce message back to the sending handle
    if a DM has been rejected
    Returns True if a bounce DM was created and sent
    """
    print(nickname + '@' + domain +
          ' cannot receive DM from ' + sendingHandle +
          ' because they do not follow them')
    # Don't send out bounce messages too frequently.
    # Otherwise an adversary could try to DoS your instance
    # by continuously sending DMs to you
    curr_time = int(time.time())
    if curr_time - lastBounceMessage[0] < 60:
        return False
    # record the last time that a bounce was generated
    # (lastBounceMessage is a single element list, used as a
    # mutable container shared with the caller)
    lastBounceMessage[0] = curr_time

    senderNickname = sendingHandle.split('@')[0]
    # a leading '!' on the handle indicates a group account
    group_account = False
    if sendingHandle.startswith('!'):
        sendingHandle = sendingHandle[1:]
        group_account = True
    senderDomain = sendingHandle.split('@')[1]
    senderPort = port
    if ':' in senderDomain:
        # the sender domain includes a port number, so separate them
        senderPort = get_port_from_domain(senderDomain)
        senderDomain = remove_domain_port(senderDomain)
    cc = []
    # create the bounce DM as a reply to the rejected post,
    # with translated bounce text as its content
    subject = None
    content = translate['DM bounce']
    followersOnly = False
    saveToFile = False
    client_to_server = False
    commentsEnabled = False
    attachImageFilename = None
    mediaType = None
    imageDescription = ''
    city = 'London, England'
    inReplyTo = remove_id_ending(senderPostId)
    inReplyToAtomUri = None
    schedulePost = False
    eventDate = None
    eventTime = None
    location = None
    conversationId = None
    low_bandwidth = False
    post_json_object = \
        create_direct_message_post(base_dir, nickname, domain, port,
                                   http_prefix, content, followersOnly,
                                   saveToFile, client_to_server,
                                   commentsEnabled,
                                   attachImageFilename, mediaType,
                                   imageDescription, city,
                                   inReplyTo, inReplyToAtomUri,
                                   subject, debug, schedulePost,
                                   eventDate, eventTime, location,
                                   system_language, conversationId,
                                   low_bandwidth,
                                   content_license_url)
    if not post_json_object:
        print('WARN: unable to create bounce message to ' + sendingHandle)
        return False
    # bounce DM goes back to the sender
    print('Sending bounce DM to ' + sendingHandle)
    # NOTE(review): the final integer argument looks like a debug
    # identifier passed through to send_signed_json — confirm against
    # that function's signature
    send_signed_json(post_json_object, session, base_dir,
                     nickname, domain, port,
                     senderNickname, senderDomain, senderPort, cc,
                     http_prefix, False, False, federation_list,
                     send_threads, postLog, cached_webfingers,
                     person_cache, debug, __version__, None, group_account,
                     signing_priv_key_pem, 7238634)
    return True
2021-02-24 11:01:44 +00:00
2021-12-29 21:55:09 +00:00
def _is_valid_dm(base_dir: str, nickname: str, domain: str, port: int,
                 post_json_object: {}, updateIndexList: [],
                 session, http_prefix: str,
                 federation_list: [],
                 send_threads: [], postLog: [],
                 cached_webfingers: {},
                 person_cache: {},
                 translate: {}, debug: bool,
                 lastBounceMessage: [],
                 handle: str, system_language: str,
                 signing_priv_key_pem: str,
                 content_license_url: str) -> bool:
    """Is the given message a valid DM?
    If accepted, 'dm' is appended to updateIndexList and a DM
    notification is created. If the sender is not followed (and their
    domain is not allowed) a bounce DM may be sent back.
    """
    if nickname == 'inbox':
        # going to the shared inbox
        return True
    # check for the flag file which indicates to
    # only receive DMs from people you are following
    followDMsFilename = acct_dir(base_dir, nickname, domain) + '/.followDMs'
    if not os.path.isfile(followDMsFilename):
        # no flag file, so any DM is accepted
        # dm index will be updated
        updateIndexList.append('dm')
        actUrl = local_actor_url(http_prefix, nickname, domain)
        _dm_notify(base_dir, handle, actUrl + '/dm')
        return True

    # get the file containing following handles
    followingFilename = acct_dir(base_dir, nickname, domain) + '/following.txt'
    # who is sending a DM?
    if not post_json_object.get('actor'):
        return False
    sendingActor = post_json_object['actor']
    sendingActorNickname = \
        get_nickname_from_actor(sendingActor)
    if not sendingActorNickname:
        return False
    sendingActorDomain, sendingActorPort = \
        get_domain_from_actor(sendingActor)
    if not sendingActorDomain:
        return False
    # Is this DM to yourself? eg. a reminder
    sendingToSelf = False
    if sendingActorNickname == nickname and \
       sendingActorDomain == domain:
        sendingToSelf = True
    # check that the following file exists
    if not sendingToSelf:
        if not os.path.isfile(followingFilename):
            print('No following.txt file exists for ' +
                  nickname + '@' + domain +
                  ' so not accepting DM from ' +
                  sendingActorNickname + '@' +
                  sendingActorDomain)
            return False
    # Not sending to yourself
    if not sendingToSelf:
        # get the handle of the DM sender
        sendH = sendingActorNickname + '@' + sendingActorDomain
        # check the follow
        if not is_following_actor(base_dir, nickname, domain, sendH):
            # DMs may always be allowed from some domains
            if not dm_allowed_from_domain(base_dir,
                                          nickname, domain,
                                          sendingActorDomain):
                # send back a bounce DM
                if post_json_object.get('id') and \
                   post_json_object.get('object'):
                    # don't send bounces back to
                    # replies to bounce messages
                    obj = post_json_object['object']
                    if isinstance(obj, dict):
                        if not obj.get('inReplyTo'):
                            bouncedId = \
                                remove_id_ending(post_json_object['id'])
                            _bounce_dm(bouncedId,
                                       session, http_prefix,
                                       base_dir,
                                       nickname, domain,
                                       port, sendH,
                                       federation_list,
                                       send_threads, postLog,
                                       cached_webfingers,
                                       person_cache,
                                       translate, debug,
                                       lastBounceMessage,
                                       system_language,
                                       signing_priv_key_pem,
                                       content_license_url)
                # sender is neither followed nor from an allowed domain
                return False
    # DM is accepted
    # dm index will be updated
    updateIndexList.append('dm')
    actUrl = local_actor_url(http_prefix, nickname, domain)
    _dm_notify(base_dir, handle, actUrl + '/dm')
    return True
2021-12-29 21:55:09 +00:00
def _receive_question_vote(base_dir: str, nickname: str, domain: str,
                           http_prefix: str, handle: str, debug: bool,
                           post_json_object: {}, recent_posts_cache: {},
                           session, onion_domain: str,
                           i2p_domain: str, port: int,
                           federation_list: [], send_threads: [], postLog: [],
                           cached_webfingers: {}, person_cache: {},
                           signing_priv_key_pem: str,
                           max_recent_posts: int, translate: {},
                           allow_deletion: bool,
                           yt_replace_domain: str,
                           twitter_replacement_domain: str,
                           peertube_instances: [],
                           allow_local_network_access: bool,
                           theme_name: str, system_language: str,
                           max_like_count: int,
                           cw_lists: {}, lists_enabled: bool) -> None:
    """Updates the votes on a Question/poll
    """
    # if this post is a reply to a question then tally up the votes
    question_json, question_post_filename = \
        question_update_votes(base_dir, nickname, domain, post_json_object)
    if not question_json or not question_post_filename:
        return

    # remove any cached copy of the question post, so that it
    # will be recreated with the updated vote totals
    remove_post_from_cache(question_json, recent_posts_cache)
    cached_filename = \
        get_cached_post_filename(base_dir, nickname, domain, question_json)
    if cached_filename and os.path.isfile(cached_filename):
        try:
            os.remove(cached_filename)
        except OSError:
            print('EX: replytoQuestion unable to delete ' +
                  cached_filename)

    # regenerate the html for the question post
    approval_active = \
        follower_approval_active(base_dir, nickname, domain)
    not_a_dm = not is_dm(question_json)
    individual_post_as_html(signing_priv_key_pem, False,
                            recent_posts_cache, max_recent_posts,
                            translate, 1, base_dir,
                            session, cached_webfingers, person_cache,
                            nickname, domain, port, question_json,
                            None, True, allow_deletion,
                            http_prefix, __version__,
                            'inbox',
                            yt_replace_domain,
                            twitter_replacement_domain,
                            False,
                            peertube_instances,
                            allow_local_network_access,
                            theme_name, system_language,
                            max_like_count, not_a_dm,
                            True,
                            approval_active,
                            False, True, False, cw_lists,
                            lists_enabled)

    # add id to inbox index
    inbox_update_index('inbox', base_dir, handle,
                       question_post_filename, debug)

    # Is this a question created by this instance?
    id_prefix = http_prefix + '://' + domain
    if not question_json['object']['id'].startswith(id_prefix):
        return

    # the votes on a locally-created question have changed,
    # so send out an update to followers
    question_json['type'] = 'Update'
    send_to_followers_thread(session, base_dir, nickname, domain,
                             onion_domain, i2p_domain, port,
                             http_prefix, federation_list,
                             send_threads, postLog,
                             cached_webfingers, person_cache,
                             post_json_object, debug, __version__,
                             [], {},
                             signing_priv_key_pem)
def _create_reply_notification_file(base_dir: str, nickname: str, domain: str,
handle: str, debug: bool, postIsDM: bool,
post_json_object: {}, actor: str,
updateIndexList: [], http_prefix: str,
default_reply_interval_hrs: int) -> bool:
2021-11-04 12:29:53 +00:00
"""Generates a file indicating that a new reply has arrived
The file can then be used by other systems to create a notification
xmpp, matrix, email, etc
"""
2021-12-26 19:36:40 +00:00
is_replyToMutedPost = False
2021-11-04 12:29:53 +00:00
if postIsDM:
2021-12-26 19:36:40 +00:00
return is_replyToMutedPost
if not is_reply(post_json_object, actor):
return is_replyToMutedPost
2021-11-04 12:29:53 +00:00
if nickname == 'inbox':
2021-12-26 19:36:40 +00:00
return is_replyToMutedPost
2021-11-04 12:29:53 +00:00
# replies index will be updated
updateIndexList.append('tlreplies')
conversationId = None
2021-12-25 22:09:19 +00:00
if post_json_object['object'].get('conversation'):
conversationId = post_json_object['object']['conversation']
2021-11-04 12:29:53 +00:00
2021-12-25 22:09:19 +00:00
if not post_json_object['object'].get('inReplyTo'):
2021-12-26 19:36:40 +00:00
return is_replyToMutedPost
2021-12-25 22:09:19 +00:00
inReplyTo = post_json_object['object']['inReplyTo']
2021-11-04 12:29:53 +00:00
if not inReplyTo:
2021-12-26 19:36:40 +00:00
return is_replyToMutedPost
2021-11-04 12:29:53 +00:00
if not isinstance(inReplyTo, str):
2021-12-26 19:36:40 +00:00
return is_replyToMutedPost
2021-12-29 21:55:09 +00:00
if not is_muted_conv(base_dir, nickname, domain, inReplyTo,
conversationId):
2021-11-04 12:29:53 +00:00
# check if the reply is within the allowed time period
# after publication
replyIntervalHours = \
2021-12-28 10:25:50 +00:00
get_reply_interval_hours(base_dir, nickname, domain,
default_reply_interval_hrs)
2021-12-28 12:15:46 +00:00
if can_reply_to(base_dir, nickname, domain, inReplyTo,
replyIntervalHours):
2021-12-26 10:19:59 +00:00
actUrl = local_actor_url(http_prefix, nickname, domain)
2021-12-29 21:55:09 +00:00
_reply_notify(base_dir, handle, actUrl + '/tlreplies')
2021-11-04 12:29:53 +00:00
else:
if debug:
print('Reply to ' + inReplyTo + ' is outside of the ' +
'permitted interval of ' + str(replyIntervalHours) +
' hours')
return False
else:
2021-12-26 19:36:40 +00:00
is_replyToMutedPost = True
return is_replyToMutedPost
2021-11-04 12:29:53 +00:00
2021-12-29 21:55:09 +00:00
def _low_frequency_post_notification(base_dir: str, http_prefix: str,
nickname: str, domain: str,
port: int, handle: str,
postIsDM: bool, jsonObj: {}) -> None:
"""Should we notify that a post from this person has arrived?
This is for cases where the notify checkbox is enabled on the
person options screen
"""
if postIsDM:
return
if not jsonObj:
return
if not jsonObj.get('attributedTo'):
return
if not jsonObj.get('id'):
return
attributedTo = jsonObj['attributedTo']
if not isinstance(attributedTo, str):
return
2021-12-27 22:19:18 +00:00
fromNickname = get_nickname_from_actor(attributedTo)
2021-12-27 19:05:25 +00:00
fromDomain, fromPort = get_domain_from_actor(attributedTo)
2021-12-26 12:45:03 +00:00
fromDomainFull = get_full_domain(fromDomain, fromPort)
2021-12-29 21:55:09 +00:00
if notify_when_person_posts(base_dir, nickname, domain,
fromNickname, fromDomainFull):
2021-12-27 11:20:57 +00:00
post_id = remove_id_ending(jsonObj['id'])
2021-12-26 12:45:03 +00:00
domFull = get_full_domain(domain, port)
postLink = \
2021-12-26 10:19:59 +00:00
local_actor_url(http_prefix, nickname, domFull) + \
2021-12-26 19:47:06 +00:00
'?notifypost=' + post_id.replace('/', '-')
2021-12-29 21:55:09 +00:00
_notify_post_arrival(base_dir, handle, postLink)
2021-12-29 21:55:09 +00:00
def _check_for_git_patches(base_dir: str, nickname: str, domain: str,
handle: str, jsonObj: {}) -> int:
2021-11-04 13:05:04 +00:00
"""check for incoming git patches
"""
if not jsonObj:
return 0
if not jsonObj.get('content'):
return 0
if not jsonObj.get('summary'):
return 0
if not jsonObj.get('attributedTo'):
return 0
attributedTo = jsonObj['attributedTo']
if not isinstance(attributedTo, str):
return 0
2021-12-27 22:19:18 +00:00
fromNickname = get_nickname_from_actor(attributedTo)
2021-12-27 19:05:25 +00:00
fromDomain, fromPort = get_domain_from_actor(attributedTo)
2021-12-26 12:45:03 +00:00
fromDomainFull = get_full_domain(fromDomain, fromPort)
2021-12-29 21:55:09 +00:00
if receive_git_patch(base_dir, nickname, domain,
jsonObj['type'], jsonObj['summary'],
jsonObj['content'],
fromNickname, fromDomainFull):
_git_patch_notify(base_dir, handle,
jsonObj['summary'], jsonObj['content'],
fromNickname, fromDomainFull)
2021-11-04 13:05:04 +00:00
return 1
elif '[PATCH]' in jsonObj['content']:
print('WARN: git patch not accepted - ' + jsonObj['summary'])
return 2
return 0
2021-12-29 21:55:09 +00:00
def _inbox_after_initial(recent_posts_cache: {}, max_recent_posts: int,
                         session, keyId: str, handle: str, message_json: {},
                         base_dir: str, http_prefix: str, send_threads: [],
                         postLog: [], cached_webfingers: {}, person_cache: {},
                         queue: [], domain: str,
                         onion_domain: str, i2p_domain: str,
                         port: int, proxy_type: str,
                         federation_list: [], debug: bool,
                         queueFilename: str, destinationFilename: str,
                         max_replies: int, allow_deletion: bool,
                         max_mentions: int, max_emoji: int, translate: {},
                         unit_test: bool,
                         yt_replace_domain: str,
                         twitter_replacement_domain: str,
                         show_published_date_only: bool,
                         allow_local_network_access: bool,
                         peertube_instances: [],
                         lastBounceMessage: [],
                         theme_name: str, system_language: str,
                         max_like_count: int,
                         signing_priv_key_pem: str,
                         default_reply_interval_hrs: int,
                         cw_lists: {}, lists_enabled: str,
                         content_license_url: str) -> bool:
    """ Anything which needs to be done after initial checks have passed

    Dispatches the queued activity to the appropriate handler
    (like/reaction/bookmark/announce/delete and their undos) and, if it
    is an ordinary post, validates it, saves it to destinationFilename
    and updates the relevant timeline indexes and notifications.
    Returns True when the post was stored at destinationFilename
    (or already existed there), False when it was handled by one of the
    specific activity handlers or was rejected.
    """
    # the actor is the key id with any fragment (eg. #main-key) removed
    actor = keyId
    if '#' in actor:
        actor = keyId.split('#')[0]

    _update_last_seen(base_dir, handle, actor)

    postIsDM = False
    isGroup = _group_handle(base_dir, handle)

    # each handler below consumes the activity and stops further
    # processing by returning False from this function
    if _receive_like(recent_posts_cache,
                     session, handle, isGroup,
                     base_dir, http_prefix,
                     domain, port,
                     onion_domain,
                     send_threads, postLog,
                     cached_webfingers,
                     person_cache,
                     message_json,
                     federation_list,
                     debug, signing_priv_key_pem,
                     max_recent_posts, translate,
                     allow_deletion,
                     yt_replace_domain,
                     twitter_replacement_domain,
                     peertube_instances,
                     allow_local_network_access,
                     theme_name, system_language,
                     max_like_count, cw_lists, lists_enabled):
        if debug:
            print('DEBUG: Like accepted from ' + actor)
        return False

    if _receive_undo_like(recent_posts_cache,
                          session, handle, isGroup,
                          base_dir, http_prefix,
                          domain, port,
                          send_threads, postLog,
                          cached_webfingers,
                          person_cache,
                          message_json,
                          federation_list,
                          debug, signing_priv_key_pem,
                          max_recent_posts, translate,
                          allow_deletion,
                          yt_replace_domain,
                          twitter_replacement_domain,
                          peertube_instances,
                          allow_local_network_access,
                          theme_name, system_language,
                          max_like_count, cw_lists, lists_enabled):
        if debug:
            print('DEBUG: Undo like accepted from ' + actor)
        return False

    if _receive_reaction(recent_posts_cache,
                         session, handle, isGroup,
                         base_dir, http_prefix,
                         domain, port,
                         onion_domain,
                         send_threads, postLog,
                         cached_webfingers,
                         person_cache,
                         message_json,
                         federation_list,
                         debug, signing_priv_key_pem,
                         max_recent_posts, translate,
                         allow_deletion,
                         yt_replace_domain,
                         twitter_replacement_domain,
                         peertube_instances,
                         allow_local_network_access,
                         theme_name, system_language,
                         max_like_count, cw_lists, lists_enabled):
        if debug:
            print('DEBUG: Reaction accepted from ' + actor)
        return False

    if _receive_undo_reaction(recent_posts_cache,
                              session, handle, isGroup,
                              base_dir, http_prefix,
                              domain, port,
                              send_threads, postLog,
                              cached_webfingers,
                              person_cache,
                              message_json,
                              federation_list,
                              debug, signing_priv_key_pem,
                              max_recent_posts, translate,
                              allow_deletion,
                              yt_replace_domain,
                              twitter_replacement_domain,
                              peertube_instances,
                              allow_local_network_access,
                              theme_name, system_language,
                              max_like_count, cw_lists, lists_enabled):
        if debug:
            print('DEBUG: Undo reaction accepted from ' + actor)
        return False

    if _receive_bookmark(recent_posts_cache,
                         session, handle, isGroup,
                         base_dir, http_prefix,
                         domain, port,
                         send_threads, postLog,
                         cached_webfingers,
                         person_cache,
                         message_json,
                         federation_list,
                         debug, signing_priv_key_pem,
                         max_recent_posts, translate,
                         allow_deletion,
                         yt_replace_domain,
                         twitter_replacement_domain,
                         peertube_instances,
                         allow_local_network_access,
                         theme_name, system_language,
                         max_like_count, cw_lists, lists_enabled):
        if debug:
            print('DEBUG: Bookmark accepted from ' + actor)
        return False

    if _receive_undo_bookmark(recent_posts_cache,
                              session, handle, isGroup,
                              base_dir, http_prefix,
                              domain, port,
                              send_threads, postLog,
                              cached_webfingers,
                              person_cache,
                              message_json,
                              federation_list,
                              debug, signing_priv_key_pem,
                              max_recent_posts, translate,
                              allow_deletion,
                              yt_replace_domain,
                              twitter_replacement_domain,
                              peertube_instances,
                              allow_local_network_access,
                              theme_name, system_language,
                              max_like_count, cw_lists, lists_enabled):
        if debug:
            print('DEBUG: Undo bookmark accepted from ' + actor)
        return False

    # a Create wrapped inside an Announce is unwrapped here so the
    # inner activity is what gets processed
    if is_create_inside_announce(message_json):
        message_json = message_json['object']

    # NOTE: unlike the handlers above there is no early return after an
    # accepted announce - processing continues below so that the
    # announced post is also stored
    if _receive_announce(recent_posts_cache,
                         session, handle, isGroup,
                         base_dir, http_prefix,
                         domain, onion_domain, port,
                         send_threads, postLog,
                         cached_webfingers,
                         person_cache,
                         message_json,
                         federation_list,
                         debug, translate,
                         yt_replace_domain,
                         twitter_replacement_domain,
                         allow_local_network_access,
                         theme_name, system_language,
                         signing_priv_key_pem,
                         max_recent_posts,
                         allow_deletion,
                         peertube_instances,
                         max_like_count, cw_lists, lists_enabled):
        if debug:
            print('DEBUG: Announce accepted from ' + actor)

    if _receive_undo_announce(recent_posts_cache,
                              session, handle, isGroup,
                              base_dir, http_prefix,
                              domain, port,
                              send_threads, postLog,
                              cached_webfingers,
                              person_cache,
                              message_json,
                              federation_list,
                              debug):
        if debug:
            print('DEBUG: Undo announce accepted from ' + actor)
        return False

    if _receive_delete(session, handle, isGroup,
                       base_dir, http_prefix,
                       domain, port,
                       send_threads, postLog,
                       cached_webfingers,
                       person_cache,
                       message_json,
                       federation_list,
                       debug, allow_deletion,
                       recent_posts_cache):
        if debug:
            print('DEBUG: Delete accepted from ' + actor)
        return False

    if debug:
        print('DEBUG: initial checks passed')
        print('copy queue file from ' + queueFilename +
              ' to ' + destinationFilename)

    # already stored, nothing more to do
    if os.path.isfile(destinationFilename):
        return True

    # queue items created locally wrap the post in a 'post' field
    if message_json.get('postNickname'):
        post_json_object = message_json['post']
    else:
        post_json_object = message_json

    nickname = handle.split('@')[0]
    jsonObj = None
    domain_full = get_full_domain(domain, port)
    if _valid_post_content(base_dir, nickname, domain,
                           post_json_object, max_mentions, max_emoji,
                           allow_local_network_access, debug,
                           system_language, http_prefix,
                           domain_full, person_cache):
        # is the sending actor valid?
        if not valid_sending_actor(session, base_dir, nickname, domain,
                                   person_cache, post_json_object,
                                   signing_priv_key_pem, debug, unit_test):
            return False

        # jsonObj is the inner object of the post, when it is a dict,
        # otherwise the post itself
        if post_json_object.get('object'):
            jsonObj = post_json_object['object']
            if not isinstance(jsonObj, dict):
                jsonObj = None
        else:
            jsonObj = post_json_object

        # 2 means a git patch was rejected
        if _check_for_git_patches(base_dir, nickname, domain,
                                  handle, jsonObj) == 2:
            return False

        # replace YouTube links, so they get less tracking data
        replace_you_tube(post_json_object, yt_replace_domain, system_language)
        # replace twitter link domains, so that you can view twitter posts
        # without having an account
        replace_twitter(post_json_object, twitter_replacement_domain,
                        system_language)

        # list of indexes to be updated
        updateIndexList = ['inbox']
        populate_replies(base_dir, http_prefix, domain, post_json_object,
                         max_replies, debug)

        _receive_question_vote(base_dir, nickname, domain,
                               http_prefix, handle, debug,
                               post_json_object, recent_posts_cache,
                               session, onion_domain, i2p_domain, port,
                               federation_list, send_threads, postLog,
                               cached_webfingers, person_cache,
                               signing_priv_key_pem,
                               max_recent_posts, translate,
                               allow_deletion,
                               yt_replace_domain,
                               twitter_replacement_domain,
                               peertube_instances,
                               allow_local_network_access,
                               theme_name, system_language,
                               max_like_count,
                               cw_lists, lists_enabled)

        is_replyToMutedPost = False

        if not isGroup:
            # create a DM notification file if needed
            postIsDM = is_dm(post_json_object)
            if postIsDM:
                if not _is_valid_dm(base_dir, nickname, domain, port,
                                    post_json_object, updateIndexList,
                                    session, http_prefix,
                                    federation_list,
                                    send_threads, postLog,
                                    cached_webfingers,
                                    person_cache,
                                    translate, debug,
                                    lastBounceMessage,
                                    handle, system_language,
                                    signing_priv_key_pem,
                                    content_license_url):
                    return False

            # get the actor being replied to
            actor = local_actor_url(http_prefix, nickname, domain_full)

            # create a reply notification file if needed
            is_replyToMutedPost = \
                _create_reply_notification_file(base_dir, nickname, domain,
                                                handle, debug, postIsDM,
                                                post_json_object, actor,
                                                updateIndexList, http_prefix,
                                                default_reply_interval_hrs)

            if is_image_media(session, base_dir, http_prefix,
                              nickname, domain, post_json_object,
                              translate,
                              yt_replace_domain,
                              twitter_replacement_domain,
                              allow_local_network_access,
                              recent_posts_cache, debug, system_language,
                              domain_full, person_cache,
                              signing_priv_key_pem):
                # media index will be updated
                updateIndexList.append('tlmedia')
            if is_blog_post(post_json_object):
                # blogs index will be updated
                updateIndexList.append('tlblogs')

        # get the avatar for a reply/announce
        _obtain_avatar_for_reply_post(session, base_dir,
                                      http_prefix, domain, onion_domain,
                                      person_cache, post_json_object, debug,
                                      signing_priv_key_pem)

        # save the post to file
        if save_json(post_json_object, destinationFilename):
            _low_frequency_post_notification(base_dir, http_prefix,
                                             nickname, domain, port,
                                             handle, postIsDM, jsonObj)

            # If this is a reply to a muted post then also mute it.
            # This enables you to ignore a threat that's getting boring
            if is_replyToMutedPost:
                print('MUTE REPLY: ' + destinationFilename)
                destinationFilenameMuted = destinationFilename + '.muted'
                try:
                    with open(destinationFilenameMuted, 'w+') as muteFile:
                        muteFile.write('\n')
                except OSError:
                    print('EX: unable to write ' + destinationFilenameMuted)

            # update the indexes for different timelines
            for boxname in updateIndexList:
                if not inbox_update_index(boxname, base_dir, handle,
                                          destinationFilename, debug):
                    print('ERROR: unable to update ' + boxname + ' index')
                else:
                    if boxname == 'inbox':
                        # only announce recent posts via the speaker
                        # (text-to-speech) endpoint
                        if is_recent_post(post_json_object, 3):
                            domain_full = get_full_domain(domain, port)
                            update_speaker(base_dir, http_prefix,
                                           nickname, domain, domain_full,
                                           post_json_object, person_cache,
                                           translate, None, theme_name)
                    if not unit_test:
                        if debug:
                            print('Saving inbox post as html to cache')
                        htmlCacheStartTime = time.time()
                        handleName = handle.split('@')[0]
                        allow_local_net_access = allow_local_network_access
                        show_pub_date_only = show_published_date_only
                        _inbox_store_post_to_html_cache(recent_posts_cache,
                                                        max_recent_posts,
                                                        translate, base_dir,
                                                        http_prefix,
                                                        session,
                                                        cached_webfingers,
                                                        person_cache,
                                                        handleName,
                                                        domain, port,
                                                        post_json_object,
                                                        allow_deletion,
                                                        boxname,
                                                        show_pub_date_only,
                                                        peertube_instances,
                                                        allow_local_net_access,
                                                        theme_name,
                                                        system_language,
                                                        max_like_count,
                                                        signing_priv_key_pem,
                                                        cw_lists,
                                                        lists_enabled)
                        if debug:
                            time_diff = \
                                str(int((time.time() - htmlCacheStartTime) *
                                        1000))
                            print('Saved ' + boxname +
                                  ' post as html to cache in ' +
                                  time_diff + ' mS')

            handleName = handle.split('@')[0]

            # is this an edit of a previous post?
            # in Mastodon "delete and redraft"
            # NOTE: this must be done before update_conversation is called
            editedFilename = \
                edited_post_filename(base_dir, handleName, domain,
                                     post_json_object, debug, 300)

            update_conversation(base_dir, handleName, domain,
                                post_json_object)

            # If this was an edit then delete the previous
            # version of the post
            if editedFilename:
                delete_post(base_dir, http_prefix,
                            nickname, domain, editedFilename,
                            debug, recent_posts_cache)

            # store the id of the last post made by this actor
            _store_last_post_id(base_dir, nickname, domain, post_json_object)

            _inbox_update_calendar(base_dir, handle, post_json_object)

            store_hash_tags(base_dir, handleName, domain,
                            http_prefix, domain_full,
                            post_json_object, translate)

            # send the post out to group members
            if isGroup:
                _send_to_group_members(session, base_dir, handle, port,
                                       post_json_object,
                                       http_prefix, federation_list,
                                       send_threads,
                                       postLog, cached_webfingers,
                                       person_cache,
                                       debug, system_language,
                                       onion_domain, i2p_domain,
                                       signing_priv_key_pem)

    # if the post wasn't saved
    if not os.path.isfile(destinationFilename):
        return False
    return True
2020-04-03 16:27:34 +00:00
2021-12-28 19:33:29 +00:00
def clear_queue_items(base_dir: str, queue: []) -> None:
    """Clears the queue for each account
    """
    removed_ctr = 0
    queue.clear()
    accounts_dir = base_dir + '/accounts'
    if os.path.isdir(accounts_dir):
        for account in os.listdir(accounts_dir):
            queue_dir = accounts_dir + '/' + account + '/queue'
            if not os.path.isdir(queue_dir):
                continue
            # delete each queued item file for this account
            for qfile in os.listdir(queue_dir):
                qpath = os.path.join(queue_dir, qfile)
                if not os.path.isfile(qpath):
                    continue
                try:
                    os.remove(qpath)
                    removed_ctr += 1
                except OSError:
                    print('EX: clear_queue_items unable to delete ' +
                          qfile)
    if removed_ctr > 0:
        print('Removed ' + str(removed_ctr) + ' inbox queue items')
2021-12-29 21:55:09 +00:00
def _restore_queue_items(base_dir: str, queue: []) -> None:
2019-07-12 21:09:23 +00:00
"""Checks the queue for each account and appends filenames
"""
2019-08-15 16:45:07 +00:00
queue.clear()
2021-12-25 16:17:53 +00:00
for subdir, dirs, files in os.walk(base_dir + '/accounts'):
2019-07-12 21:09:23 +00:00
for account in dirs:
2021-12-25 16:17:53 +00:00
queueDir = base_dir + '/accounts/' + account + '/queue'
2020-06-02 09:05:55 +00:00
if not os.path.isdir(queueDir):
continue
for queuesubdir, queuedirs, queuefiles in os.walk(queueDir):
for qfile in queuefiles:
queue.append(os.path.join(queueDir, qfile))
2021-06-05 21:09:11 +00:00
break
2020-12-13 22:13:45 +00:00
break
2020-04-03 16:27:34 +00:00
if len(queue) > 0:
print('Restored ' + str(len(queue)) + ' inbox queue items')
2019-09-02 21:52:43 +00:00
2020-04-03 16:27:34 +00:00
2021-12-28 20:32:11 +00:00
def run_inbox_queue_watchdog(project_version: str, httpd) -> None:
    """This tries to keep the inbox thread running even if it dies
    """
    print('Starting inbox queue watchdog')
    # keep a pristine clone of the thread to use as a template
    # whenever a restart is needed
    inbox_queue_original = httpd.thrInboxQueue.clone(run_inbox_queue)
    httpd.thrInboxQueue.start()
    while True:
        time.sleep(20)
        if httpd.thrInboxQueue.is_alive() and \
           not httpd.restart_inbox_queue:
            continue
        # the thread died or a restart was requested
        httpd.restart_inbox_queue_in_progress = True
        httpd.thrInboxQueue.kill()
        httpd.thrInboxQueue = inbox_queue_original.clone(run_inbox_queue)
        httpd.inbox_queue.clear()
        httpd.thrInboxQueue.start()
        print('Restarting inbox queue...')
        httpd.restart_inbox_queue_in_progress = False
        httpd.restart_inbox_queue = False
2019-09-02 21:52:43 +00:00
2020-04-03 16:27:34 +00:00
2021-12-29 21:55:09 +00:00
def _inbox_quota_exceeded(queue: {}, queueFilename: str,
queueJson: {}, quotasDaily: {}, quotasPerMin: {},
domain_max_posts_per_day: int,
account_max_posts_per_day: int,
debug: bool) -> bool:
"""limit the number of posts which can arrive per domain per day
"""
postDomain = queueJson['postDomain']
if not postDomain:
return False
2021-12-25 21:13:55 +00:00
if domain_max_posts_per_day > 0:
if quotasDaily['domains'].get(postDomain):
if quotasDaily['domains'][postDomain] > \
2021-12-25 21:13:55 +00:00
domain_max_posts_per_day:
print('Queue: Quota per day - Maximum posts for ' +
postDomain + ' reached (' +
2021-12-25 21:13:55 +00:00
str(domain_max_posts_per_day) + ')')
if len(queue) > 0:
try:
os.remove(queueFilename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-29 21:55:09 +00:00
print('EX: _inbox_quota_exceeded unable to delete ' +
2021-10-29 18:48:15 +00:00
str(queueFilename))
queue.pop(0)
return True
quotasDaily['domains'][postDomain] += 1
else:
quotasDaily['domains'][postDomain] = 1
if quotasPerMin['domains'].get(postDomain):
domainMaxPostsPerMin = \
2021-12-25 21:13:55 +00:00
int(domain_max_posts_per_day / (24 * 60))
if domainMaxPostsPerMin < 5:
domainMaxPostsPerMin = 5
if quotasPerMin['domains'][postDomain] > \
domainMaxPostsPerMin:
print('Queue: Quota per min - Maximum posts for ' +
postDomain + ' reached (' +
str(domainMaxPostsPerMin) + ')')
if len(queue) > 0:
try:
os.remove(queueFilename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-29 21:55:09 +00:00
print('EX: _inbox_quota_exceeded unable to delete ' +
2021-10-29 18:48:15 +00:00
str(queueFilename))
queue.pop(0)
return True
quotasPerMin['domains'][postDomain] += 1
else:
quotasPerMin['domains'][postDomain] = 1
2021-12-25 21:18:07 +00:00
if account_max_posts_per_day > 0:
postHandle = queueJson['postNickname'] + '@' + postDomain
if quotasDaily['accounts'].get(postHandle):
if quotasDaily['accounts'][postHandle] > \
2021-12-25 21:18:07 +00:00
account_max_posts_per_day:
print('Queue: Quota account posts per day -' +
' Maximum posts for ' +
postHandle + ' reached (' +
2021-12-25 21:18:07 +00:00
str(account_max_posts_per_day) + ')')
if len(queue) > 0:
try:
os.remove(queueFilename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-29 21:55:09 +00:00
print('EX: _inbox_quota_exceeded unable to delete ' +
2021-10-29 18:48:15 +00:00
str(queueFilename))
queue.pop(0)
return True
quotasDaily['accounts'][postHandle] += 1
else:
quotasDaily['accounts'][postHandle] = 1
if quotasPerMin['accounts'].get(postHandle):
accountMaxPostsPerMin = \
2021-12-25 21:18:07 +00:00
int(account_max_posts_per_day / (24 * 60))
if accountMaxPostsPerMin < 5:
accountMaxPostsPerMin = 5
if quotasPerMin['accounts'][postHandle] > \
accountMaxPostsPerMin:
print('Queue: Quota account posts per min -' +
' Maximum posts for ' +
postHandle + ' reached (' +
str(accountMaxPostsPerMin) + ')')
if len(queue) > 0:
try:
os.remove(queueFilename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-29 21:55:09 +00:00
print('EX: _inbox_quota_exceeded unable to delete ' +
2021-10-29 18:48:15 +00:00
str(queueFilename))
queue.pop(0)
return True
quotasPerMin['accounts'][postHandle] += 1
else:
quotasPerMin['accounts'][postHandle] = 1
if debug:
2021-12-25 21:18:07 +00:00
if account_max_posts_per_day > 0 or domain_max_posts_per_day > 0:
pprint(quotasDaily)
return False
2021-12-29 21:55:09 +00:00
def _check_json_signature(base_dir: str, queueJson: {}) -> (bool, bool):
"""check if a json signature exists on this post
"""
hasJsonSignature = False
jwebsigType = None
originalJson = queueJson['original']
if not originalJson.get('@context') or \
not originalJson.get('signature'):
return hasJsonSignature, jwebsigType
if not isinstance(originalJson['signature'], dict):
return hasJsonSignature, jwebsigType
# see https://tools.ietf.org/html/rfc7515
jwebsig = originalJson['signature']
# signature exists and is of the expected type
if not jwebsig.get('type') or \
not jwebsig.get('signatureValue'):
return hasJsonSignature, jwebsigType
jwebsigType = jwebsig['type']
if jwebsigType == 'RsaSignature2017':
2021-12-29 21:55:09 +00:00
if has_valid_context(originalJson):
hasJsonSignature = True
else:
unknownContextsFile = \
2021-12-25 16:17:53 +00:00
base_dir + '/accounts/unknownContexts.txt'
unknownContext = str(originalJson['@context'])
2021-10-27 22:05:44 +00:00
print('unrecognized @context: ' + unknownContext)
alreadyUnknown = False
if os.path.isfile(unknownContextsFile):
if unknownContext in \
open(unknownContextsFile).read():
alreadyUnknown = True
if not alreadyUnknown:
2021-11-25 21:18:53 +00:00
try:
with open(unknownContextsFile, 'a+') as unknownFile:
unknownFile.write(unknownContext + '\n')
except OSError:
2021-11-25 22:22:54 +00:00
print('EX: unable to append ' + unknownContextsFile)
else:
2021-10-27 22:05:44 +00:00
print('Unrecognized jsonld signature type: ' + jwebsigType)
unknownSignaturesFile = \
2021-12-25 16:17:53 +00:00
base_dir + '/accounts/unknownJsonSignatures.txt'
alreadyUnknown = False
if os.path.isfile(unknownSignaturesFile):
if jwebsigType in \
open(unknownSignaturesFile).read():
alreadyUnknown = True
if not alreadyUnknown:
2021-11-25 21:18:53 +00:00
try:
with open(unknownSignaturesFile, 'a+') as unknownFile:
unknownFile.write(jwebsigType + '\n')
except OSError:
2021-11-25 22:22:54 +00:00
print('EX: unable to append ' + unknownSignaturesFile)
return hasJsonSignature, jwebsigType
2021-12-29 21:55:09 +00:00
def _receive_follow_request(session, base_dir: str, http_prefix: str,
                            port: int, send_threads: [], postLog: [],
                            cached_webfingers: {}, person_cache: {},
                            message_json: {}, federation_list: [],
                            debug: bool, project_version: str,
                            max_followers: int, onion_domain: str,
                            signing_priv_key_pem: str,
                            unit_test: bool) -> bool:
    """Receives a follow request within the POST section of HTTPServer

    Returns True when the activity was consumed (stored for manual
    approval, rejected for a policy reason, or the follower was
    recorded and an Accept sent); returns False when the activity is
    not a follow request or failed validation.
    NOTE(review): some policy rejections return True and others False;
    presumably True means "remove from queue without retry" — confirm
    against the caller in run_inbox_queue.
    """
    # Peertube sends 'Join' activities rather than 'Follow'
    if not message_json['type'].startswith('Follow'):
        if not message_json['type'].startswith('Join'):
            return False
    print('Receiving follow request')
    if not has_actor(message_json, debug):
        return False
    if not has_users_path(message_json['actor']):
        if debug:
            print('DEBUG: users/profile/accounts/channel missing from actor')
        return False
    # domain of the account making the follow request
    domain, tempPort = get_domain_from_actor(message_json['actor'])
    fromPort = port
    domain_full = get_full_domain(domain, tempPort)
    if tempPort:
        fromPort = tempPort
    # is the sender's domain within the federation whitelist (if any)?
    if not domain_permitted(domain, federation_list):
        if debug:
            print('DEBUG: follower from domain not permitted - ' + domain)
        return False
    nickname = get_nickname_from_actor(message_json['actor'])
    if not nickname:
        # single user instance
        nickname = 'dev'
        if debug:
            print('DEBUG: follow request does not contain a ' +
                  'nickname. Assuming single user instance.')
    # ensure the activity has a 'to' field, defaulting to the
    # followed actor
    if not message_json.get('to'):
        message_json['to'] = message_json['object']
    if not has_users_path(message_json['object']):
        if debug:
            print('DEBUG: users/profile/channel/accounts ' +
                  'not found within object')
        return False
    # domain of the account being followed
    domainToFollow, tempPort = get_domain_from_actor(message_json['object'])
    if not domain_permitted(domainToFollow, federation_list):
        if debug:
            print('DEBUG: follow domain not permitted ' + domainToFollow)
        return True
    domainToFollowFull = get_full_domain(domainToFollow, tempPort)
    nicknameToFollow = get_nickname_from_actor(message_json['object'])
    if not nicknameToFollow:
        if debug:
            print('DEBUG: follow request does not contain a ' +
                  'nickname for the account followed')
        return True
    # system accounts (e.g. shared inboxes) cannot be followed
    if is_system_account(nicknameToFollow):
        if debug:
            print('DEBUG: Cannot follow system account - ' +
                  nicknameToFollow)
        return True
    # enforce the instance-wide cap on followers per account
    if max_followers > 0:
        if get_no_of_followers(base_dir,
                               nicknameToFollow, domainToFollow,
                               True) > max_followers:
            print('WARN: ' + nicknameToFollow +
                  ' has reached their maximum number of followers')
            return True
    handleToFollow = nicknameToFollow + '@' + domainToFollow
    if domainToFollow == domain:
        # local follow: the followed account must exist on this instance
        if not os.path.isdir(base_dir + '/accounts/' + handleToFollow):
            if debug:
                print('DEBUG: followed account not found - ' +
                      base_dir + '/accounts/' + handleToFollow)
            return True
    # already following? Then there is nothing to do
    if is_follower_of_person(base_dir,
                             nicknameToFollow, domainToFollowFull,
                             nickname, domain_full):
        if debug:
            print('DEBUG: ' + nickname + '@' + domain +
                  ' is already a follower of ' +
                  nicknameToFollow + '@' + domainToFollow)
        return True
    approveHandle = nickname + '@' + domain_full
    # is the actor sending the request valid?
    if not valid_sending_actor(session, base_dir,
                               nicknameToFollow, domainToFollow,
                               person_cache, message_json,
                               signing_priv_key_pem, debug, unit_test):
        print('REJECT spam follow request ' + approveHandle)
        return False
    # what is the followers policy?
    if follow_approval_required(base_dir, nicknameToFollow,
                                domainToFollow, debug, approveHandle):
        print('Follow approval is required')
        # rate-limit pending follow requests, with a stricter
        # threshold for onion and i2p origins
        if domain.endswith('.onion'):
            if no_of_follow_requests(base_dir,
                                     nicknameToFollow, domainToFollow,
                                     nickname, domain, fromPort,
                                     'onion') > 5:
                print('Too many follow requests from onion addresses')
                return False
        elif domain.endswith('.i2p'):
            if no_of_follow_requests(base_dir,
                                     nicknameToFollow, domainToFollow,
                                     nickname, domain, fromPort,
                                     'i2p') > 5:
                print('Too many follow requests from i2p addresses')
                return False
        else:
            if no_of_follow_requests(base_dir,
                                     nicknameToFollow, domainToFollow,
                                     nickname, domain, fromPort,
                                     '') > 10:
                print('Too many follow requests')
                return False
        # Get the actor for the follower and add it to the cache.
        # Getting their public key has the same result
        if debug:
            print('Obtaining the following actor: ' + message_json['actor'])
        if not get_person_pub_key(base_dir, session, message_json['actor'],
                                  person_cache, debug, project_version,
                                  http_prefix, domainToFollow, onion_domain,
                                  signing_priv_key_pem):
            # NOTE(review): failure to fetch the key is logged but does
            # not abort storing the request — confirm this is intended
            if debug:
                print('Unable to obtain following actor: ' +
                      message_json['actor'])
        group_account = \
            has_group_type(base_dir, message_json['actor'], person_cache)
        # group-to-group follows are not allowed
        if group_account and is_group_account(base_dir, nickname, domain):
            print('Group cannot follow a group')
            return False
        print('Storing follow request for approval')
        return store_follow_request(base_dir,
                                    nicknameToFollow, domainToFollow, port,
                                    nickname, domain, fromPort,
                                    message_json, debug, message_json['actor'],
                                    group_account)
    else:
        print('Follow request does not require approval ' + approveHandle)
        # update the followers
        accountToBeFollowed = \
            acct_dir(base_dir, nicknameToFollow, domainToFollow)
        if os.path.isdir(accountToBeFollowed):
            followersFilename = accountToBeFollowed + '/followers.txt'
            # for actors which don't follow the mastodon
            # /users/ path convention store the full actor
            if '/users/' not in message_json['actor']:
                approveHandle = message_json['actor']
            # Get the actor for the follower and add it to the cache.
            # Getting their public key has the same result
            if debug:
                print('Obtaining the following actor: ' +
                      message_json['actor'])
            if not get_person_pub_key(base_dir, session, message_json['actor'],
                                      person_cache, debug, project_version,
                                      http_prefix, domainToFollow,
                                      onion_domain, signing_priv_key_pem):
                if debug:
                    print('Unable to obtain following actor: ' +
                          message_json['actor'])
            print('Updating followers file: ' +
                  followersFilename + ' adding ' + approveHandle)
            if os.path.isfile(followersFilename):
                if approveHandle not in open(followersFilename).read():
                    group_account = \
                        has_group_type(base_dir,
                                       message_json['actor'], person_cache)
                    if debug:
                        print(approveHandle + ' / ' + message_json['actor'] +
                              ' is Group: ' + str(group_account))
                    if group_account and \
                       is_group_account(base_dir, nickname, domain):
                        print('Group cannot follow a group')
                        return False
                    try:
                        with open(followersFilename, 'r+') as followersFile:
                            content = followersFile.read()
                            if approveHandle + '\n' not in content:
                                # newest follower is prepended;
                                # group followers carry a '!' prefix
                                followersFile.seek(0, 0)
                                if not group_account:
                                    followersFile.write(approveHandle +
                                                        '\n' + content)
                                else:
                                    followersFile.write('!' + approveHandle +
                                                        '\n' + content)
                    except Exception as ex:
                        print('WARN: ' +
                              'Failed to write entry to followers file ' +
                              str(ex))
            else:
                # first follower: create the followers file
                try:
                    with open(followersFilename, 'w+') as followersFile:
                        followersFile.write(approveHandle + '\n')
                except OSError:
                    print('EX: unable to write ' + followersFilename)
    # send an Accept back to the follower
    print('Beginning follow accept')
    return followed_account_accepts(session, base_dir, http_prefix,
                                    nicknameToFollow, domainToFollow, port,
                                    nickname, domain, fromPort,
                                    message_json['actor'], federation_list,
                                    message_json, send_threads, postLog,
                                    cached_webfingers, person_cache,
                                    debug, project_version, True,
                                    signing_priv_key_pem)
2021-12-28 20:32:11 +00:00
def run_inbox_queue(recent_posts_cache: {}, max_recent_posts: int,
project_version: str,
base_dir: str, http_prefix: str,
send_threads: [], postLog: [],
cached_webfingers: {}, person_cache: {}, queue: [],
domain: str,
onion_domain: str, i2p_domain: str,
port: int, proxy_type: str,
federation_list: [], max_replies: int,
domain_max_posts_per_day: int,
account_max_posts_per_day: int,
allow_deletion: bool, debug: bool, max_mentions: int,
max_emoji: int, translate: {}, unit_test: bool,
yt_replace_domain: str,
twitter_replacement_domain: str,
show_published_date_only: bool,
max_followers: int,
allow_local_network_access: bool,
peertube_instances: [],
verify_all_signatures: bool,
theme_name: str, system_language: str,
max_like_count: int, signing_priv_key_pem: str,
default_reply_interval_hrs: int,
cw_lists: {}) -> None:
"""Processes received items and moves them to the appropriate
directories
2019-07-04 12:23:53 +00:00
"""
2020-04-03 16:27:34 +00:00
currSessionTime = int(time.time())
2021-12-25 22:33:20 +00:00
session_last_update = currSessionTime
2020-06-24 09:04:58 +00:00
print('Starting new session when starting inbox queue')
2021-12-28 16:56:57 +00:00
session = create_session(proxy_type)
2020-04-03 16:27:34 +00:00
inboxHandle = 'inbox@' + domain
2019-07-04 12:23:53 +00:00
if debug:
print('DEBUG: Inbox queue running')
2019-07-12 21:09:23 +00:00
# if queue processing was interrupted (eg server crash)
# then this loads any outstanding items back into the queue
2021-12-29 21:55:09 +00:00
_restore_queue_items(base_dir, queue)
2019-07-15 10:22:19 +00:00
2020-03-25 10:21:25 +00:00
# keep track of numbers of incoming posts per day
2020-04-03 16:27:34 +00:00
quotasLastUpdateDaily = int(time.time())
quotasDaily = {
2019-07-15 10:22:19 +00:00
'domains': {},
'accounts': {}
}
2020-04-03 16:27:34 +00:00
quotasLastUpdatePerMin = int(time.time())
quotasPerMin = {
2020-03-25 10:36:37 +00:00
'domains': {},
'accounts': {}
}
2020-04-03 16:27:34 +00:00
heartBeatCtr = 0
queueRestoreCtr = 0
2019-09-03 08:46:26 +00:00
# time when the last DM bounce message was sent
# This is in a list so that it can be changed by reference
2021-12-29 21:55:09 +00:00
# within _bounce_dm
lastBounceMessage = [int(time.time())]
# how long it takes for broch mode to lapse
brochLapseDays = random.randrange(7, 14)
2019-07-04 12:23:53 +00:00
while True:
2020-04-16 18:25:59 +00:00
time.sleep(1)
2019-09-03 08:46:26 +00:00
# heartbeat to monitor whether the inbox queue is running
2021-03-14 21:37:33 +00:00
heartBeatCtr += 1
2020-04-03 16:27:34 +00:00
if heartBeatCtr >= 10:
2021-02-15 22:26:25 +00:00
# turn off broch mode after it has timed out
2021-12-25 18:38:19 +00:00
if broch_modeLapses(base_dir, brochLapseDays):
brochLapseDays = random.randrange(7, 14)
2020-04-16 10:14:05 +00:00
print('>>> Heartbeat Q:' + str(len(queue)) + ' ' +
2020-04-03 16:27:34 +00:00
'{:%F %T}'.format(datetime.datetime.now()))
heartBeatCtr = 0
if len(queue) == 0:
2019-09-03 09:11:33 +00:00
# restore any remaining queue items
2020-04-03 16:27:34 +00:00
queueRestoreCtr += 1
if queueRestoreCtr >= 30:
queueRestoreCtr = 0
2021-12-29 21:55:09 +00:00
_restore_queue_items(base_dir, queue)
2020-04-16 09:49:57 +00:00
continue
2020-04-16 10:14:05 +00:00
2021-12-26 13:17:46 +00:00
curr_time = int(time.time())
2020-04-16 09:49:57 +00:00
# recreate the session periodically
2021-12-26 13:17:46 +00:00
if not session or curr_time - session_last_update > 21600:
2020-06-24 09:40:17 +00:00
print('Regenerating inbox queue session at 6hr interval')
2021-12-28 16:56:57 +00:00
session = create_session(proxy_type)
2020-06-08 20:18:02 +00:00
if not session:
continue
2021-12-26 13:17:46 +00:00
session_last_update = curr_time
2020-04-16 09:49:57 +00:00
# oldest item first
queue.sort()
queueFilename = queue[0]
if not os.path.isfile(queueFilename):
print("Queue: queue item rejected because it has no file: " +
queueFilename)
if len(queue) > 0:
queue.pop(0)
continue
2019-07-04 12:23:53 +00:00
2021-03-14 19:22:58 +00:00
if debug:
print('Loading queue item ' + queueFilename)
2020-03-22 21:16:02 +00:00
2020-04-16 09:49:57 +00:00
# Load the queue json
2021-12-26 15:13:34 +00:00
queueJson = load_json(queueFilename, 1)
2020-04-16 09:49:57 +00:00
if not queueJson:
2021-12-28 20:32:11 +00:00
print('Queue: run_inbox_queue failed to load inbox queue item ' +
2020-04-16 09:49:57 +00:00
queueFilename)
# Assume that the file is probably corrupt/unreadable
if len(queue) > 0:
queue.pop(0)
# delete the queue file
if os.path.isfile(queueFilename):
try:
os.remove(queueFilename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-28 20:32:11 +00:00
print('EX: run_inbox_queue 1 unable to delete ' +
2021-10-29 18:48:15 +00:00
str(queueFilename))
2020-04-16 09:49:57 +00:00
continue
2020-03-22 21:16:02 +00:00
2020-04-16 09:49:57 +00:00
# clear the daily quotas for maximum numbers of received posts
2021-12-26 13:17:46 +00:00
if curr_time - quotasLastUpdateDaily > 60 * 60 * 24:
2020-04-16 09:49:57 +00:00
quotasDaily = {
'domains': {},
'accounts': {}
}
2021-12-26 13:17:46 +00:00
quotasLastUpdateDaily = curr_time
2020-04-16 09:49:57 +00:00
2021-12-26 13:17:46 +00:00
if curr_time - quotasLastUpdatePerMin > 60:
# clear the per minute quotas for maximum numbers of received posts
2020-04-16 09:49:57 +00:00
quotasPerMin = {
'domains': {},
'accounts': {}
}
# also check if the json signature enforcement has changed
2021-12-30 13:56:38 +00:00
verifyAllSigs = get_config_param(base_dir, "verifyAllSignatures")
if verifyAllSigs is not None:
2021-12-25 18:40:32 +00:00
verify_all_signatures = verifyAllSigs
# change the last time that this was done
2021-12-26 13:17:46 +00:00
quotasLastUpdatePerMin = curr_time
2020-04-16 09:49:57 +00:00
2021-12-29 21:55:09 +00:00
if _inbox_quota_exceeded(queue, queueFilename,
queueJson, quotasDaily, quotasPerMin,
domain_max_posts_per_day,
account_max_posts_per_day, debug):
continue
2019-07-15 10:22:19 +00:00
2021-03-14 19:22:58 +00:00
if debug and queueJson.get('actor'):
2020-08-23 14:45:58 +00:00
print('Obtaining public key for actor ' + queueJson['actor'])
2020-03-22 21:16:02 +00:00
2020-04-16 09:49:57 +00:00
# Try a few times to obtain the public key
pubKey = None
keyId = None
for tries in range(8):
2020-04-03 16:27:34 +00:00
keyId = None
2020-04-16 09:49:57 +00:00
signatureParams = \
queueJson['httpHeaders']['signature'].split(',')
for signatureItem in signatureParams:
if signatureItem.startswith('keyId='):
if '"' in signatureItem:
keyId = signatureItem.split('"')[1]
break
if not keyId:
print('Queue: No keyId in signature: ' +
queueJson['httpHeaders']['signature'])
pubKey = None
break
pubKey = \
2021-12-29 21:55:09 +00:00
get_person_pub_key(base_dir, session, keyId,
person_cache, debug,
project_version, http_prefix,
domain, onion_domain, signing_priv_key_pem)
2020-04-16 09:49:57 +00:00
if pubKey:
2019-07-04 17:31:41 +00:00
if debug:
2020-04-16 09:49:57 +00:00
print('DEBUG: public key: ' + str(pubKey))
break
2019-07-04 17:31:41 +00:00
2019-08-15 08:36:49 +00:00
if debug:
2020-04-16 09:49:57 +00:00
print('DEBUG: Retry ' + str(tries+1) +
' obtaining public key for ' + keyId)
2020-04-16 18:25:59 +00:00
time.sleep(1)
2019-07-04 12:23:53 +00:00
2020-04-16 09:49:57 +00:00
if not pubKey:
2021-03-14 19:39:00 +00:00
if debug:
print('Queue: public key could not be obtained from ' + keyId)
2020-04-16 09:49:57 +00:00
if os.path.isfile(queueFilename):
try:
os.remove(queueFilename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-28 20:32:11 +00:00
print('EX: run_inbox_queue 2 unable to delete ' +
2021-10-29 18:48:15 +00:00
str(queueFilename))
2020-04-16 09:49:57 +00:00
if len(queue) > 0:
queue.pop(0)
continue
2021-01-03 09:44:33 +00:00
# check the http header signature
2020-04-16 09:49:57 +00:00
if debug:
2021-01-03 09:44:33 +00:00
print('DEBUG: checking http header signature')
2020-04-16 09:49:57 +00:00
pprint(queueJson['httpHeaders'])
postStr = json.dumps(queueJson['post'])
httpSignatureFailed = False
2021-12-29 21:55:09 +00:00
if not verify_post_headers(http_prefix,
pubKey,
queueJson['httpHeaders'],
queueJson['path'], False,
queueJson['digest'],
postStr,
debug):
httpSignatureFailed = True
2020-04-16 09:49:57 +00:00
print('Queue: Header signature check failed')
2021-03-14 18:16:24 +00:00
pprint(queueJson['httpHeaders'])
else:
if debug:
print('DEBUG: http header signature check success')
2020-03-22 21:16:02 +00:00
# check if a json signature exists on this post
2021-12-25 16:17:53 +00:00
hasJsonSignature, jwebsigType = \
2021-12-29 21:55:09 +00:00
_check_json_signature(base_dir, queueJson)
2021-01-05 10:54:50 +00:00
# strict enforcement of json signatures
if not hasJsonSignature:
if httpSignatureFailed:
2021-02-14 15:45:42 +00:00
if jwebsigType:
print('Queue: Header signature check failed and does ' +
'not have a recognised jsonld signature type ' +
jwebsigType)
else:
print('Queue: Header signature check failed and ' +
'does not have jsonld signature')
if debug:
pprint(queueJson['httpHeaders'])
2021-12-25 18:40:32 +00:00
if verify_all_signatures:
originalJson = queueJson['original']
print('Queue: inbox post does not have a jsonld signature ' +
keyId + ' ' + str(originalJson))
2021-12-25 18:40:32 +00:00
if httpSignatureFailed or verify_all_signatures:
if os.path.isfile(queueFilename):
try:
os.remove(queueFilename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-28 20:32:11 +00:00
print('EX: run_inbox_queue 3 unable to delete ' +
2021-10-29 18:48:15 +00:00
str(queueFilename))
if len(queue) > 0:
queue.pop(0)
continue
else:
2021-12-25 18:40:32 +00:00
if httpSignatureFailed or verify_all_signatures:
# use the original json message received, not one which
# may have been modified along the way
originalJson = queueJson['original']
2021-12-29 21:55:09 +00:00
if not verify_json_signature(originalJson, pubKey):
if debug:
print('WARN: jsonld inbox signature check failed ' +
keyId + ' ' + pubKey + ' ' + str(originalJson))
else:
print('WARN: jsonld inbox signature check failed ' +
keyId)
if os.path.isfile(queueFilename):
try:
os.remove(queueFilename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-28 20:32:11 +00:00
print('EX: run_inbox_queue 4 unable to delete ' +
2021-10-29 18:48:15 +00:00
str(queueFilename))
if len(queue) > 0:
queue.pop(0)
continue
else:
if httpSignatureFailed:
print('jsonld inbox signature check success ' +
'via relay ' + keyId)
else:
print('jsonld inbox signature check success ' + keyId)
2020-04-16 09:49:57 +00:00
# set the id to the same as the post filename
# This makes the filename and the id consistent
# if queueJson['post'].get('id'):
2021-09-28 10:28:42 +00:00
# queueJson['post']['id'] = queueJson['id']
2020-04-16 09:49:57 +00:00
2021-12-29 21:55:09 +00:00
if _receive_undo(session,
base_dir, http_prefix, port,
send_threads, postLog,
cached_webfingers,
person_cache,
queueJson['post'],
federation_list,
debug):
2020-04-16 09:49:57 +00:00
print('Queue: Undo accepted from ' + keyId)
if os.path.isfile(queueFilename):
try:
os.remove(queueFilename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-28 20:32:11 +00:00
print('EX: run_inbox_queue 5 unable to delete ' +
2021-10-29 18:48:15 +00:00
str(queueFilename))
2020-04-16 09:49:57 +00:00
if len(queue) > 0:
queue.pop(0)
continue
2019-07-17 10:34:00 +00:00
2020-04-16 09:49:57 +00:00
if debug:
print('DEBUG: checking for follow requests')
2021-12-29 21:55:09 +00:00
if _receive_follow_request(session,
base_dir, http_prefix, port,
send_threads, postLog,
cached_webfingers,
person_cache,
queueJson['post'],
federation_list,
debug, project_version,
max_followers, onion_domain,
signing_priv_key_pem, unit_test):
2020-04-16 09:49:57 +00:00
if os.path.isfile(queueFilename):
try:
os.remove(queueFilename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-28 20:32:11 +00:00
print('EX: run_inbox_queue 6 unable to delete ' +
2021-10-29 18:48:15 +00:00
str(queueFilename))
2020-04-16 09:49:57 +00:00
if len(queue) > 0:
queue.pop(0)
print('Queue: Follow activity for ' + keyId +
2020-06-28 19:04:43 +00:00
' removed from queue')
2020-04-16 09:49:57 +00:00
continue
else:
2019-08-15 16:05:28 +00:00
if debug:
2020-04-16 09:49:57 +00:00
print('DEBUG: No follow requests')
2021-12-29 21:55:09 +00:00
if receive_accept_reject(session,
base_dir, http_prefix, domain, port,
send_threads, postLog,
cached_webfingers, person_cache,
queueJson['post'],
federation_list, debug):
2020-04-16 09:49:57 +00:00
print('Queue: Accept/Reject received from ' + keyId)
if os.path.isfile(queueFilename):
try:
os.remove(queueFilename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-28 20:32:11 +00:00
print('EX: run_inbox_queue 7 unable to delete ' +
2021-10-29 18:48:15 +00:00
str(queueFilename))
2020-04-16 09:49:57 +00:00
if len(queue) > 0:
queue.pop(0)
continue
2019-07-06 15:17:21 +00:00
2021-12-29 21:55:09 +00:00
if _receive_update_activity(recent_posts_cache, session,
base_dir, http_prefix,
domain, port,
send_threads, postLog,
cached_webfingers,
person_cache,
queueJson['post'],
federation_list,
queueJson['postNickname'],
debug):
2021-03-14 19:46:46 +00:00
if debug:
print('Queue: Update accepted from ' + keyId)
2020-04-16 09:49:57 +00:00
if os.path.isfile(queueFilename):
try:
os.remove(queueFilename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-28 20:32:11 +00:00
print('EX: run_inbox_queue 8 unable to delete ' +
2021-10-29 18:48:15 +00:00
str(queueFilename))
2020-04-16 09:49:57 +00:00
if len(queue) > 0:
queue.pop(0)
continue
# get recipients list
recipientsDict, recipientsDictFollowers = \
2021-12-29 21:55:09 +00:00
_inbox_post_recipients(base_dir, queueJson['post'],
http_prefix, domain, port, debug)
2020-04-16 09:49:57 +00:00
if len(recipientsDict.items()) == 0 and \
len(recipientsDictFollowers.items()) == 0:
2021-03-14 19:22:58 +00:00
if debug:
print('Queue: no recipients were resolved ' +
'for post arriving in inbox')
2020-04-16 09:49:57 +00:00
if os.path.isfile(queueFilename):
try:
os.remove(queueFilename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-28 20:32:11 +00:00
print('EX: run_inbox_queue 9 unable to delete ' +
2021-10-29 18:48:15 +00:00
str(queueFilename))
2020-04-16 09:49:57 +00:00
if len(queue) > 0:
queue.pop(0)
continue
2019-07-09 14:20:23 +00:00
2020-04-16 09:49:57 +00:00
# if there are only a small number of followers then
# process them as if they were specifically
# addresses to particular accounts
noOfFollowItems = len(recipientsDictFollowers.items())
if noOfFollowItems > 0:
# always deliver to individual inboxes
if noOfFollowItems < 999999:
if debug:
print('DEBUG: moving ' + str(noOfFollowItems) +
' inbox posts addressed to followers')
for handle, postItem in recipientsDictFollowers.items():
recipientsDict[handle] = postItem
recipientsDictFollowers = {}
# recipientsList = [recipientsDict, recipientsDictFollowers]
if debug:
print('*************************************')
print('Resolved recipients list:')
pprint(recipientsDict)
print('Resolved followers list:')
pprint(recipientsDictFollowers)
print('*************************************')
# Copy any posts addressed to followers into the shared inbox
# this avoid copying file multiple times to potentially many
# individual inboxes
if len(recipientsDictFollowers) > 0:
sharedInboxPostFilename = \
queueJson['destination'].replace(inboxHandle, inboxHandle)
if not os.path.isfile(sharedInboxPostFilename):
2021-12-26 14:47:21 +00:00
save_json(queueJson['post'], sharedInboxPostFilename)
2020-04-16 09:49:57 +00:00
2021-12-30 13:56:38 +00:00
lists_enabled = get_config_param(base_dir, "listsEnabled")
content_license_url = get_config_param(base_dir, "contentLicenseUrl")
2020-04-16 09:49:57 +00:00
# for posts addressed to specific accounts
for handle, capsId in recipientsDict.items():
destination = \
queueJson['destination'].replace(inboxHandle, handle)
2021-12-29 21:55:09 +00:00
_inbox_after_initial(recent_posts_cache,
max_recent_posts,
session, keyId, handle,
queueJson['post'],
base_dir, http_prefix,
send_threads, postLog,
cached_webfingers,
person_cache, queue,
domain,
onion_domain, i2p_domain,
port, proxy_type,
federation_list,
debug,
queueFilename, destination,
max_replies, allow_deletion,
max_mentions, max_emoji,
translate, unit_test,
yt_replace_domain,
twitter_replacement_domain,
show_published_date_only,
allow_local_network_access,
peertube_instances,
lastBounceMessage,
theme_name, system_language,
max_like_count,
signing_priv_key_pem,
default_reply_interval_hrs,
cw_lists, lists_enabled,
content_license_url)
2020-09-27 18:35:35 +00:00
if debug:
pprint(queueJson['post'])
2021-03-14 19:22:58 +00:00
print('Queue: Queue post accepted')
2020-04-16 09:49:57 +00:00
if os.path.isfile(queueFilename):
try:
os.remove(queueFilename)
2021-11-25 18:42:38 +00:00
except OSError:
2021-12-28 20:32:11 +00:00
print('EX: run_inbox_queue 10 unable to delete ' +
2021-10-29 18:48:15 +00:00
str(queueFilename))
2020-04-16 09:49:57 +00:00
if len(queue) > 0:
queue.pop(0)