2020-04-04 10:05:27 +00:00
|
|
|
__filename__ = "posts.py"
|
|
|
|
__author__ = "Bob Mottram"
|
|
|
|
__license__ = "AGPL3+"
|
2021-01-26 10:07:42 +00:00
|
|
|
__version__ = "1.2.0"
|
2020-04-04 10:05:27 +00:00
|
|
|
__maintainer__ = "Bob Mottram"
|
2021-09-10 16:14:50 +00:00
|
|
|
__email__ = "bob@libreserver.org"
|
2020-04-04 10:05:27 +00:00
|
|
|
__status__ = "Production"
|
2021-06-15 15:08:12 +00:00
|
|
|
__module_group__ = "ActivityPub"
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2019-06-28 18:55:29 +00:00
|
|
|
import json
|
|
|
|
import html
|
2019-06-29 10:08:59 +00:00
|
|
|
import datetime
|
2019-06-30 15:03:26 +00:00
|
|
|
import os
|
|
|
|
import shutil
|
|
|
|
import sys
|
2019-07-01 11:48:54 +00:00
|
|
|
import time
|
2020-09-25 12:33:28 +00:00
|
|
|
import random
|
2020-06-23 21:39:19 +00:00
|
|
|
from socket import error as SocketError
|
2019-10-10 13:48:05 +00:00
|
|
|
from time import gmtime, strftime
|
2019-07-14 09:17:50 +00:00
|
|
|
from collections import OrderedDict
|
2021-12-28 21:36:27 +00:00
|
|
|
from threads import thread_with_trace
|
2021-12-29 21:55:09 +00:00
|
|
|
from cache import store_person_in_cache
|
|
|
|
from cache import get_person_from_cache
|
|
|
|
from cache import expire_person_cache
|
2019-06-29 10:08:59 +00:00
|
|
|
from pprint import pprint
|
2021-12-28 16:56:57 +00:00
|
|
|
from session import create_session
|
2021-12-29 21:55:09 +00:00
|
|
|
from session import get_json
|
|
|
|
from session import post_json
|
|
|
|
from session import post_json_string
|
|
|
|
from session import post_image
|
|
|
|
from webfinger import webfinger_handle
|
|
|
|
from httpsig import create_signed_header
|
|
|
|
from siteactive import site_is_active
|
|
|
|
from languages import understood_post_language
|
2021-12-26 12:24:40 +00:00
|
|
|
from utils import get_user_paths
|
2021-12-26 19:15:36 +00:00
|
|
|
from utils import invalid_ciphertext
|
2021-12-26 17:12:07 +00:00
|
|
|
from utils import has_object_stringType
|
2021-12-27 11:20:57 +00:00
|
|
|
from utils import remove_id_ending
|
2021-12-26 17:21:37 +00:00
|
|
|
from utils import replace_users_with_at
|
2021-12-26 17:53:07 +00:00
|
|
|
from utils import has_group_type
|
2021-12-26 11:29:40 +00:00
|
|
|
from utils import get_base_content_from_post
|
2021-12-26 18:17:37 +00:00
|
|
|
from utils import remove_domain_port
|
2021-12-26 18:14:21 +00:00
|
|
|
from utils import get_port_from_domain
|
2021-12-26 10:57:03 +00:00
|
|
|
from utils import has_object_dict
|
2021-12-26 20:20:36 +00:00
|
|
|
from utils import reject_post_id
|
2021-12-27 19:33:45 +00:00
|
|
|
from utils import remove_invalid_chars
|
2021-12-28 14:01:37 +00:00
|
|
|
from utils import file_last_modified
|
2021-12-28 14:41:10 +00:00
|
|
|
from utils import is_public_post
|
2021-12-26 12:19:00 +00:00
|
|
|
from utils import has_users_path
|
2021-12-26 12:31:47 +00:00
|
|
|
from utils import valid_post_date
|
2021-12-26 12:45:03 +00:00
|
|
|
from utils import get_full_domain
|
2021-12-27 13:58:17 +00:00
|
|
|
from utils import get_followers_list
|
2021-12-27 17:49:35 +00:00
|
|
|
from utils import is_evil
|
2021-12-27 17:42:35 +00:00
|
|
|
from utils import get_status_number
|
2021-12-27 19:26:54 +00:00
|
|
|
from utils import create_person_dir
|
2021-12-27 20:47:05 +00:00
|
|
|
from utils import url_permitted
|
2021-12-27 22:19:18 +00:00
|
|
|
from utils import get_nickname_from_actor
|
2021-12-27 19:05:25 +00:00
|
|
|
from utils import get_domain_from_actor
|
2021-12-28 14:55:45 +00:00
|
|
|
from utils import delete_post
|
2021-12-28 14:41:10 +00:00
|
|
|
from utils import valid_nickname
|
2021-12-26 20:36:08 +00:00
|
|
|
from utils import locate_post
|
2021-12-26 15:13:34 +00:00
|
|
|
from utils import load_json
|
2021-12-26 14:47:21 +00:00
|
|
|
from utils import save_json
|
2021-12-26 14:08:58 +00:00
|
|
|
from utils import get_config_param
|
2021-12-27 22:38:48 +00:00
|
|
|
from utils import locate_news_votes
|
2021-12-27 22:46:10 +00:00
|
|
|
from utils import locate_news_arrival
|
2021-12-27 22:32:59 +00:00
|
|
|
from utils import votes_on_newswire_item
|
2021-12-27 15:43:22 +00:00
|
|
|
from utils import remove_html
|
2021-12-27 21:42:08 +00:00
|
|
|
from utils import dangerous_markup
|
2021-12-26 12:02:29 +00:00
|
|
|
from utils import acct_dir
|
2021-12-26 10:19:59 +00:00
|
|
|
from utils import local_actor_url
|
2021-12-28 21:36:27 +00:00
|
|
|
from media import attach_media
|
|
|
|
from media import replace_you_tube
|
|
|
|
from media import replace_twitter
|
2021-12-29 21:55:09 +00:00
|
|
|
from content import words_similarity
|
|
|
|
from content import limit_repeated_words
|
|
|
|
from content import post_tag_exists
|
|
|
|
from content import remove_long_words
|
|
|
|
from content import add_html_tags
|
|
|
|
from content import replace_emoji_from_tags
|
|
|
|
from content import remove_text_formatting
|
2021-12-28 21:36:27 +00:00
|
|
|
from auth import create_basic_auth_header
|
2021-12-29 21:55:09 +00:00
|
|
|
from blocking import is_blocked
|
2021-12-28 21:55:38 +00:00
|
|
|
from blocking import is_blocked_domain
|
2021-12-29 21:55:09 +00:00
|
|
|
from filters import is_filtered
|
|
|
|
from git import convert_post_to_patch
|
|
|
|
from linked_data_sig import generate_json_signature
|
|
|
|
from petnames import resolve_petnames
|
|
|
|
from video import convert_video_to_note
|
|
|
|
from context import get_individual_post_context
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2019-06-28 18:55:29 +00:00
|
|
|
|
2021-12-28 19:33:29 +00:00
|
|
|
def _is_admin_nickname(base_dir: str, nickname: str) -> bool:
    """Returns true if the given nickname matches the configured admin account
    """
    admin_name = get_config_param(base_dir, 'admin')
    if not admin_name:
        return False
    return admin_name == nickname


def is_moderator(base_dir: str, nickname: str) -> bool:
    """Returns true if the given nickname is a moderator

    If the moderators file is missing or empty then only the
    configured admin account counts as a moderator.
    """
    moderatorsFile = base_dir + '/accounts/moderators.txt'

    if not os.path.isfile(moderatorsFile):
        # no moderators file, fall back to the admin account
        return _is_admin_nickname(base_dir, nickname)

    with open(moderatorsFile, 'r') as f:
        lines = f.readlines()
        if len(lines) == 0:
            # empty moderators file, fall back to the admin account
            return _is_admin_nickname(base_dir, nickname)
        for moderator in lines:
            moderator = moderator.strip('\n').strip('\r')
            if moderator == nickname:
                return True
    return False
|
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def no_of_followers_on_domain(base_dir: str, handle: str,
                              domain: str, followFile='followers.txt') -> int:
    """Returns the number of followers of the given handle
    which are from the given domain
    """
    followers_path = base_dir + '/accounts/' + handle + '/' + followFile
    if not os.path.isfile(followers_path):
        return 0

    count = 0
    with open(followers_path, 'r') as followers_file:
        for line in followers_file:
            if '@' not in line:
                continue
            # the domain is whatever follows the first @, minus line endings
            line_domain = line.split('@')[1]
            line_domain = line_domain.replace('\n', '').replace('\r', '')
            if line_domain == domain:
                count += 1
    return count
|
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _get_local_private_key(base_dir: str, nickname: str, domain: str) -> str:
|
2021-09-01 17:26:40 +00:00
|
|
|
"""Returns the private key for a local account
|
|
|
|
"""
|
2021-09-02 10:29:35 +00:00
|
|
|
if not domain or not nickname:
|
|
|
|
return None
|
2021-09-01 17:26:40 +00:00
|
|
|
handle = nickname + '@' + domain
|
2021-12-25 16:17:53 +00:00
|
|
|
keyFilename = base_dir + '/keys/private/' + handle.lower() + '.key'
|
2021-09-01 17:26:40 +00:00
|
|
|
if not os.path.isfile(keyFilename):
|
|
|
|
return None
|
|
|
|
with open(keyFilename, 'r') as pemFile:
|
|
|
|
return pemFile.read()
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
2021-12-28 18:13:52 +00:00
|
|
|
def get_instance_actor_key(base_dir: str, domain: str) -> str:
    """Returns the private key for the instance actor used for
    signing GET posts
    """
    # the instance actor is the special local account named 'inbox'
    instance_key = _get_local_private_key(base_dir, 'inbox', domain)
    return instance_key
|
2021-09-01 17:26:40 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _get_local_public_key(base_dir: str, nickname: str, domain: str) -> str:
|
2021-09-01 17:26:40 +00:00
|
|
|
"""Returns the public key for a local account
|
|
|
|
"""
|
2021-09-02 10:29:35 +00:00
|
|
|
if not domain or not nickname:
|
|
|
|
return None
|
2021-09-01 17:26:40 +00:00
|
|
|
handle = nickname + '@' + domain
|
2021-12-25 16:17:53 +00:00
|
|
|
keyFilename = base_dir + '/keys/public/' + handle.lower() + '.key'
|
2021-09-01 17:26:40 +00:00
|
|
|
if not os.path.isfile(keyFilename):
|
|
|
|
return None
|
|
|
|
with open(keyFilename, 'r') as pemFile:
|
|
|
|
return pemFile.read()
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _get_person_key(nickname: str, domain: str, base_dir: str,
                    keyType: str = 'public', debug: bool = False):
    """Returns the public or private key of a local person.

    keyType is either 'public' or 'private'.
    Returns an empty string when the key is missing or implausibly short.
    """
    if keyType == 'private':
        keyPem = _get_local_private_key(base_dir, nickname, domain)
    else:
        keyPem = _get_local_public_key(base_dir, nickname, domain)
    if not keyPem:
        if debug:
            print('DEBUG: ' + keyType + ' key file not found')
        return ''
    # a plausible PEM is always longer than this
    if len(keyPem) < 20:
        if debug:
            # report the key type actually requested, rather than
            # always claiming the private key was too short
            print('DEBUG: ' + keyType + ' key was too short: ' + keyPem)
        return ''
    return keyPem
|
2020-03-22 21:16:02 +00:00
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _clean_html(rawHtml: str) -> str:
|
2020-04-04 10:05:27 +00:00
|
|
|
# text=BeautifulSoup(rawHtml, 'html.parser').get_text()
|
|
|
|
text = rawHtml
|
2019-06-28 18:55:29 +00:00
|
|
|
return html.unescape(text)
|
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def get_user_url(wfRequest: {}, sourceId: int, debug: bool) -> str:
    """Gets the actor url from a webfinger request

    Returns None when no activity+json link with an href is present.
    """
    links = wfRequest.get('links')
    if not links:
        if sourceId == 72367:
            print('get_user_url ' + str(sourceId) +
                  ' failed to get display name for webfinger ' +
                  str(wfRequest))
        else:
            print('get_user_url webfinger activity+json contains no links ' +
                  str(sourceId) + ' ' + str(wfRequest))
        return None

    for link in links:
        # only consider links with both a type and an href
        if not link.get('type') or not link.get('href'):
            continue
        if link['type'] != 'application/activity+json':
            continue
        href = link['href']
        if '/@' not in href:
            if debug and not has_users_path(href):
                print('get_user_url webfinger activity+json ' +
                      'contains single user instance actor ' +
                      str(sourceId) + ' ' + str(link))
            else:
                return href.replace('/@', '/users/')
        return href
    return None
|
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def parse_user_feed(signing_priv_key_pem: str,
                    session, feedUrl: str, asHeader: {},
                    project_version: str, http_prefix: str,
                    originDomain: str, debug: bool, depth: int = 0) -> []:
    """Fetches a feed collection and returns its list of items.

    Fetches feedUrl via get_json, retrying once with an
    'application/ld+json' Accept header if the first fetch failed.
    Returns the 'orderedItems' or 'items' list if present, otherwise
    follows the 'first'/'next' page url recursively (up to 10 levels).
    Returns None on failure.
    """
    # limit recursion when following 'first'/'next' page urls
    if depth > 10:
        if debug:
            print('Maximum search depth reached')
        return None

    if debug:
        print('Getting user feed for ' + feedUrl)
        print('User feed header ' + str(asHeader))
        print('http_prefix ' + str(http_prefix))
        print('originDomain ' + str(originDomain))

    feedJson = get_json(signing_priv_key_pem, session, feedUrl, asHeader, None,
                        debug, project_version, http_prefix, originDomain)
    if not feedJson:
        # retry with an ld+json Accept header, if not already used
        profileStr = 'https://www.w3.org/ns/activitystreams'
        acceptStr = 'application/ld+json; profile="' + profileStr + '"'
        if asHeader['Accept'] != acceptStr:
            asHeader = {
                'Accept': acceptStr
            }
            feedJson = get_json(signing_priv_key_pem, session, feedUrl,
                                asHeader, None, debug, project_version,
                                http_prefix, originDomain)
    if not feedJson:
        if debug:
            print('No user feed was returned')
        return None

    if debug:
        print('User feed:')
        pprint(feedJson)

    # the items may be directly within this collection
    if 'orderedItems' in feedJson:
        return feedJson['orderedItems']
    elif 'items' in feedJson:
        return feedJson['items']

    # otherwise follow the first/next page of the collection
    nextUrl = None
    if 'first' in feedJson:
        nextUrl = feedJson['first']
    elif 'next' in feedJson:
        nextUrl = feedJson['next']

    if debug:
        print('User feed next url: ' + str(nextUrl))

    if nextUrl:
        if isinstance(nextUrl, str):
            # '?max_id=0' indicates the end of the paged collection
            if '?max_id=0' not in nextUrl:
                userFeed = \
                    parse_user_feed(signing_priv_key_pem,
                                    session, nextUrl, asHeader,
                                    project_version, http_prefix,
                                    originDomain, debug, depth + 1)
                if userFeed:
                    return userFeed
        elif isinstance(nextUrl, dict):
            # some servers embed the next page inline as a dict
            userFeed = nextUrl
            if userFeed.get('orderedItems'):
                return userFeed['orderedItems']
            elif userFeed.get('items'):
                return userFeed['items']
    return None
|
2020-03-22 21:16:02 +00:00
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _get_person_box_actor(session, base_dir: str, actor: str,
                          profileStr: str, asHeader: {},
                          debug: bool, project_version: str,
                          http_prefix: str, originDomain: str,
                          person_cache: {},
                          signing_priv_key_pem: str,
                          sourceId: int) -> {}:
    """Returns the actor json for the given actor url

    Tries the person cache first, then fetches over the network with
    the supplied Accept header, then retries with an ld+json header.
    Returns None if the actor could not be obtained.
    """
    personJson = \
        get_person_from_cache(base_dir, actor, person_cache, True)
    if personJson:
        return personJson

    # pleroma and hubzilla style actor urls expect ld+json
    if '/channel/' in actor or '/accounts/' in actor:
        asHeader = {
            'Accept': 'application/ld+json; profile="' + profileStr + '"'
        }
    personJson = get_json(signing_priv_key_pem, session, actor, asHeader, None,
                          debug, project_version, http_prefix, originDomain)
    if personJson:
        return personJson
    # final attempt with an ld+json Accept header
    asHeader = {
        'Accept': 'application/ld+json; profile="' + profileStr + '"'
    }
    personJson = get_json(signing_priv_key_pem, session, actor, asHeader, None,
                          debug, project_version, http_prefix, originDomain)
    if personJson:
        return personJson
    print('Unable to get actor for ' + actor + ' ' + str(sourceId))
    if not signing_priv_key_pem:
        print('No signing key provided when getting actor')
    return None
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def get_person_box(signing_priv_key_pem: str, originDomain: str,
                   base_dir: str, session, wfRequest: {}, person_cache: {},
                   project_version: str, http_prefix: str,
                   nickname: str, domain: str,
                   boxName: str = 'inbox',
                   sourceId=0) -> (str, str, str, str, str, str, str, bool):
    """Looks up a person's actor from a webfinger result and returns
    details of the requested box/collection.

    Returns a tuple of (boxJson, pubKeyId, pubKey, personId, sharedInbox,
    avatarUrl, displayName, isGroup). On failure every element is None.
    The fetched actor is stored in the person cache.
    """
    debug = False
    profileStr = 'https://www.w3.org/ns/activitystreams'
    asHeader = {
        'Accept': 'application/activity+json; profile="' + profileStr + '"'
    }
    if not wfRequest:
        print('No webfinger given')
        return None, None, None, None, None, None, None, None

    # get the actor / personUrl
    if not wfRequest.get('errors'):
        # get the actor url from webfinger links
        personUrl = get_user_url(wfRequest, sourceId, debug)
    else:
        if nickname == 'dev':
            # try single user instance
            print('get_person_box: Trying single user instance with ld+json')
            personUrl = http_prefix + '://' + domain
            asHeader = {
                'Accept': 'application/ld+json; profile="' + profileStr + '"'
            }
        else:
            # the final fallback is a mastodon style url
            personUrl = local_actor_url(http_prefix, nickname, domain)
    if not personUrl:
        return None, None, None, None, None, None, None, None

    # get the actor json from the url
    personJson = \
        _get_person_box_actor(session, base_dir, personUrl,
                              profileStr, asHeader,
                              debug, project_version,
                              http_prefix, originDomain,
                              person_cache, signing_priv_key_pem,
                              sourceId)
    if not personJson:
        return None, None, None, None, None, None, None, None

    # is this actor a group rather than an individual?
    isGroup = False
    if personJson.get('type'):
        if personJson['type'] == 'Group':
            isGroup = True

    # get the url for the box/collection, which may be either a
    # top-level field or within the actor's endpoints
    boxJson = None
    if not personJson.get(boxName):
        if personJson.get('endpoints'):
            if personJson['endpoints'].get(boxName):
                boxJson = personJson['endpoints'][boxName]
    else:
        boxJson = personJson[boxName]
    if not boxJson:
        return None, None, None, None, None, None, None, None

    personId = None
    if personJson.get('id'):
        personId = personJson['id']
    pubKeyId = None
    pubKey = None
    if personJson.get('publicKey'):
        if personJson['publicKey'].get('id'):
            pubKeyId = personJson['publicKey']['id']
        if personJson['publicKey'].get('publicKeyPem'):
            pubKey = personJson['publicKey']['publicKeyPem']
    # sharedInbox may also be top-level or within endpoints
    sharedInbox = None
    if personJson.get('sharedInbox'):
        sharedInbox = personJson['sharedInbox']
    else:
        if personJson.get('endpoints'):
            if personJson['endpoints'].get('sharedInbox'):
                sharedInbox = personJson['endpoints']['sharedInbox']
    avatarUrl = None
    if personJson.get('icon'):
        if personJson['icon'].get('url'):
            avatarUrl = personJson['icon']['url']
    displayName = None
    if personJson.get('name'):
        displayName = personJson['name']
        # screen the display name for dangerous html or filtered words
        if dangerous_markup(personJson['name'], False):
            displayName = '*ADVERSARY*'
        elif is_filtered(base_dir,
                         nickname, domain,
                         displayName):
            displayName = '*FILTERED*'
        # have they moved?
        if personJson.get('movedTo'):
            displayName += ' ⌂'

    store_person_in_cache(base_dir, personUrl, personJson, person_cache, True)

    return boxJson, pubKeyId, pubKey, personId, sharedInbox, \
        avatarUrl, displayName, isGroup
|
2019-06-30 10:21:07 +00:00
|
|
|
|
2019-06-30 10:14:02 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _is_public_feed_post(item: {}, personPosts: {}, debug: bool) -> bool:
|
2021-09-11 13:42:17 +00:00
|
|
|
"""Is the given post a public feed post?
|
|
|
|
"""
|
2021-09-11 13:59:40 +00:00
|
|
|
if not isinstance(item, dict):
|
|
|
|
if debug:
|
|
|
|
print('item object is not a dict')
|
|
|
|
pprint(item)
|
|
|
|
return False
|
2021-09-11 13:42:17 +00:00
|
|
|
if not item.get('id'):
|
|
|
|
if debug:
|
|
|
|
print('No id')
|
|
|
|
return False
|
|
|
|
if not item.get('type'):
|
|
|
|
if debug:
|
|
|
|
print('No type')
|
|
|
|
return False
|
2021-10-27 12:46:38 +00:00
|
|
|
if item['type'] != 'Create' and \
|
|
|
|
item['type'] != 'Announce' and \
|
2021-11-18 18:43:58 +00:00
|
|
|
item['type'] != 'Page' and \
|
2021-10-27 12:46:38 +00:00
|
|
|
item['type'] != 'Note':
|
2021-09-11 13:42:17 +00:00
|
|
|
if debug:
|
2021-10-27 12:46:38 +00:00
|
|
|
print('Not a Create/Note/Announce type')
|
2021-09-11 13:42:17 +00:00
|
|
|
return False
|
|
|
|
if item.get('object'):
|
|
|
|
if isinstance(item['object'], dict):
|
|
|
|
if not item['object'].get('published'):
|
|
|
|
if debug:
|
|
|
|
print('No published attribute')
|
|
|
|
return False
|
|
|
|
elif isinstance(item['object'], str):
|
|
|
|
if not item.get('published'):
|
|
|
|
if debug:
|
|
|
|
print('No published attribute')
|
|
|
|
return False
|
|
|
|
else:
|
|
|
|
if debug:
|
|
|
|
print('object is not a dict or string')
|
|
|
|
return False
|
2021-11-18 18:43:58 +00:00
|
|
|
elif item['type'] == 'Note' or item['type'] == 'Page':
|
2021-10-27 12:46:38 +00:00
|
|
|
if not item.get('published'):
|
|
|
|
if debug:
|
|
|
|
print('No published attribute')
|
|
|
|
return False
|
2021-09-11 13:42:17 +00:00
|
|
|
if not personPosts.get(item['id']):
|
2021-10-27 12:46:38 +00:00
|
|
|
thisItem = item
|
|
|
|
if item.get('object'):
|
|
|
|
thisItem = item['object']
|
2021-09-11 13:42:17 +00:00
|
|
|
# check that this is a public post
|
|
|
|
# #Public should appear in the "to" list
|
2021-11-18 18:43:58 +00:00
|
|
|
itemIsNote = False
|
|
|
|
if item['type'] == 'Note' or item['type'] == 'Page':
|
|
|
|
itemIsNote = True
|
|
|
|
|
2021-10-27 12:46:38 +00:00
|
|
|
if isinstance(thisItem, dict):
|
|
|
|
if thisItem.get('to'):
|
2021-09-11 13:42:17 +00:00
|
|
|
isPublic = False
|
2021-10-27 12:46:38 +00:00
|
|
|
for recipient in thisItem['to']:
|
2021-09-11 13:42:17 +00:00
|
|
|
if recipient.endswith('#Public'):
|
|
|
|
isPublic = True
|
|
|
|
break
|
|
|
|
if not isPublic:
|
|
|
|
return False
|
2021-11-18 18:43:58 +00:00
|
|
|
elif isinstance(thisItem, str) or itemIsNote:
|
2021-09-11 13:42:17 +00:00
|
|
|
if item.get('to'):
|
|
|
|
isPublic = False
|
|
|
|
for recipient in item['to']:
|
|
|
|
if recipient.endswith('#Public'):
|
|
|
|
isPublic = True
|
|
|
|
break
|
|
|
|
if not isPublic:
|
|
|
|
return False
|
|
|
|
return True
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def is_create_inside_announce(item: {}) -> bool:
    """ is this a Create inside of an Announce?
    eg. lemmy feed item
    """
    if not isinstance(item, dict):
        return False
    # use get() so that a dict without a 'type' key returns False
    # instead of raising KeyError
    if item.get('type') != 'Announce':
        return False
    if not item.get('object'):
        return False
    if not isinstance(item['object'], dict):
        return False
    if not item['object'].get('type'):
        return False
    if item['object']['type'] != 'Create':
        return False
    return True
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _get_posts(session, outboxUrl: str, maxPosts: int,
               max_mentions: int,
               max_emoji: int, maxAttachments: int,
               federation_list: [],
               person_cache: {}, raw: bool,
               simple: bool, debug: bool,
               project_version: str, http_prefix: str,
               originDomain: str, system_language: str,
               signing_priv_key_pem: str) -> {}:
    """Gets public posts from an outbox

    Returns a dict of post id -> summary fields (sensitive, inreplyto,
    summary, html, plaintext, attachment, mentions, emoji).
    If raw is True the feed items are pprinted and None is returned.
    If simple is True each post's plain text is printed instead of
    being added to the returned dict.
    """
    if debug:
        print('Getting outbox posts for ' + outboxUrl)
    personPosts = {}
    if not outboxUrl:
        return personPosts
    profileStr = 'https://www.w3.org/ns/activitystreams'
    acceptStr = \
        'application/activity+json; ' + \
        'profile="' + profileStr + '"'
    asHeader = {
        'Accept': acceptStr
    }
    # outbox collections expect an ld+json Accept header
    if '/outbox/' in outboxUrl:
        acceptStr = \
            'application/ld+json; ' + \
            'profile="' + profileStr + '"'
        asHeader = {
            'Accept': acceptStr
        }
    if raw:
        # just print the first maxPosts raw feed items and exit
        if debug:
            print('Returning the raw feed')
        result = []
        i = 0
        userFeed = parse_user_feed(signing_priv_key_pem,
                                   session, outboxUrl, asHeader,
                                   project_version, http_prefix,
                                   originDomain, debug)
        for item in userFeed:
            result.append(item)
            i += 1
            if i == maxPosts:
                break
        pprint(result)
        return None

    if debug:
        print('Returning a human readable version of the feed')
    userFeed = parse_user_feed(signing_priv_key_pem,
                               session, outboxUrl, asHeader,
                               project_version, http_prefix,
                               originDomain, debug)
    if not userFeed:
        return personPosts

    i = 0
    for item in userFeed:
        # lemmy-style feeds wrap a Create within an Announce
        if is_create_inside_announce(item):
            item = item['object']

        if not _is_public_feed_post(item, personPosts, debug):
            continue

        # for Create/Announce the post fields live on the wrapped object
        thisItem = item
        if item['type'] != 'Note' and item['type'] != 'Page':
            thisItem = item['object']

        content = get_base_content_from_post(item, system_language)
        content = content.replace('&apos;', "'")

        mentions = []
        emoji = {}
        summary = ''
        inReplyTo = ''
        attachment = []
        sensitive = False
        if isinstance(thisItem, dict):
            if thisItem.get('tag'):
                for tagItem in thisItem['tag']:
                    if not tagItem.get('type'):
                        continue
                    tagType = tagItem['type'].lower()
                    if tagType == 'emoji':
                        if tagItem.get('name') and tagItem.get('icon'):
                            if tagItem['icon'].get('url'):
                                # No emoji from non-permitted domains
                                if url_permitted(tagItem['icon']['url'],
                                                 federation_list):
                                    emojiName = tagItem['name']
                                    emojiIcon = tagItem['icon']['url']
                                    emoji[emojiName] = emojiIcon
                                else:
                                    if debug:
                                        print('url not permitted ' +
                                              tagItem['icon']['url'])
                    if tagType == 'mention':
                        if tagItem.get('name'):
                            if tagItem['name'] not in mentions:
                                mentions.append(tagItem['name'])
            # skip posts with too many mentions or emoji
            if len(mentions) > max_mentions:
                if debug:
                    print('max mentions reached')
                continue
            if len(emoji) > max_emoji:
                if debug:
                    print('max emojis reached')
                continue

            if thisItem.get('summary'):
                if thisItem['summary']:
                    summary = thisItem['summary']

            if thisItem.get('inReplyTo'):
                if thisItem['inReplyTo']:
                    if isinstance(thisItem['inReplyTo'], str):
                        # No replies to non-permitted domains
                        if not url_permitted(thisItem['inReplyTo'],
                                             federation_list):
                            if debug:
                                print('url not permitted ' +
                                      thisItem['inReplyTo'])
                            continue
                        inReplyTo = thisItem['inReplyTo']

            if thisItem.get('attachment'):
                if thisItem['attachment']:
                    for attach in thisItem['attachment']:
                        if attach.get('name') and attach.get('url'):
                            # no attachments from non-permitted domains
                            if url_permitted(attach['url'],
                                             federation_list):
                                attachment.append([attach['name'],
                                                   attach['url']])
                            else:
                                if debug:
                                    print('url not permitted ' +
                                          attach['url'])

            sensitive = False
            if thisItem.get('sensitive'):
                sensitive = thisItem['sensitive']

        if content:
            if simple:
                # print the plain text of the post
                print(_clean_html(content) + '\n')
            else:
                pprint(item)
                personPosts[item['id']] = {
                    "sensitive": sensitive,
                    "inreplyto": inReplyTo,
                    "summary": summary,
                    "html": content,
                    "plaintext": _clean_html(content),
                    "attachment": attachment,
                    "mentions": mentions,
                    "emoji": emoji
                }
        i += 1

        if i == maxPosts:
            break
    return personPosts
|
2019-06-29 10:08:59 +00:00
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _get_common_words() -> tuple:
    """Returns a tuple of common words which are ignored when
    building word frequency histograms.
    Fixes: return annotation was '-> str' although a tuple is
    returned, and 'there' appeared twice in the list.
    """
    return (
        'that', 'some', 'about', 'then', 'they', 'were',
        'also', 'from', 'with', 'this', 'have', 'more',
        'need', 'here', 'would', 'these', 'into', 'very',
        'well', 'when', 'what', 'your', 'there', 'which',
        'even', 'such', 'just', 'those', 'only',
        'will', 'much', 'than', 'them', 'each', 'goes',
        'been', 'over', 'their', 'where', 'could', 'though',
        'like', 'think', 'same', 'maybe', 'really', 'thing',
        'something', 'possible', 'actual', 'actually',
        'because', 'around', 'having', 'especially', 'other',
        'making', 'made', 'make', 'makes', 'including',
        'includes', 'know', 'knowing', 'knows', 'things',
        'say', 'says', 'saying', 'many', 'somewhat',
        'problem', 'problems', 'idea', 'ideas',
        'using', 'uses', 'https', 'still', 'want', 'wants'
    )
|
2021-07-23 11:57:39 +00:00
|
|
|
|
2021-07-23 13:50:32 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _update_word_frequency(content: str, wordFrequency: {}) -> None:
    """Creates a dictionary containing words and the number of times
    that they appear.

    content: html content of a post
    wordFrequency: dict mapping word -> occurrence count, updated in place
    """
    plainText = remove_html(content)
    # treat sentence punctuation and newlines as word separators
    removeChars = ('.', ';', '?', '\n', ':')
    for ch in removeChars:
        plainText = plainText.replace(ch, ' ')
    wordsList = plainText.split(' ')
    # use a set for O(1) membership tests instead of scanning the
    # common words tuple for every word in the post
    commonWords = set(_get_common_words())
    for word in wordsList:
        wordLen = len(word)
        if wordLen < 3:
            continue
        if wordLen < 4:
            # keep three letter words only if they are acronyms
            # (entirely upper case)
            if word.upper() != word:
                continue
        # skip words containing markup or url fragments
        if '&' in word or \
           '"' in word or \
           '@' in word or \
           "'" in word or \
           "--" in word or \
           '//' in word:
            continue
        if word.lower() in commonWords:
            continue
        if wordFrequency.get(word):
            wordFrequency[word] += 1
        else:
            wordFrequency[word] = 1
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def get_post_domains(session, outboxUrl: str, maxPosts: int,
                     max_mentions: int,
                     max_emoji: int, maxAttachments: int,
                     federation_list: [],
                     person_cache: {},
                     debug: bool,
                     project_version: str, http_prefix: str,
                     domain: str,
                     wordFrequency: {},
                     domainList: [], system_language: str,
                     signing_priv_key_pem: str) -> []:
    """Returns a list of domains referenced within public posts
    Also updates wordFrequency as posts are scanned.
    Note: appends to (and returns) the supplied domainList.
    """
    if not outboxUrl:
        return []
    # some instances only respond to ld+json on their outbox
    profileStr = 'https://www.w3.org/ns/activitystreams'
    mediaStr = 'application/activity+json; '
    if '/outbox/' in outboxUrl:
        mediaStr = 'application/ld+json; '
    asHeader = {
        'Accept': mediaStr + 'profile="' + profileStr + '"'
    }

    postDomains = domainList

    itemCtr = 0
    userFeed = parse_user_feed(signing_priv_key_pem,
                               session, outboxUrl, asHeader,
                               project_version, http_prefix, domain, debug)
    for item in userFeed:
        itemCtr += 1
        if itemCtr > maxPosts:
            break
        if not has_object_dict(item):
            continue
        contentStr = get_base_content_from_post(item, system_language)
        if contentStr:
            _update_word_frequency(contentStr, wordFrequency)
        # domain of the post being replied to
        replyId = item['object'].get('inReplyTo')
        if replyId and isinstance(replyId, str):
            replyDomain, _ = get_domain_from_actor(replyId)
            if replyDomain not in postDomains:
                postDomains.append(replyDomain)

        # domains of any mentioned actors
        if item['object'].get('tag'):
            for tagItem in item['object']['tag']:
                if not tagItem.get('type'):
                    continue
                if tagItem['type'].lower() == 'mention' and \
                   tagItem.get('href'):
                    mentionDomain, _ = \
                        get_domain_from_actor(tagItem['href'])
                    if mentionDomain not in postDomains:
                        postDomains.append(mentionDomain)
    return postDomains
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _get_posts_for_blocked_domains(base_dir: str,
                                   session, outboxUrl: str, maxPosts: int,
                                   max_mentions: int,
                                   max_emoji: int, maxAttachments: int,
                                   federation_list: [],
                                   person_cache: {},
                                   debug: bool,
                                   project_version: str, http_prefix: str,
                                   domain: str,
                                   signing_priv_key_pem: str) -> {}:
    """Returns a dictionary of posts for blocked domains
    Maps each blocked domain to a list of post urls which
    reference it, either via inReplyTo or via mentions.
    """
    if not outboxUrl:
        return {}
    # some instances only respond to ld+json on their outbox
    profileStr = 'https://www.w3.org/ns/activitystreams'
    acceptStr = \
        'application/activity+json; ' + \
        'profile="' + profileStr + '"'
    if '/outbox/' in outboxUrl:
        acceptStr = \
            'application/ld+json; ' + \
            'profile="' + profileStr + '"'
    asHeader = {
        'Accept': acceptStr
    }

    blockedPosts = {}

    def _record_blocked_post(postDomain: str, item: {}) -> None:
        # remember the url of this post under the blocked domain,
        # avoiding duplicate entries
        # (previously this logic was duplicated in both branches below)
        if item['object'].get('url'):
            url = item['object']['url']
        else:
            url = item['object']['id']
        if not blockedPosts.get(postDomain):
            blockedPosts[postDomain] = [url]
        else:
            if url not in blockedPosts[postDomain]:
                blockedPosts[postDomain].append(url)

    i = 0
    userFeed = parse_user_feed(signing_priv_key_pem,
                               session, outboxUrl, asHeader,
                               project_version, http_prefix, domain, debug)
    for item in userFeed:
        i += 1
        if i > maxPosts:
            break
        if not has_object_dict(item):
            continue
        # check the domain of the post being replied to
        if item['object'].get('inReplyTo'):
            if isinstance(item['object']['inReplyTo'], str):
                postDomain, _ = \
                    get_domain_from_actor(item['object']['inReplyTo'])
                if is_blocked_domain(base_dir, postDomain):
                    _record_blocked_post(postDomain, item)

        # check the domains of any mentioned actors
        if item['object'].get('tag'):
            for tagItem in item['object']['tag']:
                if not tagItem.get('type'):
                    continue
                tagType = tagItem['type'].lower()
                if tagType == 'mention' and tagItem.get('href'):
                    postDomain, _ = \
                        get_domain_from_actor(tagItem['href'])
                    if is_blocked_domain(base_dir, postDomain):
                        _record_blocked_post(postDomain, item)
    return blockedPosts
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def delete_all_posts(base_dir: str,
                     nickname: str, domain: str, boxname: str) -> None:
    """Deletes all posts for a person from inbox or outbox
    Only the inbox, outbox, tlblogs and tlnews boxes may be cleared.
    """
    if boxname not in ('inbox', 'outbox', 'tlblogs', 'tlnews'):
        return
    boxDir = create_person_dir(nickname, domain, base_dir, boxname)
    for entry in os.scandir(boxDir):
        filePath = os.path.join(boxDir, entry.name)
        try:
            if os.path.isfile(filePath):
                os.unlink(filePath)
            elif os.path.isdir(filePath):
                shutil.rmtree(filePath, ignore_errors=False, onerror=None)
        except Exception as ex:
            print('ERROR: delete_all_posts ' + str(ex))
|
2019-07-03 22:16:03 +00:00
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-28 18:13:52 +00:00
|
|
|
def save_post_to_box(base_dir: str, http_prefix: str, post_id: str,
                     nickname: str, domain: str, post_json_object: {},
                     boxname: str) -> str:
    """Saves the given json to the given box
    If post_id is empty a new status id is generated and written
    into the post. Returns the filename, or None for an unknown box.
    """
    if boxname not in ('inbox', 'outbox', 'tlblogs', 'tlnews', 'scheduled'):
        return None
    originalDomain = domain
    domain = remove_domain_port(domain)

    if not post_id:
        # no id was supplied, so generate a fresh status id
        statusNumber, published = get_status_number()
        actor = local_actor_url(http_prefix, nickname, originalDomain)
        post_id = actor + '/statuses/' + statusNumber
        post_json_object['id'] = post_id + '/activity'
    if has_object_dict(post_json_object):
        post_json_object['object']['id'] = post_id
        post_json_object['object']['atomUri'] = post_id

    boxDir = create_person_dir(nickname, domain, base_dir, boxname)
    filename = boxDir + '/' + post_id.replace('/', '#') + '.json'

    save_json(post_json_object, filename)
    return filename
|
2019-07-03 22:16:03 +00:00
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _update_hashtags_index(base_dir: str, tag: {}, newPostId: str) -> None:
    """Writes the post url for hashtags to a file
    This allows posts for a hashtag to be quickly looked up.
    New entries are prepended so that the index stays in
    reverse chronological order.
    Fix: previously the index was read via a bare open().read()
    in a condition, leaking a file handle and reading the file twice.
    """
    if tag['type'] != 'Hashtag':
        return

    # create hashtags directory
    tagsDir = base_dir + '/tags'
    if not os.path.isdir(tagsDir):
        os.mkdir(tagsDir)
    tagName = tag['name']
    # strip the leading '#' for the index filename
    tagsFilename = tagsDir + '/' + tagName[1:] + '.txt'
    tagline = newPostId + '\n'

    if not os.path.isfile(tagsFilename):
        # create a new tags index file
        with open(tagsFilename, 'w+') as tagsFile:
            tagsFile.write(tagline)
    else:
        # prepend to tags index file
        try:
            with open(tagsFilename, 'r+') as tagsFile:
                content = tagsFile.read()
                if tagline not in content:
                    tagsFile.seek(0, 0)
                    tagsFile.write(tagline + content)
        except Exception as ex:
            print('WARN: Failed to write entry to tags file ' +
                  tagsFilename + ' ' + str(ex))
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2019-08-09 11:12:08 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _add_schedule_post(base_dir: str, nickname: str, domain: str,
                       eventDateStr: str, post_id: str) -> None:
    """Adds a scheduled post to the index
    New entries are prepended to the account's schedule.index file.
    Fix: previously the index was read via a bare open().read()
    in a condition, leaking a file handle and reading the file twice.
    """
    handle = nickname + '@' + domain
    scheduleIndexFilename = \
        base_dir + '/accounts/' + handle + '/schedule.index'

    indexStr = eventDateStr + ' ' + post_id.replace('/', '#')
    if os.path.isfile(scheduleIndexFilename):
        try:
            with open(scheduleIndexFilename, 'r+') as scheduleFile:
                content = scheduleFile.read()
                # only add the entry if it is not already present
                if indexStr not in content:
                    scheduleFile.seek(0, 0)
                    scheduleFile.write(indexStr + '\n' + content)
                    print('DEBUG: scheduled post added to index')
        except Exception as ex:
            print('WARN: Failed to write entry to scheduled posts index ' +
                  scheduleIndexFilename + ' ' + str(ex))
    else:
        with open(scheduleIndexFilename, 'w+') as scheduleFile:
            scheduleFile.write(indexStr + '\n')
|
2020-01-13 10:49:03 +00:00
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def valid_content_warning(cw: str) -> str:
    """Returns a validated content warning
    Strips html, removes hashtags and filters invalid characters.
    """
    cwCleaned = remove_html(cw)
    # hashtags within content warnings apparently cause a lot of trouble
    # so remove them
    if '#' in cwCleaned:
        cwCleaned = cwCleaned.replace('#', '').replace('  ', ' ')
    return remove_invalid_chars(cwCleaned)
|
2020-08-25 19:35:55 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _load_auto_cw(base_dir: str, nickname: str, domain: str) -> []:
    """Loads automatic CWs file and returns a list containing
    the lines of the file.
    Returns an empty list if the account has no autocw.txt file.
    Fix: removed an unreachable 'return []' which followed the
    return inside the with block.
    """
    filename = acct_dir(base_dir, nickname, domain) + '/autocw.txt'
    if not os.path.isfile(filename):
        return []
    with open(filename, 'r') as f:
        return f.readlines()
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _add_auto_cw(base_dir: str, nickname: str, domain: str,
                 subject: str, content: str) -> str:
    """Appends any automatic CW to the subject line
    and returns the new subject line.
    Rules have the form 'match -> warning' in the account's autocw file.
    """
    newSubject = subject
    for cwRule in _load_auto_cw(base_dir, nickname, domain):
        if '->' not in cwRule:
            continue
        ruleSections = cwRule.split('->')
        rulematch = ruleSections[0].strip()
        if rulematch not in content:
            continue
        cwStr = ruleSections[1].strip()
        if not newSubject:
            newSubject = cwStr
        elif cwStr not in newSubject:
            # append the warning to the existing subject line
            newSubject += ', ' + cwStr
    return newSubject
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _create_post_cw_from_reply(base_dir: str, nickname: str, domain: str,
                               inReplyTo: str,
                               sensitive: bool, summary: str) -> (bool, str):
    """If this is a reply and the original post has a CW
    then use the same CW.
    Returns the possibly updated (sensitive, summary) pair.
    """
    if not inReplyTo or sensitive:
        return sensitive, summary
    # locate the post which this is a reply to and check if
    # it has a content warning. If it does then reproduce
    # the same warning
    replyPostFilename = \
        locate_post(base_dir, nickname, domain, inReplyTo)
    if not replyPostFilename:
        return sensitive, summary
    replyToJson = load_json(replyPostFilename)
    if not replyToJson:
        return sensitive, summary
    if not replyToJson.get('object'):
        return sensitive, summary
    replyObject = replyToJson['object']
    if replyObject.get('sensitive'):
        sensitive = True
    if replyObject.get('summary'):
        summary = replyObject['summary']
    return sensitive, summary
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _create_post_s2s(base_dir: str, nickname: str, domain: str, port: int,
                     http_prefix: str, content: str, statusNumber: str,
                     published: str, newPostId: str, postContext: {},
                     toRecipients: [], toCC: [], inReplyTo: str,
                     sensitive: bool, commentsEnabled: bool,
                     tags: [], attachImageFilename: str,
                     mediaType: str, imageDescription: str, city: str,
                     postObjectType: str, summary: str,
                     inReplyToAtomUri: str, system_language: str,
                     conversationId: str, low_bandwidth: bool,
                     content_license_url: str) -> {}:
    """Creates a new server-to-server post

    Returns an ActivityPub 'Create' activity dict whose 'object' is the
    post itself. If attachImageFilename is given then media is attached
    to the post object via attach_media.
    Note: domain here is presumably the full domain (with any port) —
    the port parameter is only passed on to attach_media.
    """
    actorUrl = local_actor_url(http_prefix, nickname, domain)
    # id of the replies collection for the new status
    idStr = \
        local_actor_url(http_prefix, nickname, domain) + \
        '/statuses/' + statusNumber + '/replies'
    newPostUrl = \
        http_prefix + '://' + domain + '/@' + nickname + '/' + statusNumber
    newPostAttributedTo = \
        local_actor_url(http_prefix, nickname, domain)
    # a new post starts its own conversation unless one was supplied
    if not conversationId:
        conversationId = newPostId
    newPost = {
        '@context': postContext,
        'id': newPostId + '/activity',
        'type': 'Create',
        'actor': actorUrl,
        'published': published,
        'to': toRecipients,
        'cc': toCC,
        'object': {
            'id': newPostId,
            'conversation': conversationId,
            'type': postObjectType,
            'summary': summary,
            'inReplyTo': inReplyTo,
            'published': published,
            'url': newPostUrl,
            'attributedTo': newPostAttributedTo,
            'to': toRecipients,
            'cc': toCC,
            'sensitive': sensitive,
            'atomUri': newPostId,
            'inReplyToAtomUri': inReplyToAtomUri,
            'commentsEnabled': commentsEnabled,
            # rejectReplies mirrors commentsEnabled inverted
            'rejectReplies': not commentsEnabled,
            'mediaType': 'text/html',
            'content': content,
            'contentMap': {
                system_language: content
            },
            'attachment': [],
            'tag': tags,
            # empty replies collection for the new status
            'replies': {
                'id': idStr,
                'type': 'Collection',
                'first': {
                    'type': 'CollectionPage',
                    'next': idStr + '?only_other_accounts=true&page=true',
                    'partOf': idStr,
                    'items': []
                }
            }
        }
    }
    if attachImageFilename:
        # attach_media returns the updated post object
        newPost['object'] = \
            attach_media(base_dir, http_prefix, nickname, domain, port,
                         newPost['object'], attachImageFilename,
                         mediaType, imageDescription, city, low_bandwidth,
                         content_license_url)
    return newPost
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _create_post_c2s(base_dir: str, nickname: str, domain: str, port: int,
                     http_prefix: str, content: str, statusNumber: str,
                     published: str, newPostId: str, postContext: {},
                     toRecipients: [], toCC: [], inReplyTo: str,
                     sensitive: bool, commentsEnabled: bool,
                     tags: [], attachImageFilename: str,
                     mediaType: str, imageDescription: str, city: str,
                     postObjectType: str, summary: str,
                     inReplyToAtomUri: str, system_language: str,
                     conversationId: str, low_bandwidth: bool,
                     content_license_url: str) -> {}:
    """Creates a new client-to-server post

    Unlike _create_post_s2s this returns the bare post object,
    without a wrapping 'Create' activity.
    """
    domain_full = get_full_domain(domain, port)
    # id of the replies collection for the new status
    idStr = \
        local_actor_url(http_prefix, nickname, domain_full) + \
        '/statuses/' + statusNumber + '/replies'
    newPostUrl = \
        http_prefix + '://' + domain + '/@' + nickname + '/' + statusNumber
    # a new post starts its own conversation unless one was supplied
    if not conversationId:
        conversationId = newPostId
    newPost = {
        "@context": postContext,
        'id': newPostId,
        'conversation': conversationId,
        'type': postObjectType,
        'summary': summary,
        'inReplyTo': inReplyTo,
        'published': published,
        'url': newPostUrl,
        'attributedTo': local_actor_url(http_prefix, nickname, domain_full),
        'to': toRecipients,
        'cc': toCC,
        'sensitive': sensitive,
        'atomUri': newPostId,
        'inReplyToAtomUri': inReplyToAtomUri,
        'commentsEnabled': commentsEnabled,
        # rejectReplies mirrors commentsEnabled inverted
        'rejectReplies': not commentsEnabled,
        'mediaType': 'text/html',
        'content': content,
        'contentMap': {
            system_language: content
        },
        'attachment': [],
        'tag': tags,
        # empty replies collection for the new status
        'replies': {
            'id': idStr,
            'type': 'Collection',
            'first': {
                'type': 'CollectionPage',
                'next': idStr + '?only_other_accounts=true&page=true',
                'partOf': idStr,
                'items': []
            }
        }
    }
    if attachImageFilename:
        # attach_media returns the updated post
        newPost = \
            attach_media(base_dir, http_prefix, nickname, domain, port,
                         newPost, attachImageFilename,
                         mediaType, imageDescription, city, low_bandwidth,
                         content_license_url)
    return newPost
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _create_post_place_and_time(eventDate: str, endDate: str,
                                eventTime: str, endTime: str,
                                summary: str, content: str,
                                schedulePost: bool,
                                eventUUID: str,
                                location: str,
                                tags: []) -> str:
    """Adds a place and time to the tags on a new post

    Appends Event and Place tags to the given tags list where
    appropriate and returns the starting date/time string
    (or None if no eventDate was given).
    Fix: the date+time string construction was duplicated for the
    start and end dates; it is now a single helper. Dead assignments
    (endDateStr/eventDateStr set then always overwritten) removed.
    """
    def _combined_date_str(dateStr: str, timeStr: str) -> str:
        # build a date+time string, defaulting to midday UTC
        if not timeStr:
            return dateStr + 'T12:00:00Z'
        if timeStr.endswith('Z'):
            return dateStr + 'T' + timeStr
        # no timezone given, so append seconds and the local utc offset
        return dateStr + 'T' + timeStr + \
            ':00' + strftime("%z", gmtime())

    # the event is named after the summary, or failing that the content
    eventName = summary
    if not eventName:
        eventName = content

    endDateStr = None
    if endDate:
        endDateStr = _combined_date_str(endDate, endTime)

    # get the starting date and time
    eventDateStr = None
    if eventDate:
        eventDateStr = _combined_date_str(eventDate, eventTime)
        if not endDateStr:
            endDateStr = eventDateStr
        if not schedulePost and not eventUUID:
            tags.append({
                "@context": "https://www.w3.org/ns/activitystreams",
                "type": "Event",
                "name": eventName,
                "startTime": eventDateStr,
                "endTime": endDateStr
            })
    if location and not eventUUID:
        tags.append({
            "@context": "https://www.w3.org/ns/activitystreams",
            "type": "Place",
            "name": location
        })
    return eventDateStr
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _consolidate_actors_list(actorsList: []) -> None:
    """ consolidate duplicated actors
    https://domain/@nick gets merged with https://domain/users/nick
    The list is modified in place.
    """
    # collect unique candidates using the /@ url style
    possibleDuplicateActors = []
    for ccActor in actorsList:
        if '/@' in ccActor and ccActor not in possibleDuplicateActors:
            possibleDuplicateActors.append(ccActor)
    if not possibleDuplicateActors:
        return
    uPaths = get_user_paths()
    removeActors = []
    for ccActor in possibleDuplicateActors:
        # does an equivalent /users/ style url already exist?
        for usrPath in uPaths:
            if ccActor.replace('/@', usrPath) in actorsList:
                if ccActor not in removeActors:
                    removeActors.append(ccActor)
                break
    for ccActor in removeActors:
        actorsList.remove(ccActor)
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _create_post_mentions(ccUrl: str, newPost: {},
                          toRecipients: [], tags: []) -> None:
    """Updates mentions for a new post
    Ensures ccUrl appears in the cc list and, for public posts,
    copies mentioned actors into cc. newPost is modified in place.
    """
    if not ccUrl:
        return

    if newPost.get('object'):
        if ccUrl not in newPost['object']['cc']:
            newPost['object']['cc'] = [ccUrl] + newPost['object']['cc']

        # if this is a public post then include any mentions in cc
        toCC = newPost['object']['cc']
        if len(toRecipients) != 1:
            return
        if toRecipients[0].endswith('#Public') and \
           ccUrl.endswith('/followers'):
            for tag in tags:
                if tag['type'] != 'Mention':
                    continue
                if tag['href'] not in toCC:
                    newPost['object']['cc'].append(tag['href'])

            _consolidate_actors_list(newPost['object']['cc'])
        newPost['cc'] = newPost['object']['cc']
    else:
        if ccUrl not in newPost['cc']:
            newPost['cc'] = [ccUrl] + newPost['cc']
        # fix: previously this consolidated a literal ['cc'] list,
        # which was a no-op; consolidate the post's actual cc list
        _consolidate_actors_list(newPost['cc'])
|
2021-12-06 19:54:47 +00:00
|
|
|
|
2021-06-27 16:12:10 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _create_post_mod_report(base_dir: str,
                            isModerationReport: bool, newPost: {},
                            newPostId: str) -> None:
    """ if this is a moderation report then add a status
    Marks the post as pending and appends its id to the
    moderation index file.
    """
    if not isModerationReport:
        return
    # add status to the post object if present, otherwise the post itself
    target = newPost['object'] if newPost.get('object') else newPost
    target['moderationStatus'] = 'pending'
    # save to index file
    moderationIndexFile = base_dir + '/accounts/moderation.txt'
    with open(moderationIndexFile, 'a+') as modFile:
        modFile.write(newPostId + '\n')
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def get_actor_from_in_reply_to(inReplyTo: str) -> str:
    """Tries to get the replied to actor from the inReplyTo post id
    Note: this will not always be successful for some instance types
    Returns None when the actor cannot be derived.
    """
    replyNickname = get_nickname_from_actor(inReplyTo)
    if not replyNickname:
        return None
    replyActor = None
    slashSep = '/' + replyNickname + '/'
    hashSep = '#' + replyNickname + '#'
    if slashSep in inReplyTo:
        # url form: https://domain/users/nick/statuses/123
        replyActor = \
            inReplyTo.split(slashSep)[0] + '/' + replyNickname
    elif hashSep in inReplyTo:
        # filename form with '/' replaced by '#'
        replyActor = \
            inReplyTo.split(hashSep)[0] + '#' + replyNickname
        replyActor = replyActor.replace('#', '/')
    if not replyActor:
        return None
    if '://' not in replyActor:
        return None
    return replyActor
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _create_post_base(base_dir: str,
                      nickname: str, domain: str, port: int,
                      toUrl: str, ccUrl: str, http_prefix: str, content: str,
                      followersOnly: bool, saveToFile: bool,
                      client_to_server: bool, commentsEnabled: bool,
                      attachImageFilename: str,
                      mediaType: str, imageDescription: str, city: str,
                      isModerationReport: bool,
                      isArticle: bool,
                      inReplyTo: str,
                      inReplyToAtomUri: str,
                      subject: str, schedulePost: bool,
                      eventDate: str, eventTime: str,
                      location: str,
                      eventUUID: str, category: str,
                      joinMode: str,
                      endDate: str, endTime: str,
                      maximumAttendeeCapacity: int,
                      repliesModerationOption: str,
                      anonymousParticipationEnabled: bool,
                      eventStatus: str, ticketUrl: str,
                      system_language: str,
                      conversationId: str, low_bandwidth: bool,
                      content_license_url: str) -> {}:
    """Creates a message.

    Common implementation behind the public/unlisted/followers-only/DM/
    question post creators. Builds the post json (server-to-server or
    client-to-server form depending on client_to_server), handles mentions,
    hashtags, content warnings, moderation reports, git patches, and
    optionally schedules or saves the post.
    Returns the new post json, or None if toUrl is not a string.
    """
    # strip invalid/control characters from the content
    content = remove_invalid_chars(content)

    # the subject (content warning) may be extended by per-account auto-CW
    subject = _add_auto_cw(base_dir, nickname, domain, subject, content)

    # the news account does not process @mentions
    if nickname != 'news':
        mentionedRecipients = \
            get_mentioned_people(base_dir, http_prefix, content, domain, False)
    else:
        mentionedRecipients = ''

    tags = []
    hashtagsDict = {}

    domain = get_full_domain(domain, port)

    # add tags
    if nickname != 'news':
        # add_html_tags also populates hashtagsDict as a side effect
        content = \
            add_html_tags(base_dir, http_prefix,
                          nickname, domain, content,
                          mentionedRecipients,
                          hashtagsDict, True)

    # replace emoji with unicode
    tags = []
    for tagName, tag in hashtagsDict.items():
        tags.append(tag)

    # get list of tags
    if nickname != 'news':
        content = \
            replace_emoji_from_tags(None, base_dir, content, tags, 'content',
                                    False)
    # remove replaced emoji
    # (emoji tags start with ':'; if the tag text no longer appears in the
    # content after replacement, drop it from the dict)
    hashtagsDictCopy = hashtagsDict.copy()
    for tagName, tag in hashtagsDictCopy.items():
        if tag.get('name'):
            if tag['name'].startswith(':'):
                if tag['name'] not in content:
                    del hashtagsDict[tagName]

    # allocate a status number and published timestamp for the new post
    statusNumber, published = get_status_number()
    newPostId = \
        local_actor_url(http_prefix, nickname, domain) + \
        '/statuses/' + statusNumber

    sensitive = False
    summary = None
    if subject:
        # a subject acts as a content warning and marks the post sensitive
        summary = remove_invalid_chars(valid_content_warning(subject))
        sensitive = True

    toRecipients = []
    toCC = []
    if toUrl:
        if not isinstance(toUrl, str):
            print('ERROR: toUrl is not a string')
            return None
        toRecipients = [toUrl]

    # who to send to
    # mentioned actors are added to CC rather than To
    if mentionedRecipients:
        for mention in mentionedRecipients:
            if mention not in toCC:
                toCC.append(mention)

    # a post is public if any To recipient ends with #Public
    isPublic = False
    for recipient in toRecipients:
        if recipient.endswith('#Public'):
            isPublic = True
            break

    # create a list of hashtags
    # Only posts which are #Public are searchable by hashtag
    if hashtagsDict:
        for tagName, tag in hashtagsDict.items():
            if not post_tag_exists(tag['type'], tag['name'], tags):
                tags.append(tag)
            if isPublic:
                _update_hashtags_index(base_dir, tag, newPostId)
        # print('Content tags: ' + str(tags))

    # a reply may inherit the content warning of the post it replies to
    sensitive, summary = \
        _create_post_cw_from_reply(base_dir, nickname, domain,
                                   inReplyTo, sensitive, summary)

    # attach any event date/time/place tags; returns the event date string
    eventDateStr = \
        _create_post_place_and_time(eventDate, endDate,
                                    eventTime, endTime,
                                    summary, content, schedulePost,
                                    eventUUID, location, tags)

    postContext = get_individual_post_context()

    if not isPublic:
        # make sure that CC doesn't also contain a To address
        # eg. To: [ "https://mydomain/users/foo/followers" ]
        # CC: [ "X", "Y", "https://mydomain/users/foo", "Z" ]
        removeFromCC = []
        for ccRecipient in toCC:
            for sendToActor in toRecipients:
                if ccRecipient in sendToActor and \
                   ccRecipient not in removeFromCC:
                    removeFromCC.append(ccRecipient)
                    break
        for ccRemoval in removeFromCC:
            toCC.remove(ccRemoval)
    else:
        if inReplyTo:
            # If this is a public post then get the actor being
            # replied to end ensure that it is within the CC list
            replyActor = get_actor_from_in_reply_to(inReplyTo)
            if replyActor:
                if replyActor not in toCC:
                    toCC.append(replyActor)

    # the type of post to be made
    postObjectType = 'Note'
    if isArticle:
        postObjectType = 'Article'

    # build either the server-to-server or client-to-server form
    if not client_to_server:
        newPost = \
            _create_post_s2s(base_dir, nickname, domain, port,
                             http_prefix, content, statusNumber,
                             published, newPostId, postContext,
                             toRecipients, toCC, inReplyTo,
                             sensitive, commentsEnabled,
                             tags, attachImageFilename,
                             mediaType, imageDescription, city,
                             postObjectType, summary,
                             inReplyToAtomUri, system_language,
                             conversationId, low_bandwidth,
                             content_license_url)
    else:
        newPost = \
            _create_post_c2s(base_dir, nickname, domain, port,
                             http_prefix, content, statusNumber,
                             published, newPostId, postContext,
                             toRecipients, toCC, inReplyTo,
                             sensitive, commentsEnabled,
                             tags, attachImageFilename,
                             mediaType, imageDescription, city,
                             postObjectType, summary,
                             inReplyToAtomUri, system_language,
                             conversationId, low_bandwidth,
                             content_license_url)

    # add mention tags and adjust cc with ccUrl
    _create_post_mentions(ccUrl, newPost, toRecipients, tags)

    # moderation reports get a pending status and an index entry
    _create_post_mod_report(base_dir, isModerationReport, newPost, newPostId)

    # If a patch has been posted - i.e. the output from
    # git format-patch - then convert the activitypub type
    convert_post_to_patch(base_dir, nickname, domain, newPost)

    if schedulePost:
        if eventDate and eventTime:
            # add an item to the scheduled post index file
            _add_schedule_post(base_dir, nickname, domain,
                               eventDateStr, newPostId)
            save_post_to_box(base_dir, http_prefix, newPostId,
                             nickname, domain, newPost, 'scheduled')
        else:
            print('Unable to create scheduled post without ' +
                  'date and time values')
            return newPost
    elif saveToFile:
        # articles go to the blogs timeline, everything else to the outbox
        if isArticle:
            save_post_to_box(base_dir, http_prefix, newPostId,
                             nickname, domain, newPost, 'tlblogs')
        else:
            save_post_to_box(base_dir, http_prefix, newPostId,
                             nickname, domain, newPost, 'outbox')
    return newPost
|
2019-06-29 10:08:59 +00:00
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-28 19:33:29 +00:00
|
|
|
def outbox_message_create_wrap(http_prefix: str,
                               nickname: str, domain: str, port: int,
                               message_json: {}) -> {}:
    """Wraps a received message in a Create
    https://www.w3.org/TR/activitypub/#object-without-create
    """
    fullDomain = get_full_domain(domain, port)
    statusStr, publishedTime = get_status_number()
    # keep an existing published date rather than the newly generated one
    if message_json.get('published'):
        publishedTime = message_json['published']

    actorUrl = local_actor_url(http_prefix, nickname, fullDomain)
    postId = actorUrl + '/statuses/' + statusStr
    ccList = message_json['cc'] if message_json.get('cc') else []

    wrapped = {
        "@context": "https://www.w3.org/ns/activitystreams",
        'id': postId + '/activity',
        'type': 'Create',
        'actor': actorUrl,
        'published': publishedTime,
        'to': message_json['to'],
        'cc': ccList,
        'object': message_json
    }
    # give the wrapped object the activity id and its web/atom urls
    wrapped['object']['id'] = wrapped['id']
    wrapped['object']['url'] = \
        http_prefix + '://' + fullDomain + '/@' + nickname + '/' + statusStr
    wrapped['object']['atomUri'] = actorUrl + '/statuses/' + statusStr
    return wrapped
|
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _post_is_addressed_to_followers(base_dir: str,
                                    nickname: str, domain: str, port: int,
                                    http_prefix: str,
                                    post_json_object: {}) -> bool:
    """Returns true if the given post is addressed to followers of the nickname
    """
    if not post_json_object.get('object'):
        return False

    # take the to/cc lists from the wrapped object when this is a
    # non-Update activity with a dict object, otherwise from the activity
    useObjectLists = \
        post_json_object['type'] != 'Update' and \
        has_object_dict(post_json_object)
    source = post_json_object['object'] if useObjectLists else post_json_object

    recipients = source['to'] if source.get('to') else []
    carbonCopies = source['cc'] if source.get('cc') else []

    domain_full = get_full_domain(domain, port)
    followersUrl = \
        local_actor_url(http_prefix, nickname, domain_full) + '/followers'

    # does the followers url exist in 'to' or 'cc' lists?
    return followersUrl in recipients or followersUrl in carbonCopies
|
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-28 19:33:29 +00:00
|
|
|
def pin_post(base_dir: str, nickname: str, domain: str,
             pinnedContent: str, followersOnly: bool) -> None:
    """Pins the given post Id to the profile of then given account
    """
    pinnedFilename = \
        acct_dir(base_dir, nickname, domain) + '/pinToProfile.txt'
    try:
        with open(pinnedFilename, 'w+') as fp:
            fp.write(pinnedContent)
    except OSError:
        print('EX: unable to write ' + pinnedFilename)
|
2021-01-24 18:09:21 +00:00
|
|
|
|
|
|
|
|
2021-12-28 19:33:29 +00:00
|
|
|
def undo_pinned_post(base_dir: str, nickname: str, domain: str) -> None:
    """Removes pinned content for then given account
    """
    pinnedFilename = \
        acct_dir(base_dir, nickname, domain) + '/pinToProfile.txt'
    # nothing to do if no post is currently pinned
    if not os.path.isfile(pinnedFilename):
        return
    try:
        os.remove(pinnedFilename)
    except OSError:
        print('EX: undo_pinned_post unable to delete ' + pinnedFilename)
|
2021-11-09 18:03:17 +00:00
|
|
|
|
2021-01-24 18:35:42 +00:00
|
|
|
|
2021-12-28 19:33:29 +00:00
|
|
|
def get_pinned_post_as_json(base_dir: str, http_prefix: str,
                            nickname: str, domain: str,
                            domain_full: str, system_language: str) -> {}:
    """Returns the pinned profile post as json.

    Reads the account's pinToProfile.txt file and wraps its content in a
    Note object addressed to #Public. Returns an empty dict if there is no
    pinned content.
    """
    accountDir = acct_dir(base_dir, nickname, domain)
    pinnedFilename = accountDir + '/pinToProfile.txt'
    pinnedPostJson = {}
    actor = local_actor_url(http_prefix, nickname, domain_full)
    if os.path.isfile(pinnedFilename):
        pinnedContent = None
        with open(pinnedFilename, 'r') as pinFile:
            pinnedContent = pinFile.read()
        if pinnedContent:
            pinnedPostJson = {
                'atomUri': actor + '/pinned',
                'attachment': [],
                'attributedTo': actor,
                'cc': [
                    actor + '/followers'
                ],
                'content': pinnedContent,
                'contentMap': {
                    system_language: pinnedContent
                },
                'id': actor + '/pinned',
                'inReplyTo': None,
                'inReplyToAtomUri': None,
                # the pin file's modification time is used as published date
                'published': file_last_modified(pinnedFilename),
                'replies': {},
                'sensitive': False,
                'summary': None,
                'tag': [],
                'to': ['https://www.w3.org/ns/activitystreams#Public'],
                'type': 'Note',
                'url': replace_users_with_at(actor) + '/pinned'
            }
    return pinnedPostJson
|
|
|
|
|
|
|
|
|
2021-12-28 19:33:29 +00:00
|
|
|
def json_pin_post(base_dir: str, http_prefix: str,
                  nickname: str, domain: str,
                  domain_full: str, system_language: str) -> {}:
    """Returns a pinned post as json
    """
    pinned = get_pinned_post_as_json(base_dir, http_prefix,
                                     nickname, domain,
                                     domain_full, system_language)
    # an OrderedCollection with either zero or one item
    featuredItems = [pinned] if pinned else []

    actor = local_actor_url(http_prefix, nickname, domain_full)
    return {
        '@context': get_individual_post_context(),
        'id': actor + '/collections/featured',
        'orderedItems': featuredItems,
        'totalItems': len(featuredItems),
        'type': 'OrderedCollection'
    }
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def regenerate_index_for_box(base_dir: str,
                             nickname: str, domain: str, boxName: str) -> None:
    """Generates an index for the given box if it doesn't exist
    Used by unit tests to artificially create an index
    """
    boxDir = acct_dir(base_dir, nickname, domain) + '/' + boxName
    boxIndexFilename = boxDir + '.index'

    # only generate when the box exists and has no index yet
    if not os.path.isdir(boxDir):
        return
    if os.path.isfile(boxIndexFilename):
        return

    # collect post filenames from the top level of the box only
    indexLines = []
    for _, _, filenames in os.walk(boxDir):
        indexLines = [fname for fname in filenames if ':##' in fname]
        break

    # newest first
    indexLines.sort(reverse=True)

    result = ''.join(line + '\n' for line in indexLines)
    with open(boxIndexFilename, 'w+') as indexFile:
        indexFile.write(result)
    print('Index generated for ' + boxName + '\n' + result)
|
|
|
|
|
|
|
|
|
2021-12-28 19:33:29 +00:00
|
|
|
def create_public_post(base_dir: str,
                       nickname: str, domain: str, port: int, http_prefix: str,
                       content: str, followersOnly: bool, saveToFile: bool,
                       client_to_server: bool, commentsEnabled: bool,
                       attachImageFilename: str, mediaType: str,
                       imageDescription: str, city: str,
                       inReplyTo: str,
                       inReplyToAtomUri: str, subject: str,
                       schedulePost: bool,
                       eventDate: str, eventTime: str,
                       location: str,
                       isArticle: bool,
                       system_language: str,
                       conversationId: str, low_bandwidth: bool,
                       content_license_url: str) -> {}:
    """Public post.

    Addressed To #Public with the account's followers collection in CC.
    Returns the new post json from _create_post_base.
    """
    domain_full = get_full_domain(domain, port)
    # a public post is not a moderation report and has no event fields
    isModerationReport = False
    eventUUID = None
    category = None
    joinMode = None
    endDate = None
    endTime = None
    maximumAttendeeCapacity = None
    repliesModerationOption = None
    anonymousParticipationEnabled = None
    eventStatus = None
    ticketUrl = None
    localActor = local_actor_url(http_prefix, nickname, domain_full)
    return _create_post_base(base_dir, nickname, domain, port,
                             'https://www.w3.org/ns/activitystreams#Public',
                             localActor + '/followers',
                             http_prefix, content, followersOnly, saveToFile,
                             client_to_server, commentsEnabled,
                             attachImageFilename, mediaType,
                             imageDescription, city,
                             isModerationReport, isArticle,
                             inReplyTo, inReplyToAtomUri, subject,
                             schedulePost, eventDate, eventTime, location,
                             eventUUID, category, joinMode, endDate, endTime,
                             maximumAttendeeCapacity,
                             repliesModerationOption,
                             anonymousParticipationEnabled,
                             eventStatus, ticketUrl, system_language,
                             conversationId, low_bandwidth,
                             content_license_url)
|
|
|
|
|
|
|
|
|
|
|
|
def _append_citations_to_blog_post(base_dir: str,
                                   nickname: str, domain: str,
                                   blogJson: {}) -> None:
    """Appends any citations to a new blog post
    """
    # citations are stored one per line in a per-account file,
    # with fields separated by '#####'
    citationsFilename = \
        acct_dir(base_dir, nickname, domain) + '/.citations.txt'
    if not os.path.isfile(citationsFilename):
        return
    separator = '#####'
    with open(citationsFilename, 'r') as citFile:
        for citationLine in citFile.readlines():
            if separator not in citationLine:
                continue
            fields = citationLine.strip().split(separator)
            if len(fields) != 3:
                continue
            # fields[0] is the date string, which is not used here
            blogJson['object']['tag'].append({
                "type": "Article",
                "name": fields[1],
                "url": fields[2]
            })
|
|
|
|
|
|
|
|
|
2021-12-28 19:33:29 +00:00
|
|
|
def create_blog_post(base_dir: str,
                     nickname: str, domain: str, port: int, http_prefix: str,
                     content: str, followersOnly: bool, saveToFile: bool,
                     client_to_server: bool, commentsEnabled: bool,
                     attachImageFilename: str, mediaType: str,
                     imageDescription: str, city: str,
                     inReplyTo: str, inReplyToAtomUri: str,
                     subject: str, schedulePost: bool,
                     eventDate: str, eventTime: str,
                     location: str, system_language: str,
                     conversationId: str, low_bandwidth: bool,
                     content_license_url: str) -> {}:
    """Creates a blog post: a public post of type Article with citations
    """
    newBlog = \
        create_public_post(base_dir,
                           nickname, domain, port, http_prefix,
                           content, followersOnly, saveToFile,
                           client_to_server, commentsEnabled,
                           attachImageFilename, mediaType,
                           imageDescription, city,
                           inReplyTo, inReplyToAtomUri, subject,
                           schedulePost,
                           eventDate, eventTime, location,
                           True, system_language, conversationId,
                           low_bandwidth, content_license_url)
    # blog posts are viewed via the /users/ path rather than /@
    newBlog['object']['url'] = \
        newBlog['object']['url'].replace('/@', '/users/')
    _append_citations_to_blog_post(base_dir, nickname, domain, newBlog)

    return newBlog
|
2020-10-07 21:26:03 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def create_news_post(base_dir: str,
                     domain: str, port: int, http_prefix: str,
                     content: str, followersOnly: bool, saveToFile: bool,
                     attachImageFilename: str, mediaType: str,
                     imageDescription: str, city: str,
                     subject: str, system_language: str,
                     conversationId: str, low_bandwidth: bool,
                     content_license_url: str) -> {}:
    """Creates a post for the 'news' account, typed as an Article
    """
    # news posts are never replies, are not scheduled, have no location
    # and have comments disabled
    newsJson = \
        create_public_post(base_dir,
                           'news', domain, port, http_prefix,
                           content, followersOnly, saveToFile,
                           False, False,
                           attachImageFilename, mediaType,
                           imageDescription, city,
                           None, None, subject,
                           False,
                           None, None, None,
                           True, system_language, conversationId,
                           low_bandwidth, content_license_url)
    newsJson['object']['type'] = 'Article'
    return newsJson
|
|
|
|
|
|
|
|
|
2021-12-28 19:33:29 +00:00
|
|
|
def create_question_post(base_dir: str,
                         nickname: str, domain: str, port: int,
                         http_prefix: str,
                         content: str, qOptions: [],
                         followersOnly: bool, saveToFile: bool,
                         client_to_server: bool, commentsEnabled: bool,
                         attachImageFilename: str, mediaType: str,
                         imageDescription: str, city: str,
                         subject: str, durationDays: int,
                         system_language: str, low_bandwidth: bool,
                         content_license_url: str) -> {}:
    """Question post with multiple choice options.

    qOptions: list of option strings, one per answer
    durationDays: number of days from now until the poll closes
    Returns the new post json with a Question object.
    """
    domain_full = get_full_domain(domain, port)
    localActor = local_actor_url(http_prefix, nickname, domain_full)
    # create a public Note first, then convert it into a Question
    message_json = \
        _create_post_base(base_dir, nickname, domain, port,
                          'https://www.w3.org/ns/activitystreams#Public',
                          localActor + '/followers',
                          http_prefix, content, followersOnly, saveToFile,
                          client_to_server, commentsEnabled,
                          attachImageFilename, mediaType,
                          imageDescription, city,
                          False, False, None, None, subject,
                          False, None, None, None, None, None,
                          None, None, None,
                          None, None, None, None, None, system_language,
                          None, low_bandwidth, content_license_url)
    message_json['object']['type'] = 'Question'
    message_json['object']['oneOf'] = []
    message_json['object']['votersCount'] = 0
    # the poll ends durationDays from now, expressed via days since epoch
    curr_time = datetime.datetime.utcnow()
    daysSinceEpoch = \
        int((curr_time - datetime.datetime(1970, 1, 1)).days + durationDays)
    endTime = datetime.datetime(1970, 1, 1) + \
        datetime.timedelta(daysSinceEpoch)
    message_json['object']['endTime'] = endTime.strftime("%Y-%m-%dT%H:%M:%SZ")
    # each option becomes a Note with an empty replies collection
    for questionOption in qOptions:
        message_json['object']['oneOf'].append({
            "type": "Note",
            "name": questionOption,
            "replies": {
                "type": "Collection",
                "totalItems": 0
            }
        })
    return message_json
|
2019-11-25 22:34:26 +00:00
|
|
|
|
2020-02-24 13:32:19 +00:00
|
|
|
|
2021-12-28 19:33:29 +00:00
|
|
|
def create_unlisted_post(base_dir: str,
                         nickname: str, domain: str, port: int,
                         http_prefix: str,
                         content: str, followersOnly: bool, saveToFile: bool,
                         client_to_server: bool, commentsEnabled: bool,
                         attachImageFilename: str, mediaType: str,
                         imageDescription: str, city: str,
                         inReplyTo: str, inReplyToAtomUri: str,
                         subject: str, schedulePost: bool,
                         eventDate: str, eventTime: str,
                         location: str, system_language: str,
                         conversationId: str, low_bandwidth: bool,
                         content_license_url: str) -> {}:
    """Unlisted post. This has the #Public and followers links inverted.

    Compared with create_public_post: followers in To, #Public in CC.
    """
    domain_full = get_full_domain(domain, port)
    localActor = local_actor_url(http_prefix, nickname, domain_full)
    # not a moderation report, not an article; no event fields
    return _create_post_base(base_dir, nickname, domain, port,
                             localActor + '/followers',
                             'https://www.w3.org/ns/activitystreams#Public',
                             http_prefix, content, followersOnly, saveToFile,
                             client_to_server, commentsEnabled,
                             attachImageFilename, mediaType,
                             imageDescription, city,
                             False, False,
                             inReplyTo, inReplyToAtomUri, subject,
                             schedulePost, eventDate, eventTime, location,
                             None, None, None, None, None,
                             None, None, None, None, None, system_language,
                             conversationId, low_bandwidth,
                             content_license_url)
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2019-07-28 11:08:14 +00:00
|
|
|
|
2021-12-28 19:33:29 +00:00
|
|
|
def create_followers_only_post(base_dir: str,
                               nickname: str, domain: str, port: int,
                               http_prefix: str,
                               content: str, followersOnly: bool,
                               saveToFile: bool,
                               client_to_server: bool, commentsEnabled: bool,
                               attachImageFilename: str, mediaType: str,
                               imageDescription: str, city: str,
                               inReplyTo: str,
                               inReplyToAtomUri: str,
                               subject: str, schedulePost: bool,
                               eventDate: str, eventTime: str,
                               location: str, system_language: str,
                               conversationId: str, low_bandwidth: bool,
                               content_license_url: str) -> {}:
    """Followers only post.

    Addressed To the account's followers collection only, with no CC.
    """
    domain_full = get_full_domain(domain, port)
    localActor = local_actor_url(http_prefix, nickname, domain_full)
    # not a moderation report, not an article; no event fields
    return _create_post_base(base_dir, nickname, domain, port,
                             localActor + '/followers',
                             None,
                             http_prefix, content, followersOnly, saveToFile,
                             client_to_server, commentsEnabled,
                             attachImageFilename, mediaType,
                             imageDescription, city,
                             False, False,
                             inReplyTo, inReplyToAtomUri, subject,
                             schedulePost, eventDate, eventTime, location,
                             None, None, None, None, None,
                             None, None, None, None, None, system_language,
                             conversationId, low_bandwidth,
                             content_license_url)
|
|
|
|
|
|
|
|
|
|
|
|
def get_mentioned_people(base_dir: str, http_prefix: str,
                         content: str, domain: str, debug: bool) -> []:
    """Extracts a list of mentioned actors from the given message content.

    Mentions are words beginning with '@'. Local handles (no domain part)
    are only accepted if the account directory exists; external handles
    must have a dotted domain or 'localhost'.
    Returns a list of actor urls, or None if the content has no '@'.
    """
    if '@' not in content:
        return None
    mentions = []
    words = content.split(' ')
    for wrd in words:
        if not wrd.startswith('@'):
            continue
        handle = wrd[1:]
        if debug:
            print('DEBUG: mentioned handle ' + handle)
        if '@' not in handle:
            # a local mention: expand to nickname@domain and check the
            # account exists on this instance
            handle = handle + '@' + domain
            if not os.path.isdir(base_dir + '/accounts/' + handle):
                continue
        else:
            # an external mention: require a plausible domain
            externalDomain = handle.split('@')[1]
            if not ('.' in externalDomain or
                    externalDomain == 'localhost'):
                continue
        mentionedNickname = handle.split('@')[0]
        mentionedDomain = handle.split('@')[1].strip('\n').strip('\r')
        if ':' in mentionedDomain:
            mentionedDomain = remove_domain_port(mentionedDomain)
        if not valid_nickname(mentionedDomain, mentionedNickname):
            continue
        # note: the actor url keeps any port from the original handle
        actor = \
            local_actor_url(http_prefix, mentionedNickname,
                            handle.split('@')[1])
        mentions.append(actor)
    return mentions
|
2019-07-27 22:48:34 +00:00
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-28 19:33:29 +00:00
|
|
|
def create_direct_message_post(base_dir: str,
                               nickname: str, domain: str, port: int,
                               http_prefix: str,
                               content: str, followersOnly: bool,
                               saveToFile: bool, client_to_server: bool,
                               commentsEnabled: bool,
                               attachImageFilename: str, mediaType: str,
                               imageDescription: str, city: str,
                               inReplyTo: str,
                               inReplyToAtomUri: str,
                               subject: str, debug: bool,
                               schedulePost: bool,
                               eventDate: str, eventTime: str,
                               location: str, system_language: str,
                               conversationId: str, low_bandwidth: bool,
                               content_license_url: str) -> {}:
    """Direct Message post

    Builds a post addressed only to the people mentioned within the
    content. Returns the post json, or None if nobody is mentioned.
    """
    # replace petnames with their full handles before looking for mentions
    content = resolve_petnames(base_dir, nickname, domain, content)
    mentionedPeople = \
        get_mentioned_people(base_dir, http_prefix, content, domain, debug)
    if debug:
        print('mentionedPeople: ' + str(mentionedPeople))
    if not mentionedPeople:
        # a DM with no recipients cannot be created
        return None
    postTo = None
    postCc = None
    message_json = \
        _create_post_base(base_dir, nickname, domain, port,
                          postTo, postCc,
                          http_prefix, content, followersOnly, saveToFile,
                          client_to_server, commentsEnabled,
                          attachImageFilename, mediaType,
                          imageDescription, city,
                          False, False,
                          inReplyTo, inReplyToAtomUri, subject,
                          schedulePost, eventDate, eventTime, location,
                          None, None, None, None, None,
                          None, None, None, None, None, system_language,
                          conversationId, low_bandwidth,
                          content_license_url)
    # mentioned recipients go into To rather than Cc
    message_json['to'] = message_json['object']['cc']
    message_json['object']['to'] = message_json['to']
    message_json['cc'] = []
    message_json['object']['cc'] = []
    if schedulePost:
        # store the DM in the scheduled posts box for later sending
        post_id = remove_id_ending(message_json['object']['id'])
        save_post_to_box(base_dir, http_prefix, post_id,
                         nickname, domain, message_json, 'scheduled')
    return message_json
|
2020-06-24 11:53:43 +00:00
|
|
|
|
|
|
|
|
2021-12-28 19:33:29 +00:00
|
|
|
def create_report_post(base_dir: str,
                       nickname: str, domain: str, port: int, http_prefix: str,
                       content: str, followersOnly: bool, saveToFile: bool,
                       client_to_server: bool, commentsEnabled: bool,
                       attachImageFilename: str, mediaType: str,
                       imageDescription: str, city: str,
                       debug: bool, subject: str, system_language: str,
                       low_bandwidth: bool,
                       content_license_url: str) -> {}:
    """Send a report to moderators

    Builds one post per moderator actor and drops a .newReport
    notification file into each moderator's account directory.
    Returns the last post json created, or None if there are no
    moderators configured and no admin account.
    """
    domain_full = get_full_domain(domain, port)

    # add a title to distinguish moderation reports from other posts
    reportTitle = 'Moderation Report'
    if not subject:
        subject = reportTitle
    else:
        if not subject.startswith(reportTitle):
            subject = reportTitle + ': ' + subject

    # create the list of moderators from the moderators file
    moderatorsList = []
    moderatorsFile = base_dir + '/accounts/moderators.txt'
    if os.path.isfile(moderatorsFile):
        with open(moderatorsFile, 'r') as fileHandler:
            for line in fileHandler:
                line = line.strip('\n').strip('\r')
                if line.startswith('#'):
                    # comment line
                    continue
                if line.startswith('/users/'):
                    # NOTE(review): this turns '/users/nick' into '//nick';
                    # presumably the leading slashes are tolerated downstream
                    line = line.replace('users', '')
                if line.startswith('@'):
                    line = line[1:]
                if '@' in line:
                    # full handle: take the nickname and build a local actor
                    nick = line.split('@')[0]
                    moderatorActor = \
                        local_actor_url(http_prefix, nick, domain_full)
                    if moderatorActor not in moderatorsList:
                        moderatorsList.append(moderatorActor)
                    continue
                if line.startswith('http') or line.startswith('hyper'):
                    # must be a local address - no remote moderators
                    if '://' + domain_full + '/' in line:
                        if line not in moderatorsList:
                            moderatorsList.append(line)
                else:
                    # bare nickname
                    if '/' not in line:
                        moderatorActor = \
                            local_actor_url(http_prefix, line, domain_full)
                        if moderatorActor not in moderatorsList:
                            moderatorsList.append(moderatorActor)
    if len(moderatorsList) == 0:
        # if there are no moderators then the admin becomes the moderator
        adminNickname = get_config_param(base_dir, 'admin')
        if adminNickname:
            localActor = \
                local_actor_url(http_prefix, adminNickname, domain_full)
            moderatorsList.append(localActor)
    if not moderatorsList:
        return None
    if debug:
        print('DEBUG: Sending report to moderators')
        print(str(moderatorsList))
    postTo = moderatorsList
    postCc = None
    post_json_object = None
    for toUrl in postTo:
        # who is this report going to?
        toNickname = toUrl.split('/users/')[1]
        handle = toNickname + '@' + domain

        post_json_object = \
            _create_post_base(base_dir, nickname, domain, port,
                              toUrl, postCc,
                              http_prefix, content, followersOnly, saveToFile,
                              client_to_server, commentsEnabled,
                              attachImageFilename, mediaType,
                              imageDescription, city,
                              True, False, None, None, subject,
                              False, None, None, None, None, None,
                              None, None, None,
                              None, None, None, None, None, system_language,
                              None, low_bandwidth, content_license_url)
        if not post_json_object:
            continue

        # save a notification file so that the moderator
        # knows something new has appeared
        newReportFile = base_dir + '/accounts/' + handle + '/.newReport'
        if os.path.isfile(newReportFile):
            # notification already pending for this moderator
            continue
        try:
            with open(newReportFile, 'w+') as fp:
                fp.write(toUrl + '/moderation')
        except OSError:
            print('EX: create_report_post unable to write ' + newReportFile)

    return post_json_object
|
2019-08-11 11:25:27 +00:00
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def thread_send_post(session, post_jsonStr: str, federation_list: [],
                     inboxUrl: str, base_dir: str,
                     signatureHeaderJson: {}, postLog: [],
                     debug: bool) -> None:
    """Sends a post with retries

    Runs inside a send thread. Tries up to 20 times with a fixed
    interval between attempts, stopping early on success, on an
    unauthorized response, or on a 5xx server error.
    """
    tries = 0
    sendIntervalSec = 30
    for attempt in range(20):
        postResult = None
        unauthorized = False
        if debug:
            print('Getting post_json_string for ' + inboxUrl)
        try:
            postResult, unauthorized, returnCode = \
                post_json_string(session, post_jsonStr, federation_list,
                                 inboxUrl, signatureHeaderJson,
                                 debug)
            if returnCode >= 500 and returnCode < 600:
                # if an instance is returning a code which indicates that
                # it might have a runtime error, like 503, then don't
                # continue to post to it
                break
            if debug:
                print('Obtained post_json_string for ' + inboxUrl +
                      ' unauthorized: ' + str(unauthorized))
        except Exception as ex:
            print('ERROR: post_json_string failed ' + str(ex))
        if unauthorized:
            # no point retrying if the receiving instance rejects us
            print('WARN: thread_send_post: Post is unauthorized ' +
                  inboxUrl + ' ' + post_jsonStr)
            break
        if postResult:
            logStr = 'Success on try ' + str(tries) + ': ' + post_jsonStr
        else:
            logStr = 'Retry ' + str(tries) + ': ' + post_jsonStr
        postLog.append(logStr)
        # keep the length of the log finite
        # Don't accumulate massive files on systems with limited resources
        while len(postLog) > 16:
            postLog.pop(0)
        if debug:
            # save the log file
            postLogFilename = base_dir + '/post.log'
            if os.path.isfile(postLogFilename):
                with open(postLogFilename, 'a+') as logFile:
                    logFile.write(logStr + '\n')
            else:
                with open(postLogFilename, 'w+') as logFile:
                    logFile.write(logStr + '\n')

        if postResult:
            if debug:
                print('DEBUG: successful json post to ' + inboxUrl)
            # our work here is done
            break
        if debug:
            print(post_jsonStr)
            print('DEBUG: json post to ' + inboxUrl +
                  ' failed. Waiting for ' +
                  str(sendIntervalSec) + ' seconds.')
        time.sleep(sendIntervalSec)
        tries += 1
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def send_post(signing_priv_key_pem: str, project_version: str,
              session, base_dir: str, nickname: str, domain: str, port: int,
              toNickname: str, toDomain: str, toPort: int, cc: str,
              http_prefix: str, content: str, followersOnly: bool,
              saveToFile: bool, client_to_server: bool,
              commentsEnabled: bool,
              attachImageFilename: str, mediaType: str,
              imageDescription: str, city: str,
              federation_list: [], send_threads: [], postLog: [],
              cached_webfingers: {}, person_cache: {},
              isArticle: bool, system_language: str,
              shared_items_federated_domains: [],
              sharedItemFederationTokens: {},
              low_bandwidth: bool, content_license_url: str,
              debug: bool = False, inReplyTo: str = None,
              inReplyToAtomUri: str = None, subject: str = None) -> int:
    """Post to another inbox. Used by unit tests.

    Resolves the destination via webfinger, builds and signs the post,
    then dispatches it on a background send thread. Returns 0 on
    success, otherwise a non-zero code identifying the failed step.
    """
    withDigest = True
    conversationId = None

    if toNickname == 'inbox':
        # shared inbox actor on @domain@domain
        toNickname = toDomain

    toDomain = get_full_domain(toDomain, toPort)

    handle = http_prefix + '://' + toDomain + '/@' + toNickname

    # lookup the inbox for the To handle
    wfRequest = webfinger_handle(session, handle, http_prefix,
                                 cached_webfingers,
                                 domain, project_version, debug, False,
                                 signing_priv_key_pem)
    if not wfRequest:
        return 1
    if not isinstance(wfRequest, dict):
        print('WARN: Webfinger for ' + handle + ' did not return a dict. ' +
              str(wfRequest))
        return 1

    # s2s delivery goes to the inbox; c2s goes via the outbox,
    # and blog articles go to the tlblogs timeline
    if not client_to_server:
        postToBox = 'inbox'
    else:
        postToBox = 'outbox'
    if isArticle:
        postToBox = 'tlblogs'

    # get the actor inbox for the To handle
    originDomain = domain
    (inboxUrl, pubKeyId, pubKey, toPersonId, sharedInbox, avatarUrl,
     displayName, _) = get_person_box(signing_priv_key_pem,
                                      originDomain,
                                      base_dir, session, wfRequest,
                                      person_cache,
                                      project_version, http_prefix,
                                      nickname, domain, postToBox,
                                      72533)

    if not inboxUrl:
        return 3
    if not pubKey:
        return 4
    if not toPersonId:
        return 5
    # sharedInbox is optional

    post_json_object = \
        _create_post_base(base_dir, nickname, domain, port,
                          toPersonId, cc, http_prefix, content,
                          followersOnly, saveToFile, client_to_server,
                          commentsEnabled,
                          attachImageFilename, mediaType,
                          imageDescription, city,
                          False, isArticle, inReplyTo,
                          inReplyToAtomUri, subject,
                          False, None, None, None, None, None,
                          None, None, None,
                          None, None, None, None, None, system_language,
                          conversationId, low_bandwidth,
                          content_license_url)

    # get the senders private key
    privateKeyPem = _get_person_key(nickname, domain, base_dir, 'private')
    if len(privateKeyPem) == 0:
        return 6

    # the inbox url must be on the destination domain
    if toDomain not in inboxUrl:
        return 7
    postPath = inboxUrl.split(toDomain, 1)[1]

    if not post_json_object.get('signature'):
        # add a JSON-LD signature; best-effort, a failure is only warned
        try:
            signedPostJsonObject = post_json_object.copy()
            generate_json_signature(signedPostJsonObject, privateKeyPem)
            post_json_object = signedPostJsonObject
        except Exception as ex:
            print('WARN: failed to JSON-LD sign post, ' + str(ex))
            pass

    # convert json to string so that there are no
    # subsequent conversions after creating message body digest
    post_jsonStr = json.dumps(post_json_object)

    # construct the http header, including the message body digest
    signatureHeaderJson = \
        create_signed_header(None, privateKeyPem, nickname, domain, port,
                             toDomain, toPort,
                             postPath, http_prefix, withDigest, post_jsonStr,
                             None)

    # if the "to" domain is within the shared items
    # federation list then send the token for this domain
    # so that it can request a catalog
    if toDomain in shared_items_federated_domains:
        domain_full = get_full_domain(domain, port)
        if sharedItemFederationTokens.get(domain_full):
            signatureHeaderJson['Origin'] = domain_full
            signatureHeaderJson['SharesCatalog'] = \
                sharedItemFederationTokens[domain_full]
            if debug:
                print('SharesCatalog added to header')
        elif debug:
            print(domain_full + ' not in sharedItemFederationTokens')
    elif debug:
        print(toDomain + ' not in shared_items_federated_domains ' +
              str(shared_items_federated_domains))

    if debug:
        print('signatureHeaderJson: ' + str(signatureHeaderJson))

    # Keep the number of threads being used small
    while len(send_threads) > 1000:
        print('WARN: Maximum threads reached - killing send thread')
        send_threads[0].kill()
        send_threads.pop(0)
        print('WARN: thread killed')
    # the header is copied so the thread owns its own mutable dict
    thr = \
        thread_with_trace(target=thread_send_post,
                          args=(session,
                                post_jsonStr,
                                federation_list,
                                inboxUrl, base_dir,
                                signatureHeaderJson.copy(),
                                postLog,
                                debug), daemon=True)
    send_threads.append(thr)
    thr.start()
    return 0
|
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def send_post_via_server(signing_priv_key_pem: str, project_version: str,
                         base_dir: str, session,
                         fromNickname: str, password: str,
                         fromDomain: str, fromPort: int,
                         toNickname: str, toDomain: str, toPort: int, cc: str,
                         http_prefix: str, content: str, followersOnly: bool,
                         commentsEnabled: bool,
                         attachImageFilename: str, mediaType: str,
                         imageDescription: str, city: str,
                         cached_webfingers: {}, person_cache: {},
                         isArticle: bool, system_language: str,
                         low_bandwidth: bool,
                         content_license_url: str,
                         debug: bool = False,
                         inReplyTo: str = None,
                         inReplyToAtomUri: str = None,
                         conversationId: str = None,
                         subject: str = None) -> int:
    """Send a post via a proxy (c2s)

    Authenticates with basic auth against the sender's own outbox and
    posts the json there, letting the server handle delivery. Returns 0
    on success, otherwise a non-zero code identifying the failed step.
    """
    if not session:
        print('WARN: No session for send_post_via_server')
        return 6

    fromDomainFull = get_full_domain(fromDomain, fromPort)

    handle = http_prefix + '://' + fromDomainFull + '/@' + fromNickname

    # lookup the inbox for the To handle
    wfRequest = \
        webfinger_handle(session, handle, http_prefix, cached_webfingers,
                         fromDomainFull, project_version, debug, False,
                         signing_priv_key_pem)
    if not wfRequest:
        if debug:
            print('DEBUG: post webfinger failed for ' + handle)
        return 1
    if not isinstance(wfRequest, dict):
        print('WARN: post webfinger for ' + handle +
              ' did not return a dict. ' + str(wfRequest))
        return 1

    # c2s always posts via the outbox; blog articles go to tlblogs
    postToBox = 'outbox'
    if isArticle:
        postToBox = 'tlblogs'

    # get the actor inbox for the To handle
    originDomain = fromDomain
    (inboxUrl, pubKeyId, pubKey, fromPersonId, sharedInbox, avatarUrl,
     displayName, _) = get_person_box(signing_priv_key_pem,
                                      originDomain,
                                      base_dir, session, wfRequest,
                                      person_cache,
                                      project_version, http_prefix,
                                      fromNickname,
                                      fromDomainFull, postToBox,
                                      82796)
    if not inboxUrl:
        if debug:
            print('DEBUG: post no ' + postToBox +
                  ' was found for ' + handle)
        return 3
    if not fromPersonId:
        if debug:
            print('DEBUG: post no actor was found for ' + handle)
        return 4

    # Get the json for the c2s post, not saving anything to file
    # Note that base_dir is set to None
    saveToFile = False
    client_to_server = True
    # resolve special destination keywords into addressing urls
    if toDomain.lower().endswith('public'):
        toPersonId = 'https://www.w3.org/ns/activitystreams#Public'
        cc = local_actor_url(http_prefix, fromNickname, fromDomainFull) + \
            '/followers'
    else:
        if toDomain.lower().endswith('followers') or \
           toDomain.lower().endswith('followersonly'):
            toPersonId = \
                local_actor_url(http_prefix, fromNickname, fromDomainFull) + \
                '/followers'
        else:
            toDomainFull = get_full_domain(toDomain, toPort)
            toPersonId = local_actor_url(http_prefix, toNickname, toDomainFull)

    post_json_object = \
        _create_post_base(base_dir,
                          fromNickname, fromDomain, fromPort,
                          toPersonId, cc, http_prefix, content,
                          followersOnly, saveToFile, client_to_server,
                          commentsEnabled,
                          attachImageFilename, mediaType,
                          imageDescription, city,
                          False, isArticle, inReplyTo,
                          inReplyToAtomUri, subject,
                          False, None, None, None, None, None,
                          None, None, None,
                          None, None, None, None, None, system_language,
                          conversationId, low_bandwidth,
                          content_license_url)

    authHeader = create_basic_auth_header(fromNickname, password)

    if attachImageFilename:
        headers = {
            'host': fromDomainFull,
            'Authorization': authHeader
        }
        postResult = \
            post_image(session, attachImageFilename, [],
                       inboxUrl, headers)
        if not postResult:
            if debug:
                print('DEBUG: post failed to upload image')
            # image upload failure is deliberately non-fatal
            # return 9

    headers = {
        'host': fromDomainFull,
        'Content-type': 'application/json',
        'Authorization': authHeader
    }
    postDumps = json.dumps(post_json_object)
    postResult, unauthorized, returnCode = \
        post_json_string(session, postDumps, [],
                         inboxUrl, headers, debug, 5, True)
    if not postResult:
        if debug:
            if unauthorized:
                print('DEBUG: POST failed for c2s to ' +
                      inboxUrl + ' unathorized')
            else:
                print('DEBUG: POST failed for c2s to ' +
                      inboxUrl + ' return code ' + str(returnCode))
        return 5

    if debug:
        print('DEBUG: c2s POST success')
    return 0
|
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def group_followers_by_domain(base_dir: str, nickname: str, domain: str) -> {}:
    """Returns a dictionary with followers grouped by domain

    Reads accounts/<nickname>@<domain>/followers.txt and maps each
    follower domain to the list of follower handles on that domain.
    Returns None if the followers file does not exist.
    """
    followersFilename = \
        base_dir + '/accounts/' + nickname + '@' + domain + '/followers.txt'
    if not os.path.isfile(followersFilename):
        return None
    grouped = {}
    with open(followersFilename, 'r') as followersFile:
        for line in followersFile:
            # skip anything which is not a handle
            if '@' not in line:
                continue
            cleanHandle = line.strip().replace('\n', '').replace('\r', '')
            handleDomain = cleanHandle.split('@')[1]
            if grouped.get(handleDomain):
                grouped[handleDomain].append(cleanHandle)
            else:
                grouped[handleDomain] = [cleanHandle]
    return grouped
|
2019-10-16 10:58:31 +00:00
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _add_followers_to_public_post(post_json_object: {}) -> None:
|
2019-10-16 10:58:31 +00:00
|
|
|
"""Adds followers entry to cc if it doesn't exist
|
|
|
|
"""
|
2021-12-25 22:09:19 +00:00
|
|
|
if not post_json_object.get('actor'):
|
2019-10-16 10:58:31 +00:00
|
|
|
return
|
|
|
|
|
2021-12-25 22:09:19 +00:00
|
|
|
if isinstance(post_json_object['object'], str):
|
|
|
|
if not post_json_object.get('to'):
|
2019-10-16 10:58:31 +00:00
|
|
|
return
|
2021-12-25 22:09:19 +00:00
|
|
|
if len(post_json_object['to']) > 1:
|
2019-10-16 10:58:31 +00:00
|
|
|
return
|
2021-12-25 22:09:19 +00:00
|
|
|
if len(post_json_object['to']) == 0:
|
2019-10-16 10:58:31 +00:00
|
|
|
return
|
2021-12-25 22:09:19 +00:00
|
|
|
if not post_json_object['to'][0].endswith('#Public'):
|
2019-10-16 10:58:31 +00:00
|
|
|
return
|
2021-12-25 22:09:19 +00:00
|
|
|
if post_json_object.get('cc'):
|
2019-10-16 10:58:31 +00:00
|
|
|
return
|
2021-12-25 22:09:19 +00:00
|
|
|
post_json_object['cc'] = post_json_object['actor'] + '/followers'
|
2021-12-26 10:57:03 +00:00
|
|
|
elif has_object_dict(post_json_object):
|
2021-12-25 22:09:19 +00:00
|
|
|
if not post_json_object['object'].get('to'):
|
2019-10-16 10:58:31 +00:00
|
|
|
return
|
2021-12-25 22:09:19 +00:00
|
|
|
if len(post_json_object['object']['to']) > 1:
|
2019-10-16 10:58:31 +00:00
|
|
|
return
|
2021-12-25 22:09:19 +00:00
|
|
|
elif len(post_json_object['object']['to']) == 0:
|
2019-10-16 10:58:31 +00:00
|
|
|
return
|
2021-12-25 22:09:19 +00:00
|
|
|
elif not post_json_object['object']['to'][0].endswith('#Public'):
|
2019-10-16 10:58:31 +00:00
|
|
|
return
|
2021-12-25 22:09:19 +00:00
|
|
|
if post_json_object['object'].get('cc'):
|
2019-10-16 10:58:31 +00:00
|
|
|
return
|
2021-12-25 22:09:19 +00:00
|
|
|
post_json_object['object']['cc'] = \
|
|
|
|
post_json_object['actor'] + '/followers'
|
2020-04-04 10:05:27 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def send_signed_json(post_json_object: {}, session, base_dir: str,
                     nickname: str, domain: str, port: int,
                     toNickname: str, toDomain: str, toPort: int, cc: str,
                     http_prefix: str, saveToFile: bool,
                     client_to_server: bool, federation_list: [],
                     send_threads: [], postLog: [], cached_webfingers: {},
                     person_cache: {}, debug: bool, project_version: str,
                     sharedItemsToken: str, group_account: bool,
                     signing_priv_key_pem: str,
                     sourceId: int) -> int:
    """Sends a signed json object to an inbox/outbox

    The destination inbox is resolved via webfinger, the post is
    optionally JSON-LD signed, serialized once, wrapped in a signed
    http header and then handed to a send thread.

    Returns a status code:
      0 - send thread created and appended to send_threads
      1 - webfinger lookup failed or returned a non-dict
      3 - no inbox url could be resolved
      4 - destination public key missing
      5 - destination person id missing
      6 - sender's private key not found
      7 - resolved inbox url is not on the destination domain
      8 - no session supplied
      9 - destination domain is inactive
    """
    if debug:
        print('DEBUG: send_signed_json start')
    if not session:
        print('WARN: No session specified for send_signed_json')
        return 8
    # the http signature always includes a digest of the message body
    withDigest = True

    # onion/i2p destinations are plain http
    if toDomain.endswith('.onion') or toDomain.endswith('.i2p'):
        http_prefix = 'http'

    if toNickname == 'inbox':
        # shared inbox actor on @domain@domain
        toNickname = toDomain

    toDomain = get_full_domain(toDomain, toPort)

    # don't try to send to a dead instance
    toDomainUrl = http_prefix + '://' + toDomain
    if not site_is_active(toDomainUrl, 10):
        print('Domain is inactive: ' + toDomainUrl)
        return 9
    print('Domain is active: ' + toDomainUrl)
    handleBase = toDomainUrl + '/@'
    if toNickname:
        handle = handleBase + toNickname
    else:
        # fallback handle for single user instances
        singleUserInstanceNickname = 'dev'
        handle = handleBase + singleUserInstanceNickname

    if debug:
        print('DEBUG: handle - ' + handle + ' toPort ' + str(toPort))

    # lookup the inbox for the To handle
    wfRequest = webfinger_handle(session, handle, http_prefix,
                                 cached_webfingers,
                                 domain, project_version, debug, group_account,
                                 signing_priv_key_pem)
    if not wfRequest:
        if debug:
            print('DEBUG: webfinger for ' + handle + ' failed')
        return 1
    if not isinstance(wfRequest, dict):
        print('WARN: Webfinger for ' + handle + ' did not return a dict. ' +
              str(wfRequest))
        return 1

    # errors in the webfinger result are reported but not fatal
    if wfRequest.get('errors'):
        if debug:
            print('DEBUG: webfinger for ' + handle +
                  ' failed with errors ' + str(wfRequest['errors']))

    # server-to-server delivery goes to the inbox,
    # client-to-server posting goes to the outbox
    if not client_to_server:
        postToBox = 'inbox'
    else:
        postToBox = 'outbox'

    # get the actor inbox/outbox for the To handle
    originDomain = domain
    (inboxUrl, pubKeyId, pubKey, toPersonId, sharedInboxUrl, avatarUrl,
     displayName, _) = get_person_box(signing_priv_key_pem,
                                      originDomain,
                                      base_dir, session, wfRequest,
                                      person_cache,
                                      project_version, http_prefix,
                                      nickname, domain, postToBox,
                                      sourceId)

    print("inboxUrl: " + str(inboxUrl))
    print("toPersonId: " + str(toPersonId))
    print("sharedInboxUrl: " + str(sharedInboxUrl))
    if inboxUrl:
        if inboxUrl.endswith('/actor/inbox'):
            # generic actor inbox: use the shared inbox instead
            inboxUrl = sharedInboxUrl

    if not inboxUrl:
        if debug:
            print('DEBUG: missing inboxUrl')
        return 3

    if debug:
        print('DEBUG: Sending to endpoint ' + inboxUrl)

    if not pubKey:
        if debug:
            print('DEBUG: missing pubkey')
        return 4
    if not toPersonId:
        if debug:
            print('DEBUG: missing personId')
        return 5
    # sharedInbox is optional

    # get the senders private key
    privateKeyPem = \
        _get_person_key(nickname, domain, base_dir, 'private', debug)
    if len(privateKeyPem) == 0:
        if debug:
            print('DEBUG: Private key not found for ' +
                  nickname + '@' + domain +
                  ' in ' + base_dir + '/keys/private')
        return 6

    # sanity check: the inbox must be on the destination domain
    if toDomain not in inboxUrl:
        if debug:
            print('DEBUG: ' + toDomain + ' is not in ' + inboxUrl)
        return 7
    postPath = inboxUrl.split(toDomain, 1)[1]

    _add_followers_to_public_post(post_json_object)

    # JSON-LD sign the post if it isn't already signed;
    # signing failure is non-fatal and the unsigned post is sent
    if not post_json_object.get('signature'):
        try:
            signedPostJsonObject = post_json_object.copy()
            generate_json_signature(signedPostJsonObject, privateKeyPem)
            post_json_object = signedPostJsonObject
        except Exception as ex:
            print('WARN: failed to JSON-LD sign post, ' + str(ex))
            pass

    # convert json to string so that there are no
    # subsequent conversions after creating message body digest
    post_jsonStr = json.dumps(post_json_object)

    # construct the http header, including the message body digest
    signatureHeaderJson = \
        create_signed_header(None, privateKeyPem, nickname, domain, port,
                             toDomain, toPort,
                             postPath, http_prefix, withDigest, post_jsonStr,
                             None)
    # optionally add a token so that the receiving instance may access
    # your shared items catalog
    if sharedItemsToken:
        signatureHeaderJson['Origin'] = get_full_domain(domain, port)
        signatureHeaderJson['SharesCatalog'] = sharedItemsToken
    elif debug:
        print('Not sending shared items federation token')

    # Keep the number of threads being used small
    while len(send_threads) > 1000:
        print('WARN: Maximum threads reached - killing send thread')
        send_threads[0].kill()
        send_threads.pop(0)
        print('WARN: thread killed')

    if debug:
        print('DEBUG: starting thread to send post')
        pprint(post_json_object)
    thr = \
        thread_with_trace(target=thread_send_post,
                          args=(session,
                                post_jsonStr,
                                federation_list,
                                inboxUrl, base_dir,
                                signatureHeaderJson.copy(),
                                postLog,
                                debug), daemon=True)
    send_threads.append(thr)
    # thr.start()
    # NOTE(review): the thread is appended but not started here -
    # presumably started later by whoever drains send_threads; confirm
    return 0
|
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-28 19:33:29 +00:00
|
|
|
def add_to_field(activityType: str, post_json_object: {},
                 debug: bool) -> ({}, bool):
    """The Follow/Add/Remove activity doesn't have a 'to' field and so one
    needs to be added so that activity distribution happens in a consistent way
    Returns true if a 'to' field exists or was added
    """
    # nothing to do when a 'to' field is already present
    if post_json_object.get('to'):
        return post_json_object, True

    if debug:
        pprint(post_json_object)
        print('DEBUG: no "to" field when sending to named addresses 2')

    same_type = False
    field_added = False
    obj = post_json_object.get('object')
    if obj:
        if isinstance(obj, str):
            # object is a plain url string (eg. a Follow of an actor)
            if post_json_object.get('type') and \
               post_json_object['type'] == activityType:
                same_type = True
                if debug:
                    print('DEBUG: "to" field assigned to ' + activityType)
                # address the actor rather than an individual status
                recipient = obj.split('/statuses/')[0]
                post_json_object['to'] = [recipient]
                field_added = True
        elif has_object_dict(post_json_object):
            inner = post_json_object['object']
            # add a to field to bookmark add or remove
            if post_json_object.get('type') and \
               post_json_object.get('actor') and \
               inner.get('type'):
                if post_json_object['type'] in ('Add', 'Remove'):
                    if inner['type'] == 'Document':
                        post_json_object['to'] = [post_json_object['actor']]
                        inner['to'] = [post_json_object['actor']]
                        field_added = True

            if not field_added and inner.get('type'):
                if inner['type'] == activityType:
                    same_type = True
                    if isinstance(inner['object'], str):
                        if debug:
                            print('DEBUG: "to" field assigned to ' +
                                  activityType)
                        # the inner 'to' addresses the actor only,
                        # while the outer 'to' keeps the full url
                        inner['to'] = [inner['object'].split('/statuses/')[0]]
                        post_json_object['to'] = [inner['object']]
                        field_added = True

    # a mismatched activity type is treated as success (nothing to add)
    if not same_type or field_added:
        return post_json_object, True
    return post_json_object, False
|
2020-04-04 10:05:27 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _is_profile_update(post_json_object: {}) -> bool:
|
2021-07-04 12:50:42 +00:00
|
|
|
"""Is the given post a profile update?
|
|
|
|
for actor updates there is no 'to' within the object
|
|
|
|
"""
|
2021-12-25 22:09:19 +00:00
|
|
|
if post_json_object.get('type'):
|
2021-12-26 17:12:07 +00:00
|
|
|
if has_object_stringType(post_json_object, False):
|
2021-12-25 22:09:19 +00:00
|
|
|
if (post_json_object['type'] == 'Update' and
|
|
|
|
(post_json_object['object']['type'] == 'Person' or
|
|
|
|
post_json_object['object']['type'] == 'Application' or
|
|
|
|
post_json_object['object']['type'] == 'Group' or
|
|
|
|
post_json_object['object']['type'] == 'Service')):
|
2021-10-13 10:11:02 +00:00
|
|
|
return True
|
2021-07-04 12:50:42 +00:00
|
|
|
return False
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _send_to_named_addresses(session, base_dir: str,
                             nickname: str, domain: str,
                             onion_domain: str, i2p_domain: str, port: int,
                             http_prefix: str, federation_list: [],
                             send_threads: [], postLog: [],
                             cached_webfingers: {}, person_cache: {},
                             post_json_object: {}, debug: bool,
                             project_version: str,
                             shared_items_federated_domains: [],
                             sharedItemFederationTokens: {},
                             signing_priv_key_pem: str) -> None:
    """sends a post to the specific named addresses in to/cc

    Collects the individual (non-Public, non-followers) addresses from
    the relevant to/cc fields and sends a signed copy of the post to
    each of them via send_signed_json.
    """
    if not session:
        print('WARN: No session for sendToNamedAddresses')
        return
    if not post_json_object.get('object'):
        return
    isProfileUpdate = False
    if has_object_dict(post_json_object):
        if _is_profile_update(post_json_object):
            # use the original object, which has a 'to'
            recipientsObject = post_json_object
            isProfileUpdate = True

        if not isProfileUpdate:
            if not post_json_object['object'].get('to'):
                if debug:
                    pprint(post_json_object)
                    print('DEBUG: ' +
                          'no "to" field when sending to named addresses')
                # Follow/Join activities address the followed actor
                if has_object_stringType(post_json_object, debug):
                    if post_json_object['object']['type'] == 'Follow' or \
                       post_json_object['object']['type'] == 'Join':
                        post_json_obj2 = post_json_object['object']['object']
                        if isinstance(post_json_obj2, str):
                            if debug:
                                print('DEBUG: "to" field assigned to Follow')
                            post_json_object['object']['to'] = \
                                [post_json_object['object']['object']]
                # still no recipients, so nothing to send
                if not post_json_object['object'].get('to'):
                    return
            recipientsObject = post_json_object['object']
    else:
        # object is not a dict: try to synthesize a 'to' field
        # for Follow and Like activities
        post_json_object, fieldAdded = \
            add_to_field('Follow', post_json_object, debug)
        if not fieldAdded:
            return
        post_json_object, fieldAdded = \
            add_to_field('Like', post_json_object, debug)
        if not fieldAdded:
            return
        recipientsObject = post_json_object

    # gather individual addresses from to/cc, skipping the Public
    # collection and followers collections
    recipients = []
    recipientType = ('to', 'cc')
    for rType in recipientType:
        if not recipientsObject.get(rType):
            continue
        if isinstance(recipientsObject[rType], list):
            if debug:
                pprint(recipientsObject)
                print('recipientsObject: ' + str(recipientsObject[rType]))
            for address in recipientsObject[rType]:
                if not address:
                    continue
                # addresses are expected to be actor urls
                if '/' not in address:
                    continue
                if address.endswith('#Public'):
                    continue
                if address.endswith('/followers'):
                    continue
                recipients.append(address)
        elif isinstance(recipientsObject[rType], str):
            address = recipientsObject[rType]
            if address:
                if '/' in address:
                    if address.endswith('#Public'):
                        continue
                    if address.endswith('/followers'):
                        continue
                    recipients.append(address)
    if not recipients:
        if debug:
            print('DEBUG: no individual recipients')
        return
    if debug:
        print('DEBUG: Sending individually addressed posts: ' +
              str(recipients))
    # this is after the message has arrived at the server
    client_to_server = False
    for address in recipients:
        toNickname = get_nickname_from_actor(address)
        if not toNickname:
            continue
        toDomain, toPort = get_domain_from_actor(address)
        if not toDomain:
            continue
        # Don't send profile/actor updates to yourself
        if isProfileUpdate:
            domain_full = get_full_domain(domain, port)
            toDomainFull = get_full_domain(toDomain, toPort)
            if nickname == toNickname and \
               domain_full == toDomainFull:
                if debug:
                    print('Not sending profile update to self. ' +
                          nickname + '@' + domain_full)
                continue
        if debug:
            domain_full = get_full_domain(domain, port)
            toDomainFull = get_full_domain(toDomain, toPort)
            print('DEBUG: Post sending s2s: ' + nickname + '@' + domain_full +
                  ' to ' + toNickname + '@' + toDomainFull)

        # if we have an alt onion domain and we are sending to
        # another onion domain then switch the clearnet
        # domain for the onion one
        fromDomain = domain
        fromDomainFull = get_full_domain(domain, port)
        fromHttpPrefix = http_prefix
        if onion_domain:
            if toDomain.endswith('.onion'):
                fromDomain = onion_domain
                fromDomainFull = onion_domain
                fromHttpPrefix = 'http'
        elif i2p_domain:
            if toDomain.endswith('.i2p'):
                fromDomain = i2p_domain
                fromDomainFull = i2p_domain
                fromHttpPrefix = 'http'
        cc = []

        # if the "to" domain is within the shared items
        # federation list then send the token for this domain
        # so that it can request a catalog
        sharedItemsToken = None
        if toDomain in shared_items_federated_domains:
            if sharedItemFederationTokens.get(fromDomainFull):
                sharedItemsToken = sharedItemFederationTokens[fromDomainFull]

        group_account = has_group_type(base_dir, address, person_cache)

        send_signed_json(post_json_object, session, base_dir,
                         nickname, fromDomain, port,
                         toNickname, toDomain, toPort,
                         cc, fromHttpPrefix, True, client_to_server,
                         federation_list,
                         send_threads, postLog, cached_webfingers,
                         person_cache, debug, project_version,
                         sharedItemsToken, group_account,
                         signing_priv_key_pem, 34436782)
|
|
|
|
|
|
|
|
|
|
|
|
def send_to_named_addresses_thread(session, base_dir: str,
                                   nickname: str, domain: str,
                                   onion_domain: str,
                                   i2p_domain: str, port: int,
                                   http_prefix: str, federation_list: [],
                                   send_threads: [], postLog: [],
                                   cached_webfingers: {}, person_cache: {},
                                   post_json_object: {}, debug: bool,
                                   project_version: str,
                                   shared_items_federated_domains: [],
                                   sharedItemFederationTokens: {},
                                   signing_priv_key_pem: str):
    """Returns a thread used to send a post to named addresses
    """
    # bundle up the arguments for the worker
    worker_args = (session, base_dir,
                   nickname, domain,
                   onion_domain, i2p_domain, port,
                   http_prefix, federation_list,
                   send_threads, postLog,
                   cached_webfingers, person_cache,
                   post_json_object, debug,
                   project_version,
                   shared_items_federated_domains,
                   sharedItemFederationTokens,
                   signing_priv_key_pem)
    named_addr_thread = thread_with_trace(target=_send_to_named_addresses,
                                          args=worker_args, daemon=True)
    # starting the thread can fail; report and return None in that case
    try:
        named_addr_thread.start()
    except SocketError as ex:
        print('WARN: socket error while starting ' +
              'thread to send to named addresses. ' + str(ex))
        return None
    except ValueError as ex:
        print('WARN: error while starting ' +
              'thread to send to named addresses. ' + str(ex))
        return None
    return named_addr_thread
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _has_shared_inbox(session, http_prefix: str, domain: str,
|
|
|
|
debug: bool, signing_priv_key_pem: str) -> bool:
|
2019-08-26 17:42:06 +00:00
|
|
|
"""Returns true if the given domain has a shared inbox
|
2020-12-18 11:05:31 +00:00
|
|
|
This tries the new and the old way of webfingering the shared inbox
|
2019-08-26 17:42:06 +00:00
|
|
|
"""
|
2021-08-04 17:14:23 +00:00
|
|
|
tryHandles = []
|
|
|
|
if ':' not in domain:
|
|
|
|
tryHandles.append(domain + '@' + domain)
|
|
|
|
tryHandles.append('inbox@' + domain)
|
2020-12-18 11:05:31 +00:00
|
|
|
for handle in tryHandles:
|
2021-12-29 21:55:09 +00:00
|
|
|
wfRequest = webfinger_handle(session, handle, http_prefix, {},
|
|
|
|
domain, __version__, debug, False,
|
|
|
|
signing_priv_key_pem)
|
2020-12-18 11:05:31 +00:00
|
|
|
if wfRequest:
|
|
|
|
if isinstance(wfRequest, dict):
|
|
|
|
if not wfRequest.get('errors'):
|
|
|
|
return True
|
2019-08-26 17:42:06 +00:00
|
|
|
return False
|
2019-11-04 10:43:19 +00:00
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _sending_profile_update(post_json_object: {}) -> bool:
|
2020-12-18 10:43:19 +00:00
|
|
|
"""Returns true if the given json is a profile update
|
|
|
|
"""
|
2021-12-25 22:09:19 +00:00
|
|
|
if post_json_object['type'] != 'Update':
|
2020-12-18 10:43:19 +00:00
|
|
|
return False
|
2021-12-26 17:12:07 +00:00
|
|
|
if not has_object_stringType(post_json_object, False):
|
2020-12-18 10:43:19 +00:00
|
|
|
return False
|
2021-12-25 22:09:19 +00:00
|
|
|
activityType = post_json_object['object']['type']
|
2020-12-18 10:43:19 +00:00
|
|
|
if activityType == 'Person' or \
|
|
|
|
activityType == 'Application' or \
|
|
|
|
activityType == 'Group' or \
|
|
|
|
activityType == 'Service':
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def send_to_followers(session, base_dir: str,
                      nickname: str,
                      domain: str,
                      onion_domain: str, i2p_domain: str, port: int,
                      http_prefix: str, federation_list: [],
                      send_threads: [], postLog: [],
                      cached_webfingers: {}, person_cache: {},
                      post_json_object: {}, debug: bool,
                      project_version: str,
                      shared_items_federated_domains: [],
                      sharedItemFederationTokens: {},
                      signing_priv_key_pem: str) -> None:
    """sends a post to the followers of the given nickname

    Followers are grouped by their instance domain; each active domain
    receives either one delivery to its shared inbox or one delivery
    per follower when no shared inbox exists.
    """
    print('send_to_followers')
    if not session:
        print('WARN: No session for send_to_followers')
        return
    if not _post_is_addressed_to_followers(base_dir, nickname, domain,
                                           port, http_prefix,
                                           post_json_object):
        if debug:
            print('Post is not addressed to followers')
        return
    print('Post is addressed to followers')

    # map of follower domain -> list of handles on that domain
    grouped = group_followers_by_domain(base_dir, nickname, domain)
    if not grouped:
        if debug:
            print('Post to followers did not resolve any domains')
        return
    print('Post to followers resolved domains')
    # print(str(grouped))

    # this is after the message has arrived at the server
    client_to_server = False

    # for each instance
    sendingStartTime = datetime.datetime.utcnow()
    print('Sending post to followers begins ' +
          sendingStartTime.strftime("%Y-%m-%dT%H:%M:%SZ"))
    sendingCtr = 0
    for followerDomain, followerHandles in grouped.items():
        print('Sending post to followers progress ' +
              str(int(sendingCtr * 100 / len(grouped.items()))) + '% ' +
              followerDomain)
        sendingCtr += 1

        if debug:
            pprint(followerHandles)

        # if the followers domain is within the shared items
        # federation list then send the token for this domain
        # so that it can request a catalog
        sharedItemsToken = None
        if followerDomain in shared_items_federated_domains:
            domain_full = get_full_domain(domain, port)
            if sharedItemFederationTokens.get(domain_full):
                sharedItemsToken = sharedItemFederationTokens[domain_full]

        # check that the follower's domain is active
        followerDomainUrl = http_prefix + '://' + followerDomain
        if not site_is_active(followerDomainUrl, 10):
            print('Sending post to followers domain is inactive: ' +
                  followerDomainUrl)
            continue
        print('Sending post to followers domain is active: ' +
              followerDomainUrl)

        withSharedInbox = \
            _has_shared_inbox(session, http_prefix, followerDomain, debug,
                              signing_priv_key_pem)
        if debug:
            if withSharedInbox:
                print(followerDomain + ' has shared inbox')
        if not withSharedInbox:
            print('Sending post to followers, ' + followerDomain +
                  ' does not have a shared inbox')

        # use the first handle on the domain to derive the
        # destination domain and port
        toPort = port
        index = 0
        toDomain = followerHandles[index].split('@')[1]
        if ':' in toDomain:
            toPort = get_port_from_domain(toDomain)
            toDomain = remove_domain_port(toDomain)

        cc = ''

        # if we are sending to an onion domain and we
        # have an alt onion domain then use the alt
        fromDomain = domain
        fromHttpPrefix = http_prefix
        if onion_domain:
            if toDomain.endswith('.onion'):
                fromDomain = onion_domain
                fromHttpPrefix = 'http'
        elif i2p_domain:
            if toDomain.endswith('.i2p'):
                fromDomain = i2p_domain
                fromHttpPrefix = 'http'

        if withSharedInbox:
            toNickname = followerHandles[index].split('@')[0]

            # handles prefixed with '!' denote group accounts
            group_account = False
            if toNickname.startswith('!'):
                group_account = True
                toNickname = toNickname[1:]

            # if there are more than one followers on the domain
            # then send the post to the shared inbox
            if len(followerHandles) > 1:
                toNickname = 'inbox'

            if toNickname != 'inbox' and post_json_object.get('type'):
                # profile updates always go to the shared inbox
                if _sending_profile_update(post_json_object):
                    print('Sending post to followers ' +
                          'shared inbox of ' + toDomain)
                    toNickname = 'inbox'

            print('Sending post to followers from ' +
                  nickname + '@' + domain +
                  ' to ' + toNickname + '@' + toDomain)

            send_signed_json(post_json_object, session, base_dir,
                             nickname, fromDomain, port,
                             toNickname, toDomain, toPort,
                             cc, fromHttpPrefix, True, client_to_server,
                             federation_list,
                             send_threads, postLog, cached_webfingers,
                             person_cache, debug, project_version,
                             sharedItemsToken, group_account,
                             signing_priv_key_pem, 639342)
        else:
            # send to individual followers without using a shared inbox
            for handle in followerHandles:
                print('Sending post to followers ' + handle)
                toNickname = handle.split('@')[0]

                # handles prefixed with '!' denote group accounts
                group_account = False
                if toNickname.startswith('!'):
                    group_account = True
                    toNickname = toNickname[1:]

                if post_json_object['type'] != 'Update':
                    print('Sending post to followers from ' +
                          nickname + '@' + domain + ' to ' +
                          toNickname + '@' + toDomain)
                else:
                    print('Sending post to followers profile update from ' +
                          nickname + '@' + domain + ' to ' +
                          toNickname + '@' + toDomain)

                send_signed_json(post_json_object, session, base_dir,
                                 nickname, fromDomain, port,
                                 toNickname, toDomain, toPort,
                                 cc, fromHttpPrefix, True, client_to_server,
                                 federation_list,
                                 send_threads, postLog, cached_webfingers,
                                 person_cache, debug, project_version,
                                 sharedItemsToken, group_account,
                                 signing_priv_key_pem, 634219)

        # pause between domains to avoid hammering remote instances
        time.sleep(4)

    if debug:
        print('DEBUG: End of send_to_followers')

    sendingEndTime = datetime.datetime.utcnow()
    sendingMins = int((sendingEndTime - sendingStartTime).total_seconds() / 60)
    print('Sending post to followers ends ' + str(sendingMins) + ' mins')
|
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def send_to_followers_thread(session, base_dir: str,
                             nickname: str,
                             domain: str,
                             onion_domain: str, i2p_domain: str, port: int,
                             http_prefix: str, federation_list: [],
                             send_threads: [], postLog: [],
                             cached_webfingers: {}, person_cache: {},
                             post_json_object: {}, debug: bool,
                             project_version: str,
                             shared_items_federated_domains: [],
                             sharedItemFederationTokens: {},
                             signing_priv_key_pem: str):
    """Returns a thread used to send a post to followers

    The post json is copied so that the sender thread owns its own
    instance. Returns None if the thread could not be started.
    """
    # copy the post so the thread does not share mutable state
    # with the caller
    thread_args = (session, base_dir,
                   nickname, domain,
                   onion_domain, i2p_domain, port,
                   http_prefix, federation_list,
                   send_threads, postLog,
                   cached_webfingers, person_cache,
                   post_json_object.copy(), debug,
                   project_version,
                   shared_items_federated_domains,
                   sharedItemFederationTokens,
                   signing_priv_key_pem)
    followers_thread = thread_with_trace(target=send_to_followers,
                                         args=thread_args,
                                         daemon=True)
    try:
        followers_thread.start()
    except SocketError as ex:
        print('WARN: socket error while starting ' +
              'thread to send to followers. ' + str(ex))
        return None
    except ValueError as ex:
        print('WARN: error while starting ' +
              'thread to send to followers. ' + str(ex))
        return None
    return followers_thread
|
2019-07-08 13:30:04 +00:00
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def create_inbox(recent_posts_cache: {},
                 session, base_dir: str, nickname: str, domain: str, port: int,
                 http_prefix: str, itemsPerPage: int, headerOnly: bool,
                 pageNumber: int) -> {}:
    """Returns the inbox collection for the given account.

    Delegates to _create_box_indexed with box name 'inbox' and
    authorized viewing enabled.
    """
    inbox_feed = _create_box_indexed(recent_posts_cache,
                                     session, base_dir, 'inbox',
                                     nickname, domain, port, http_prefix,
                                     itemsPerPage, headerOnly, True,
                                     0, False, 0, pageNumber)
    return inbox_feed
|
2020-04-04 10:05:27 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def create_bookmarks_timeline(session, base_dir: str,
                              nickname: str, domain: str,
                              port: int, http_prefix: str, itemsPerPage: int,
                              headerOnly: bool, pageNumber: int) -> {}:
    """Returns the bookmarks timeline for the given account.

    No recent-posts cache is used for bookmarks (an empty dict is
    passed through to _create_box_indexed).
    """
    bookmarks_feed = _create_box_indexed({}, session, base_dir,
                                         'tlbookmarks',
                                         nickname, domain, port,
                                         http_prefix, itemsPerPage,
                                         headerOnly, True,
                                         0, False, 0, pageNumber)
    return bookmarks_feed
|
2020-04-04 10:05:27 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def create_dm_timeline(recent_posts_cache: {},
                       session, base_dir: str, nickname: str, domain: str,
                       port: int, http_prefix: str, itemsPerPage: int,
                       headerOnly: bool, pageNumber: int) -> {}:
    """Returns the direct messages timeline for the given account.

    Delegates to _create_box_indexed with box name 'dm'.
    """
    dm_feed = _create_box_indexed(recent_posts_cache,
                                  session, base_dir, 'dm',
                                  nickname, domain, port, http_prefix,
                                  itemsPerPage, headerOnly, True,
                                  0, False, 0, pageNumber)
    return dm_feed
|
2020-04-04 10:05:27 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def create_replies_timeline(recent_posts_cache: {},
                            session, base_dir: str, nickname: str, domain: str,
                            port: int, http_prefix: str, itemsPerPage: int,
                            headerOnly: bool, pageNumber: int) -> {}:
    """Returns the replies timeline for the given account.

    Delegates to _create_box_indexed with box name 'tlreplies'.
    """
    replies_feed = _create_box_indexed(recent_posts_cache, session,
                                       base_dir, 'tlreplies',
                                       nickname, domain, port,
                                       http_prefix, itemsPerPage,
                                       headerOnly, True,
                                       0, False, 0, pageNumber)
    return replies_feed
|
2020-11-27 12:29:20 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def create_blogs_timeline(session, base_dir: str, nickname: str, domain: str,
                          port: int, http_prefix: str, itemsPerPage: int,
                          headerOnly: bool, pageNumber: int) -> {}:
    """Returns the blogs timeline for the given account.

    No recent-posts cache is used (an empty dict is passed through).
    """
    blogs_feed = _create_box_indexed({}, session, base_dir, 'tlblogs',
                                     nickname, domain, port, http_prefix,
                                     itemsPerPage, headerOnly, True,
                                     0, False, 0, pageNumber)
    return blogs_feed
|
2020-04-04 10:05:27 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def create_features_timeline(session, base_dir: str,
                             nickname: str, domain: str,
                             port: int, http_prefix: str, itemsPerPage: int,
                             headerOnly: bool, pageNumber: int) -> {}:
    """Returns the features timeline for the given account.

    Delegates to _create_box_indexed with box name 'tlfeatures'
    and no recent-posts cache.
    """
    features_feed = _create_box_indexed({}, session, base_dir,
                                        'tlfeatures', nickname,
                                        domain, port, http_prefix,
                                        itemsPerPage, headerOnly, True,
                                        0, False, 0, pageNumber)
    return features_feed
|
2020-04-04 10:05:27 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def create_media_timeline(session, base_dir: str, nickname: str, domain: str,
                          port: int, http_prefix: str, itemsPerPage: int,
                          headerOnly: bool, pageNumber: int) -> {}:
    """Returns the media timeline for the given account.

    Delegates to _create_box_indexed with box name 'tlmedia'
    and no recent-posts cache.
    """
    media_feed = _create_box_indexed({}, session, base_dir, 'tlmedia',
                                     nickname, domain, port, http_prefix,
                                     itemsPerPage, headerOnly, True,
                                     0, False, 0, pageNumber)
    return media_feed
|
|
|
|
|
|
|
|
|
|
|
|
def create_news_timeline(session, base_dir: str, nickname: str, domain: str,
                         port: int, http_prefix: str, itemsPerPage: int,
                         headerOnly: bool, newswire_votes_threshold: int,
                         positive_voting: bool, voting_time_mins: int,
                         pageNumber: int) -> {}:
    """Returns the news timeline.

    NOTE(review): the nickname parameter is not passed through - the
    feed is always built from the 'news' account's outbox; confirm
    this is intentional.
    """
    news_feed = _create_box_indexed({}, session, base_dir,
                                    'outbox', 'news',
                                    domain, port, http_prefix,
                                    itemsPerPage, headerOnly, True,
                                    newswire_votes_threshold,
                                    positive_voting,
                                    voting_time_mins, pageNumber)
    return news_feed
|
|
|
|
|
|
|
|
|
|
|
|
def create_outbox(session, base_dir: str, nickname: str, domain: str,
                  port: int, http_prefix: str,
                  itemsPerPage: int, headerOnly: bool, authorized: bool,
                  pageNumber: int) -> {}:
    """Returns the outbox collection for the given account.

    Unlike the other timeline wrappers, the authorized flag is
    supplied by the caller rather than hard-coded.
    """
    outbox_feed = _create_box_indexed({}, session, base_dir, 'outbox',
                                      nickname, domain, port, http_prefix,
                                      itemsPerPage, headerOnly, authorized,
                                      0, False, 0, pageNumber)
    return outbox_feed
|
|
|
|
|
|
|
|
|
|
|
|
def create_moderation(base_dir: str, nickname: str, domain: str, port: int,
                      http_prefix: str, itemsPerPage: int, headerOnly: bool,
                      pageNumber: int) -> {}:
    """Returns an OrderedCollection of posts needing moderation.

    Reads post URLs from accounts/moderation.txt (newest last) and
    loads the corresponding posts from the account's inbox directory.
    Only accounts for which is_moderator() is true get any items.
    If headerOnly is True only the collection header is returned.
    """
    boxDir = create_person_dir(nickname, domain, base_dir, 'inbox')
    boxname = 'moderation'

    domain = get_full_domain(domain, port)

    if not pageNumber:
        pageNumber = 1

    pageStr = '?page=' + str(pageNumber)
    boxUrl = local_actor_url(http_prefix, nickname, domain) + '/' + boxname
    boxHeader = {
        '@context': 'https://www.w3.org/ns/activitystreams',
        'first': boxUrl + '?page=true',
        'id': boxUrl,
        'last': boxUrl + '?page=true',
        'totalItems': 0,
        'type': 'OrderedCollection'
    }
    boxItems = {
        '@context': 'https://www.w3.org/ns/activitystreams',
        'id': boxUrl + pageStr,
        'orderedItems': [
        ],
        'partOf': boxUrl,
        'type': 'OrderedCollectionPage'
    }

    if is_moderator(base_dir, nickname):
        moderationIndexFile = base_dir + '/accounts/moderation.txt'
        if os.path.isfile(moderationIndexFile):
            with open(moderationIndexFile, 'r') as f:
                lines = f.readlines()
            boxHeader['totalItems'] = len(lines)
            if headerOnly:
                return boxHeader

            # Select the slice of index lines for this page, walking
            # backwards so that the newest entries appear first
            pageLines = []
            if len(lines) > 0:
                endLineNumber = len(lines) - 1 - int(itemsPerPage * pageNumber)
                if endLineNumber < 0:
                    endLineNumber = 0
                startLineNumber = \
                    len(lines) - 1 - int(itemsPerPage * (pageNumber - 1))
                if startLineNumber < 0:
                    startLineNumber = 0
                lineNumber = startLineNumber
                while lineNumber >= endLineNumber:
                    pageLines.append(lines[lineNumber].strip('\n').strip('\r'))
                    lineNumber -= 1

            # Load each post on the page from the inbox directory,
            # where filenames are the post URL with '/' mapped to '#'
            for postUrl in pageLines:
                post_filename = \
                    boxDir + '/' + postUrl.replace('/', '#') + '.json'
                if os.path.isfile(post_filename):
                    post_json_object = load_json(post_filename)
                    if post_json_object:
                        boxItems['orderedItems'].append(post_json_object)

    if headerOnly:
        return boxHeader
    return boxItems
|
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def is_image_media(session, base_dir: str, http_prefix: str,
                   nickname: str, domain: str,
                   post_json_object: {}, translate: {},
                   yt_replace_domain: str,
                   twitter_replacement_domain: str,
                   allow_local_network_access: bool,
                   recent_posts_cache: {}, debug: bool,
                   system_language: str,
                   domain_full: str, person_cache: {},
                   signing_priv_key_pem: str) -> bool:
    """Returns true if the given post has attached image media

    Audio and video attachments also count as image media here.
    """
    # for announces, examine the post being announced instead
    if post_json_object['type'] == 'Announce':
        blocked_cache = {}
        announced_json = \
            download_announce(session, base_dir, http_prefix,
                              nickname, domain, post_json_object,
                              __version__, translate,
                              yt_replace_domain,
                              twitter_replacement_domain,
                              allow_local_network_access,
                              recent_posts_cache, debug,
                              system_language,
                              domain_full, person_cache,
                              signing_priv_key_pem,
                              blocked_cache)
        if announced_json:
            post_json_object = announced_json
    if post_json_object['type'] != 'Create':
        return False
    if not has_object_dict(post_json_object):
        return False
    post_obj = post_json_object['object']
    # posts awaiting moderation are not shown as media
    if post_obj.get('moderationStatus'):
        return False
    if post_obj['type'] not in ('Note', 'Page', 'Event', 'Article'):
        return False
    attachments = post_obj.get('attachment')
    if not attachments:
        return False
    if not isinstance(attachments, list):
        return False
    # any attachment with a recognized media type counts
    for attachment in attachments:
        if not attachment.get('mediaType') or not attachment.get('url'):
            continue
        if attachment['mediaType'].startswith(('image/',
                                               'audio/',
                                               'video/')):
            return True
    return False
|
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _add_post_string_to_timeline(postStr: str, boxname: str,
|
|
|
|
postsInBox: [], boxActor: str) -> bool:
|
2019-11-18 11:28:17 +00:00
|
|
|
""" is this a valid timeline post?
|
|
|
|
"""
|
2020-05-03 13:18:35 +00:00
|
|
|
# must be a recognized ActivityPub type
|
2020-04-04 10:05:27 +00:00
|
|
|
if ('"Note"' in postStr or
|
2020-08-05 12:24:09 +00:00
|
|
|
'"EncryptedMessage"' in postStr or
|
2020-08-26 12:12:43 +00:00
|
|
|
'"Event"' in postStr or
|
2020-04-04 10:05:27 +00:00
|
|
|
'"Article"' in postStr or
|
2020-05-03 12:52:13 +00:00
|
|
|
'"Patch"' in postStr or
|
2020-04-04 10:05:27 +00:00
|
|
|
'"Announce"' in postStr or
|
|
|
|
('"Question"' in postStr and
|
|
|
|
('"Create"' in postStr or '"Update"' in postStr))):
|
2019-11-18 11:28:17 +00:00
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
if boxname == 'dm':
|
2019-11-24 12:12:29 +00:00
|
|
|
if '#Public' in postStr or '/followers' in postStr:
|
|
|
|
return False
|
2020-04-04 10:05:27 +00:00
|
|
|
elif boxname == 'tlreplies':
|
2019-11-24 12:12:29 +00:00
|
|
|
if boxActor not in postStr:
|
|
|
|
return False
|
2020-11-27 14:17:00 +00:00
|
|
|
elif (boxname == 'tlblogs' or
|
|
|
|
boxname == 'tlnews' or
|
|
|
|
boxname == 'tlfeatures'):
|
2020-02-24 14:39:25 +00:00
|
|
|
if '"Create"' not in postStr:
|
|
|
|
return False
|
|
|
|
if '"Article"' not in postStr:
|
|
|
|
return False
|
2020-04-04 10:05:27 +00:00
|
|
|
elif boxname == 'tlmedia':
|
2019-11-24 12:12:29 +00:00
|
|
|
if '"Create"' in postStr:
|
2021-03-07 12:17:06 +00:00
|
|
|
if ('mediaType' not in postStr or
|
2021-03-07 10:58:18 +00:00
|
|
|
('image/' not in postStr and
|
|
|
|
'video/' not in postStr and
|
|
|
|
'audio/' not in postStr)):
|
2019-11-18 11:28:17 +00:00
|
|
|
return False
|
2019-11-24 12:12:29 +00:00
|
|
|
# add the post to the dictionary
|
|
|
|
postsInBox.append(postStr)
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _add_post_to_timeline(filePath: str, boxname: str,
                          postsInBox: [], boxActor: str) -> bool:
    """ Reads a post from file and decides whether it is valid

    Appends the raw post string (with a '<hasReplies>' marker if a
    .replies file exists alongside the post) to postsInBox when it
    is valid for the given timeline. Returns True if added.

    Fix: the original had an unreachable 'return False' after the
    'return' inside the 'with' block; the file read is now separated
    from the validity check so there is no dead code.
    """
    with open(filePath, 'r') as postFile:
        postStr = postFile.read()

    if filePath.endswith('.json'):
        repliesFilename = filePath.replace('.json', '.replies')
        if os.path.isfile(repliesFilename):
            # append a replies identifier, which will later be removed
            postStr += '<hasReplies>'

    return _add_post_string_to_timeline(postStr, boxname, postsInBox,
                                        boxActor)
|
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-28 19:33:29 +00:00
|
|
|
def remove_post_interactions(post_json_object: {}, force: bool) -> bool:
    """ Don't show likes, replies, bookmarks, DMs or shares (announces) to
    unauthorized viewers. This makes the timeline less useful to
    marketers and other surveillance-oriented organizations.
    Returns False if this is a private post
    """
    if has_object_dict(post_json_object):
        interaction_obj = post_json_object['object']
        # If not authorized and it's a private post
        # then just don't show it within timelines
        if not force and not is_public_post(post_json_object):
            return False
    else:
        interaction_obj = post_json_object

    # clear the likes
    if interaction_obj.get('likes'):
        interaction_obj['likes'] = {'items': []}
    # clear the reactions
    if interaction_obj.get('reactions'):
        interaction_obj['reactions'] = {'items': []}
    # remove other collections
    for collection_name in ('replies', 'shares', 'bookmarks', 'ignores'):
        if interaction_obj.get(collection_name):
            interaction_obj[collection_name] = {}
    return True
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _passed_newswire_voting(newswire_votes_threshold: int,
                            base_dir: str, domain: str,
                            post_filename: str,
                            positive_voting: bool,
                            voting_time_mins: int) -> bool:
    """Returns true if the post has passed through newswire voting

    newswire_votes_threshold -- number of votes needed for a decision
    positive_voting -- if True votes count as approvals, otherwise
        they count as vetoes
    voting_time_mins -- minutes allowed for voting after arrival
    """
    # apply votes within this timeline
    if newswire_votes_threshold <= 0:
        # voting is disabled
        return True
    # note that the presence of an arrival file also indicates
    # that this post is moderated
    arrivalDate = \
        locate_news_arrival(base_dir, domain, post_filename)
    if not arrivalDate:
        return True
    # how long has elapsed since this post arrived?
    currDate = datetime.datetime.utcnow()
    timeDiffMins = \
        int((currDate - arrivalDate).total_seconds() / 60)
    # has the voting time elapsed?
    if timeDiffMins < voting_time_mins:
        # voting is still happening, so don't add this
        # post to the timeline
        return False
    # is there a votes file for this post?
    votesFilename = \
        locate_news_votes(base_dir, domain, post_filename)
    if not votesFilename:
        return True
    # load the votes file and count the votes
    votesJson = load_json(votesFilename, 0, 2)
    if not votesJson:
        return True
    if not positive_voting:
        if votes_on_newswire_item(votesJson) >= \
           newswire_votes_threshold:
            # Too many veto votes.
            # Continue without incrementing
            # the posts counter
            return False
    else:
        # Bug fix: previously this compared the function object
        # itself (votes_on_newswire_item < threshold), which raises
        # TypeError; the vote count must be computed from votesJson.
        if votes_on_newswire_item(votesJson) < \
           newswire_votes_threshold:
            # Not enough votes.
            # Continue without incrementing
            # the posts counter
            return False
    return True
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def _create_box_indexed(recent_posts_cache: {},
                        session, base_dir: str, boxname: str,
                        nickname: str, domain: str, port: int,
                        http_prefix: str,
                        itemsPerPage: int, headerOnly: bool, authorized: bool,
                        newswire_votes_threshold: int, positive_voting: bool,
                        voting_time_mins: int, pageNumber: int) -> {}:
    """Constructs the box feed for a person with the given nickname

    Reads post filenames from the box's .index file, loads each post
    (from the recent posts cache where possible) and returns either
    an OrderedCollection header (headerOnly) or an
    OrderedCollectionPage of posts. Returns None for an unrecognized
    boxname.
    """
    # unauthorized viewers only ever see the first page
    if not authorized or not pageNumber:
        pageNumber = 1

    if boxname != 'inbox' and boxname != 'dm' and \
       boxname != 'tlreplies' and boxname != 'tlmedia' and \
       boxname != 'tlblogs' and boxname != 'tlnews' and \
       boxname != 'tlfeatures' and \
       boxname != 'outbox' and boxname != 'tlbookmarks' and \
       boxname != 'bookmarks':
        print('ERROR: invalid boxname ' + boxname)
        return None

    # bookmarks and events timelines are like the inbox
    # but have their own separate index
    indexBoxName = boxname
    timelineNickname = nickname
    if boxname == "tlbookmarks":
        boxname = "bookmarks"
        indexBoxName = boxname
    elif boxname == "tlfeatures":
        # the features timeline reads from the news account's blogs
        boxname = "tlblogs"
        indexBoxName = boxname
        timelineNickname = 'news'

    originalDomain = domain
    domain = get_full_domain(domain, port)

    boxActor = local_actor_url(http_prefix, nickname, domain)

    pageStr = '?page=true'
    if pageNumber:
        if pageNumber < 1:
            pageNumber = 1
        try:
            pageStr = '?page=' + str(pageNumber)
        except BaseException:
            print('EX: _create_box_indexed ' +
                  'unable to convert page number to string')
            pass
    boxUrl = local_actor_url(http_prefix, nickname, domain) + '/' + boxname
    boxHeader = {
        '@context': 'https://www.w3.org/ns/activitystreams',
        'first': boxUrl + '?page=true',
        'id': boxUrl,
        'last': boxUrl + '?page=true',
        'totalItems': 0,
        'type': 'OrderedCollection'
    }
    boxItems = {
        '@context': 'https://www.w3.org/ns/activitystreams',
        'id': boxUrl + pageStr,
        'orderedItems': [
        ],
        'partOf': boxUrl,
        'type': 'OrderedCollectionPage'
    }

    postsInBox = []
    postUrlsInBox = []

    indexFilename = \
        acct_dir(base_dir, timelineNickname, originalDomain) + \
        '/' + indexBoxName + '.index'
    totalPostsCount = 0
    postsAddedToTimeline = 0
    if os.path.isfile(indexFilename):
        with open(indexFilename, 'r') as indexFile:
            postsAddedToTimeline = 0
            while postsAddedToTimeline < itemsPerPage:
                post_filename = indexFile.readline()

                if not post_filename:
                    # end of index reached
                    break

                # Has this post passed through the newswire voting stage?
                if not _passed_newswire_voting(newswire_votes_threshold,
                                               base_dir, domain,
                                               post_filename,
                                               positive_voting,
                                               voting_time_mins):
                    continue

                # Skip through any posts previous to the current page
                if totalPostsCount < int((pageNumber - 1) * itemsPerPage):
                    totalPostsCount += 1
                    continue

                # if this is a full path then remove the directories
                if '/' in post_filename:
                    post_filename = post_filename.split('/')[-1]

                # filename of the post without any extension or path
                # This should also correspond to any index entry in
                # the posts cache
                postUrl = \
                    post_filename.replace('\n', '').replace('\r', '')
                postUrl = postUrl.replace('.json', '').strip()

                # skip duplicate index entries
                if postUrl in postUrlsInBox:
                    continue

                # is the post cached in memory?
                if recent_posts_cache.get('index'):
                    if postUrl in recent_posts_cache['index']:
                        if recent_posts_cache['json'].get(postUrl):
                            url = recent_posts_cache['json'][postUrl]
                            if _add_post_string_to_timeline(url,
                                                            boxname,
                                                            postsInBox,
                                                            boxActor):
                                totalPostsCount += 1
                                postsAddedToTimeline += 1
                                postUrlsInBox.append(postUrl)
                                continue
                            else:
                                print('Post not added to timeline')

                # read the post from file
                fullPostFilename = \
                    locate_post(base_dir, nickname,
                                originalDomain, postUrl, False)
                if fullPostFilename:
                    # has the post been rejected?
                    if os.path.isfile(fullPostFilename + '.reject'):
                        continue

                    if _add_post_to_timeline(fullPostFilename, boxname,
                                             postsInBox, boxActor):
                        postsAddedToTimeline += 1
                        totalPostsCount += 1
                        postUrlsInBox.append(postUrl)
                    else:
                        print('WARN: Unable to add post ' + postUrl +
                              ' nickname ' + nickname +
                              ' timeline ' + boxname)
                else:
                    if timelineNickname != nickname:
                        # if this is the features timeline
                        fullPostFilename = \
                            locate_post(base_dir, timelineNickname,
                                        originalDomain, postUrl, False)
                        if fullPostFilename:
                            if _add_post_to_timeline(fullPostFilename,
                                                     boxname,
                                                     postsInBox, boxActor):
                                postsAddedToTimeline += 1
                                totalPostsCount += 1
                                postUrlsInBox.append(postUrl)
                            else:
                                print('WARN: Unable to add features post ' +
                                      postUrl + ' nickname ' + nickname +
                                      ' timeline ' + boxname)
                        else:
                            print('WARN: features timeline. ' +
                                  'Unable to locate post ' + postUrl)
                    else:
                        print('WARN: Unable to locate post ' + postUrl +
                              ' nickname ' + nickname)

    if totalPostsCount < 3:
        print('Posts added to json timeline ' + boxname + ': ' +
              str(postsAddedToTimeline))

    # Generate first and last entries within header
    if totalPostsCount > 0:
        lastPage = int(totalPostsCount / itemsPerPage)
        if lastPage < 1:
            lastPage = 1
        boxHeader['last'] = \
            local_actor_url(http_prefix, nickname, domain) + \
            '/' + boxname + '?page=' + str(lastPage)

    if headerOnly:
        boxHeader['totalItems'] = len(postsInBox)
        prevPageStr = 'true'
        if pageNumber > 1:
            prevPageStr = str(pageNumber - 1)
        boxHeader['prev'] = \
            local_actor_url(http_prefix, nickname, domain) + \
            '/' + boxname + '?page=' + prevPageStr

        nextPageStr = str(pageNumber + 1)
        boxHeader['next'] = \
            local_actor_url(http_prefix, nickname, domain) + \
            '/' + boxname + '?page=' + nextPageStr
        return boxHeader

    for postStr in postsInBox:
        # Check if the post has replies
        hasReplies = False
        if postStr.endswith('<hasReplies>'):
            hasReplies = True
            # remove the replies identifier
            postStr = postStr.replace('<hasReplies>', '')

        p = None
        try:
            p = json.loads(postStr)
        except BaseException:
            print('EX: _create_box_indexed unable to load json ' + postStr)
            continue

        # Does this post have replies?
        # This will be used to indicate that replies exist within the html
        # created by individual_post_as_html
        p['hasReplies'] = hasReplies

        if not authorized:
            # strip likes/replies/shares from posts shown to
            # unauthorized viewers; skip private posts entirely
            if not remove_post_interactions(p, False):
                continue

        boxItems['orderedItems'].append(p)

    return boxItems
|
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-28 19:33:29 +00:00
|
|
|
def expire_cache(base_dir: str, person_cache: {},
                 http_prefix: str, archive_dir: str,
                 recent_posts_cache: {},
                 maxPostsInBox=32000):
    """Thread used to expire actors from the cache and archive old posts
    """
    one_day_secs = 60 * 60 * 24
    while True:
        # once per day
        time.sleep(one_day_secs)
        expire_person_cache(person_cache)
        archive_posts(base_dir, http_prefix, archive_dir,
                      recent_posts_cache, maxPostsInBox)
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2019-08-20 11:51:29 +00:00
|
|
|
|
def archive_posts(base_dir: str, http_prefix: str, archive_dir: str,
                  recent_posts_cache: {},
                  maxPostsInBox=32000) -> None:
    """Archives posts for all accounts
    """
    if maxPostsInBox == 0:
        # archiving disabled
        return

    # ensure that the top level archive directories exist
    if archive_dir:
        if not os.path.isdir(archive_dir):
            os.mkdir(archive_dir)
        if not os.path.isdir(archive_dir + '/accounts'):
            os.mkdir(archive_dir + '/accounts')

    for subdir, dirs, files in os.walk(base_dir + '/accounts'):
        for handle in dirs:
            if '@' not in handle:
                continue
            nickname = handle.split('@')[0]
            domain = handle.split('@')[1]
            # create per-account archive directories for both boxes
            if archive_dir:
                accountArchive = archive_dir + '/accounts/' + handle
                if not os.path.isdir(accountArchive):
                    os.mkdir(accountArchive)
                if not os.path.isdir(accountArchive + '/inbox'):
                    os.mkdir(accountArchive + '/inbox')
                if not os.path.isdir(accountArchive + '/outbox'):
                    os.mkdir(accountArchive + '/outbox')
            # archive each box in turn
            for boxname in ('inbox', 'outbox'):
                archiveSubdir = None
                if archive_dir:
                    archiveSubdir = archive_dir + '/accounts/' + \
                        handle + '/' + boxname
                archive_posts_for_person(http_prefix,
                                         nickname, domain, base_dir,
                                         boxname, archiveSubdir,
                                         recent_posts_cache, maxPostsInBox)
        # only examine the top level of accounts
        break
def archive_posts_for_person(http_prefix: str, nickname: str, domain: str,
                             base_dir: str,
                             boxname: str, archive_dir: str,
                             recent_posts_cache: {},
                             maxPostsInBox=32000) -> None:
    """Retain a maximum number of posts within the given box
    Move any others to an archive directory, or delete them when
    no archive directory is given
    """
    if boxname != 'inbox' and boxname != 'outbox':
        return
    if archive_dir:
        if not os.path.isdir(archive_dir):
            os.mkdir(archive_dir)
    boxDir = create_person_dir(nickname, domain, base_dir, boxname)
    # quick count of entries in the box
    noOfPosts = sum(1 for _ in os.scandir(boxDir))
    if noOfPosts <= maxPostsInBox:
        print('Checked ' + str(noOfPosts) + ' ' + boxname +
              ' posts for ' + nickname + '@' + domain)
        return

    # remove entries from the index
    handle = nickname + '@' + domain
    indexFilename = base_dir + '/accounts/' + handle + '/' + boxname + '.index'
    if os.path.isfile(indexFilename):
        indexCtr = 0
        # get the existing index entries as a string,
        # truncated to the maximum number of posts
        newIndex = ''
        with open(indexFilename, 'r') as indexFile:
            for post_id in indexFile:
                newIndex += post_id
                indexCtr += 1
                if indexCtr >= maxPostsInBox:
                    break
        # save the new index file
        if len(newIndex) > 0:
            with open(indexFilename, 'w+') as indexFile:
                indexFile.write(newIndex)

    # build a dict of published date -> post filename
    postsInBoxDict = {}
    postsCtr = 0
    postsInBox = os.scandir(boxDir)
    for post_filename in postsInBox:
        post_filename = post_filename.name
        if not post_filename.endswith('.json'):
            continue
        # Time of file creation
        fullFilename = os.path.join(boxDir, post_filename)
        if os.path.isfile(fullFilename):
            # read within a context manager so the handle is closed
            # (previously the file object leaked)
            with open(fullFilename, 'r') as postFile:
                content = postFile.read()
            if '"published":' in content:
                publishedStr = content.split('"published":')[1]
                if '"' in publishedStr:
                    publishedStr = publishedStr.split('"')[1]
                    if publishedStr.endswith('Z'):
                        postsInBoxDict[publishedStr] = post_filename
                        postsCtr += 1

    noOfPosts = postsCtr
    if noOfPosts <= maxPostsInBox:
        print('Checked ' + str(noOfPosts) + ' ' + boxname +
              ' posts for ' + nickname + '@' + domain)
        return

    # sort the list in ascending order of date
    postsInBoxSorted = \
        OrderedDict(sorted(postsInBoxDict.items(), reverse=False))

    # directory containing cached html posts
    postCacheDir = boxDir.replace('/' + boxname, '/postcache')

    removeCtr = 0
    for publishedStr, post_filename in postsInBoxSorted.items():
        filePath = os.path.join(boxDir, post_filename)
        if not os.path.isfile(filePath):
            continue
        if archive_dir:
            # move the post and any companion files into the archive
            archivePath = os.path.join(archive_dir, post_filename)
            os.rename(filePath, archivePath)

            extensions = ('replies', 'votes', 'arrived', 'muted')
            for ext in extensions:
                extPath = filePath.replace('.json', '.' + ext)
                if os.path.isfile(extPath):
                    os.rename(extPath,
                              archivePath.replace('.json', '.' + ext))
                else:
                    extPath = filePath.replace('.json',
                                               '.json.' + ext)
                    if os.path.isfile(extPath):
                        os.rename(extPath,
                                  archivePath.replace('.json',
                                                      '.json.' + ext))
        else:
            # no archive directory, so delete the post
            delete_post(base_dir, http_prefix, nickname, domain,
                        filePath, False, recent_posts_cache)

        # remove cached html posts
        postCacheFilename = \
            os.path.join(postCacheDir, post_filename).replace('.json',
                                                              '.html')
        if os.path.isfile(postCacheFilename):
            try:
                os.remove(postCacheFilename)
            except OSError:
                print('EX: archive_posts_for_person unable to delete ' +
                      postCacheFilename)

        noOfPosts -= 1
        removeCtr += 1
        if noOfPosts <= maxPostsInBox:
            break
    if archive_dir:
        print('Archived ' + str(removeCtr) + ' ' + boxname +
              ' posts for ' + nickname + '@' + domain)
    else:
        print('Removed ' + str(removeCtr) + ' ' + boxname +
              ' posts for ' + nickname + '@' + domain)
    print(nickname + '@' + domain + ' has ' + str(noOfPosts) +
          ' in ' + boxname)
def get_public_posts_of_person(base_dir: str, nickname: str, domain: str,
                               raw: bool, simple: bool, proxy_type: str,
                               port: int, http_prefix: str,
                               debug: bool, project_version: str,
                               system_language: str,
                               signing_priv_key_pem: str,
                               originDomain: str) -> None:
    """ This is really just for test purposes
    """
    if debug:
        if signing_priv_key_pem:
            print('Signing key available')
        else:
            print('Signing key missing')

    print('Starting new session for getting public posts')
    session = create_session(proxy_type)
    if not session:
        if debug:
            print('Session was not created')
        return
    person_cache = {}
    cached_webfingers = {}
    federation_list = []
    # a leading '!' on the nickname denotes a group account
    group_account = False
    if nickname.startswith('!'):
        nickname = nickname[1:]
        group_account = True
    domain_full = get_full_domain(domain, port)
    handle = http_prefix + "://" + domain_full + "/@" + nickname

    wf_result = \
        webfinger_handle(session, handle, http_prefix, cached_webfingers,
                         originDomain, project_version, debug, group_account,
                         signing_priv_key_pem)
    if not wf_result:
        if debug:
            print('No webfinger result was returned for ' + handle)
        sys.exit()
    if not isinstance(wf_result, dict):
        print('Webfinger for ' + handle + ' did not return a dict. ' +
              str(wf_result))
        sys.exit()

    if debug:
        print('Getting the outbox for ' + handle)
    (person_url, pub_key_id, pub_key, person_id, shared_inbox, avatar_url,
     display_name, _) = get_person_box(signing_priv_key_pem,
                                       originDomain,
                                       base_dir, session, wf_result,
                                       person_cache,
                                       project_version, http_prefix,
                                       nickname, domain, 'outbox',
                                       62524)
    if debug:
        print('Actor url: ' + str(person_id))
    if not person_id:
        return

    max_mentions = 10
    max_emoji = 10
    max_attachments = 5
    _get_posts(session, person_url, 30, max_mentions, max_emoji,
               max_attachments, federation_list,
               person_cache, raw, simple, debug,
               project_version, http_prefix, originDomain, system_language,
               signing_priv_key_pem)
|
|
|
def get_public_post_domains(session, base_dir: str, nickname: str, domain: str,
|
|
|
|
originDomain: str,
|
|
|
|
proxy_type: str, port: int, http_prefix: str,
|
|
|
|
debug: bool, project_version: str,
|
|
|
|
wordFrequency: {}, domainList: [],
|
|
|
|
system_language: str,
|
|
|
|
signing_priv_key_pem: str) -> []:
|
2020-07-08 10:09:51 +00:00
|
|
|
""" Returns a list of domains referenced within public posts
|
|
|
|
"""
|
2020-09-25 10:05:23 +00:00
|
|
|
if not session:
|
2021-12-28 16:56:57 +00:00
|
|
|
session = create_session(proxy_type)
|
2020-07-08 10:09:51 +00:00
|
|
|
if not session:
|
2020-07-08 10:30:29 +00:00
|
|
|
return domainList
|
2021-12-25 22:17:49 +00:00
|
|
|
person_cache = {}
|
2021-12-25 22:28:18 +00:00
|
|
|
cached_webfingers = {}
|
2021-12-25 23:45:30 +00:00
|
|
|
federation_list = []
|
2020-07-08 10:09:51 +00:00
|
|
|
|
2021-12-26 12:45:03 +00:00
|
|
|
domain_full = get_full_domain(domain, port)
|
2021-12-26 10:00:46 +00:00
|
|
|
handle = http_prefix + "://" + domain_full + "/@" + nickname
|
2020-07-08 10:09:51 +00:00
|
|
|
wfRequest = \
|
2021-12-29 21:55:09 +00:00
|
|
|
webfinger_handle(session, handle, http_prefix, cached_webfingers,
|
|
|
|
domain, project_version, debug, False,
|
|
|
|
signing_priv_key_pem)
|
2020-07-08 10:09:51 +00:00
|
|
|
if not wfRequest:
|
2020-07-08 10:30:29 +00:00
|
|
|
return domainList
|
2020-07-08 10:09:51 +00:00
|
|
|
if not isinstance(wfRequest, dict):
|
|
|
|
print('Webfinger for ' + handle + ' did not return a dict. ' +
|
|
|
|
str(wfRequest))
|
2020-07-08 10:30:29 +00:00
|
|
|
return domainList
|
2020-07-08 10:09:51 +00:00
|
|
|
|
2021-09-22 09:29:48 +00:00
|
|
|
(personUrl, pubKeyId, pubKey, personId, sharedInbox, avatarUrl,
|
2021-12-29 21:55:09 +00:00
|
|
|
displayName, _) = get_person_box(signing_priv_key_pem,
|
|
|
|
originDomain,
|
|
|
|
base_dir, session, wfRequest,
|
|
|
|
person_cache,
|
|
|
|
project_version, http_prefix,
|
|
|
|
nickname, domain, 'outbox',
|
|
|
|
92522)
|
2021-12-25 21:02:44 +00:00
|
|
|
max_mentions = 99
|
2021-12-25 21:04:51 +00:00
|
|
|
max_emoji = 99
|
2020-07-08 10:09:51 +00:00
|
|
|
maxAttachments = 5
|
|
|
|
postDomains = \
|
2021-12-29 21:55:09 +00:00
|
|
|
get_post_domains(session, personUrl, 64, max_mentions, max_emoji,
|
|
|
|
maxAttachments, federation_list,
|
|
|
|
person_cache, debug,
|
|
|
|
project_version, http_prefix, domain,
|
|
|
|
wordFrequency, domainList, system_language,
|
|
|
|
signing_priv_key_pem)
|
2020-07-08 10:09:51 +00:00
|
|
|
postDomains.sort()
|
|
|
|
return postDomains
|
2020-04-04 10:05:27 +00:00
|
|
|
|
|
|
|
|
def download_follow_collection(signing_priv_key_pem: str,
                               followType: str,
                               session, http_prefix: str,
                               actor: str, pageNumber: int,
                               noOfPages: int, debug: bool) -> []:
    """Returns a list of following/followers for the given actor
    by downloading the json for their following/followers collection
    """
    prof = 'https://www.w3.org/ns/activitystreams'
    # NOTE(review): this `or` makes the first branch apply to almost all
    # actors; presumably `and` was intended — confirm before changing
    if '/channel/' not in actor or '/accounts/' not in actor:
        mimeType = 'application/activity+json'
    else:
        mimeType = 'application/ld+json'
    sessionHeaders = {
        'Accept': mimeType + '; ' + 'profile="' + prof + '"'
    }
    collected = []
    for page_offset in range(noOfPages):
        page_url = \
            actor + '/' + followType + '?page=' + \
            str(pageNumber + page_offset)
        page_json = \
            get_json(signing_priv_key_pem, session, page_url,
                     sessionHeaders, None,
                     debug, __version__, http_prefix, None)
        if not page_json:
            break
        # collections may use either orderedItems or items
        if page_json.get('orderedItems'):
            page_items = page_json['orderedItems']
        elif page_json.get('items'):
            page_items = page_json['items']
        else:
            break
        for follower_actor in page_items:
            if follower_actor not in collected:
                collected.append(follower_actor)
    return collected
def get_public_post_info(session, base_dir: str, nickname: str, domain: str,
                         originDomain: str,
                         proxy_type: str, port: int, http_prefix: str,
                         debug: bool, project_version: str,
                         wordFrequency: {}, system_language: str,
                         signing_priv_key_pem: str) -> []:
    """ Returns a dict of domains referenced within public posts
    """
    if not session:
        session = create_session(proxy_type)
    if not session:
        return {}
    person_cache = {}
    cached_webfingers = {}
    federation_list = []

    domain_full = get_full_domain(domain, port)
    handle = http_prefix + "://" + domain_full + "/@" + nickname
    wfRequest = \
        webfinger_handle(session, handle, http_prefix, cached_webfingers,
                         domain, project_version, debug, False,
                         signing_priv_key_pem)
    if not wfRequest:
        return {}
    if not isinstance(wfRequest, dict):
        print('Webfinger for ' + handle + ' did not return a dict. ' +
              str(wfRequest))
        return {}

    (personUrl, pubKeyId, pubKey, personId, sharedInbox, avatarUrl,
     displayName, _) = get_person_box(signing_priv_key_pem,
                                      originDomain,
                                      base_dir, session, wfRequest,
                                      person_cache,
                                      project_version, http_prefix,
                                      nickname, domain, 'outbox',
                                      13863)
    max_mentions = 99
    max_emoji = 99
    maxAttachments = 5
    maxPosts = 64
    postDomains = \
        get_post_domains(session, personUrl, maxPosts,
                         max_mentions, max_emoji,
                         maxAttachments, federation_list,
                         person_cache, debug,
                         project_version, http_prefix, domain,
                         wordFrequency, [], system_language,
                         signing_priv_key_pem)
    postDomains.sort()
    domainsInfo = {}
    for d in postDomains:
        if not domainsInfo.get(d):
            domainsInfo[d] = []

    blockedPosts = \
        _get_posts_for_blocked_domains(base_dir, session,
                                       personUrl, maxPosts,
                                       max_mentions,
                                       max_emoji, maxAttachments,
                                       federation_list,
                                       person_cache,
                                       debug,
                                       project_version, http_prefix,
                                       domain, signing_priv_key_pem)
    for blockedDomain, postUrlList in blockedPosts.items():
        # guard against a KeyError when a blocked domain was not
        # among the domains returned by get_post_domains
        if blockedDomain not in domainsInfo:
            domainsInfo[blockedDomain] = []
        domainsInfo[blockedDomain] += postUrlList

    return domainsInfo
def get_public_post_domains_blocked(session, base_dir: str,
                                    nickname: str, domain: str,
                                    proxy_type: str, port: int,
                                    http_prefix: str,
                                    debug: bool, project_version: str,
                                    wordFrequency: {}, domainList: [],
                                    system_language: str,
                                    signing_priv_key_pem: str) -> []:
    """ Returns a list of domains referenced within public posts which
    are globally blocked on this instance
    """
    originDomain = domain
    referenced_domains = \
        get_public_post_domains(session, base_dir, nickname, domain,
                                originDomain,
                                proxy_type, port, http_prefix,
                                debug, project_version,
                                wordFrequency, domainList, system_language,
                                signing_priv_key_pem)
    if not referenced_domains:
        return []

    blockingFilename = base_dir + '/accounts/blocking.txt'
    if not os.path.isfile(blockingFilename):
        return []

    # read the blocked domains as a single string
    blockedStr = ''
    with open(blockingFilename, 'r') as fp:
        blockedStr = fp.read()

    result = []
    for domainName in referenced_domains:
        if '@' not in domainName:
            continue
        # get the domain after the @
        domainName = domainName.split('@')[1].strip()
        if is_evil(domainName):
            result.append(domainName)
            continue
        # NOTE(review): substring match, so "example.com" also matches
        # entries like "notexample.com" — confirm whether intended
        if domainName in blockedStr:
            result.append(domainName)

    return result
def _get_non_mutuals_of_person(base_dir: str,
                               nickname: str, domain: str) -> []:
    """Returns the followers who are not mutuals of a person
    i.e. accounts which follow you but you don't follow them
    """
    followers = \
        get_followers_list(base_dir, nickname, domain, 'followers.txt')
    following = \
        get_followers_list(base_dir, nickname, domain, 'following.txt')
    # use a set for O(1) membership tests instead of scanning the
    # following list once per follower; ordering of the result is
    # unchanged (follower order)
    followingSet = set(following)
    return [handle for handle in followers if handle not in followingSet]
def check_domains(session, base_dir: str,
                  nickname: str, domain: str,
                  proxy_type: str, port: int, http_prefix: str,
                  debug: bool, project_version: str,
                  maxBlockedDomains: int, singleCheck: bool,
                  system_language: str,
                  signing_priv_key_pem: str) -> None:
    """Checks follower accounts for references to globally blocked domains
    """
    wordFrequency = {}
    nonMutuals = _get_non_mutuals_of_person(base_dir, nickname, domain)
    if not nonMutuals:
        print('No non-mutual followers were found')
        return
    followerWarningFilename = base_dir + '/accounts/followerWarnings.txt'
    updateFollowerWarnings = False
    followerWarningStr = ''
    if os.path.isfile(followerWarningFilename):
        with open(followerWarningFilename, 'r') as fp:
            followerWarningStr = fp.read()

    def _blocked_domains_for(follower_handle: str) -> []:
        # obtain the blocked domains referenced by a follower's posts
        follower_nickname = follower_handle.split('@')[0]
        follower_domain = follower_handle.split('@')[1].strip()
        return get_public_post_domains_blocked(session, base_dir,
                                               follower_nickname,
                                               follower_domain,
                                               proxy_type, port, http_prefix,
                                               debug, project_version,
                                               wordFrequency, [],
                                               system_language,
                                               signing_priv_key_pem)

    if singleCheck:
        # checks a single random non-mutual
        handle = nonMutuals[random.randrange(0, len(nonMutuals))]
        if '@' in handle:
            blockedDomains = _blocked_domains_for(handle)
            if blockedDomains:
                if len(blockedDomains) > maxBlockedDomains:
                    followerWarningStr += handle + '\n'
                    updateFollowerWarnings = True
    else:
        # checks all non-mutuals
        for handle in nonMutuals:
            if '@' not in handle:
                continue
            # skip handles already recorded as warnings
            if handle in followerWarningStr:
                continue
            blockedDomains = _blocked_domains_for(handle)
            if blockedDomains:
                print(handle)
                for d in blockedDomains:
                    print(' ' + d)
                if len(blockedDomains) > maxBlockedDomains:
                    followerWarningStr += handle + '\n'
                    updateFollowerWarnings = True

    if updateFollowerWarnings and followerWarningStr:
        with open(followerWarningFilename, 'w+') as fp:
            fp.write(followerWarningStr)
        if not singleCheck:
            print(followerWarningStr)
def _append_reply_if_visible(searchFilename: str, authorized: bool,
                             pubStr: str, repliesJson: {}) -> bool:
    """Loads the reply post at searchFilename and appends it to
    repliesJson['orderedItems'] if the caller is authorized or the
    post is addressed to the public.
    Returns True if the post was appended.
    """
    if not authorized:
        # quick text scan: unauthorized callers only get public posts.
        # Read within a context manager so the handle is closed
        # (previously the file object leaked)
        with open(searchFilename, 'r') as postFile:
            if pubStr not in postFile.read():
                return False
    post_json_object = load_json(searchFilename)
    if not post_json_object:
        return False
    pjo = post_json_object
    if pjo['object'].get('cc'):
        # check both 'to' and 'cc' for the public collection
        if (authorized or
            (pubStr in pjo['object']['to'] or
             pubStr in pjo['object']['cc'])):
            repliesJson['orderedItems'].append(pjo)
            return True
    else:
        if authorized or \
           pubStr in pjo['object']['to']:
            repliesJson['orderedItems'].append(pjo)
            return True
    return False


def populate_replies_json(base_dir: str, nickname: str, domain: str,
                          postRepliesFilename: str, authorized: bool,
                          repliesJson: {}) -> None:
    """Appends the replies listed within the given replies index file
    to repliesJson['orderedItems'].
    Unauthorized callers only receive posts addressed to the public.
    """
    pubStr = 'https://www.w3.org/ns/activitystreams#Public'
    # populate the items list with replies
    repliesBoxes = ('outbox', 'inbox')
    with open(postRepliesFilename, 'r') as repliesFile:
        for messageId in repliesFile:
            replyFound = False
            # examine inbox and outbox
            for boxname in repliesBoxes:
                messageId2 = messageId.replace('\n', '').replace('\r', '')
                searchFilename = \
                    acct_dir(base_dir, nickname, domain) + '/' + \
                    boxname + '/' + \
                    messageId2.replace('/', '#') + '.json'
                if os.path.isfile(searchFilename):
                    if _append_reply_if_visible(searchFilename, authorized,
                                                pubStr, repliesJson):
                        replyFound = True
                    # the file was found, so don't check the other box
                    break
            # if not in either inbox or outbox then examine the shared inbox
            if not replyFound:
                messageId2 = messageId.replace('\n', '').replace('\r', '')
                searchFilename = \
                    base_dir + \
                    '/accounts/inbox@' + \
                    domain + '/inbox/' + \
                    messageId2.replace('/', '#') + '.json'
                if os.path.isfile(searchFilename):
                    # get the json of the reply and append it to
                    # the collection
                    _append_reply_if_visible(searchFilename, authorized,
                                             pubStr, repliesJson)
def _reject_announce(announceFilename: str,
                     base_dir: str, nickname: str, domain: str,
                     announcePostId: str, recent_posts_cache: {}):
    """Marks an announce as rejected
    """
    reject_post_id(base_dir, nickname, domain, announcePostId,
                   recent_posts_cache)

    # reject the post referenced by the announce activity object
    rejectMarkerFilename = announceFilename + '.reject'
    if not os.path.isfile(rejectMarkerFilename):
        with open(rejectMarkerFilename, 'w+') as rejectFile:
            rejectFile.write('\n')
2021-12-29 21:55:09 +00:00
|
|
|
def download_announce(session, base_dir: str, http_prefix: str,
                      nickname: str, domain: str,
                      post_json_object: {}, project_version: str,
                      translate: {},
                      yt_replace_domain: str,
                      twitter_replacement_domain: str,
                      allow_local_network_access: bool,
                      recent_posts_cache: {}, debug: bool,
                      system_language: str,
                      domain_full: str, person_cache: {},
                      signing_priv_key_pem: str,
                      blockedCache: {}) -> {}:
    """Download the post referenced by an announce.
    Returns the announced post wrapped in a Create activity, or None
    if the announce is invalid, blocked or rejected.
    Successful downloads are cached under /cache/announce/<nickname>
    and rejections are marked with a '.reject' file so they are not
    re-fetched.
    NOTE(review): 'translate' is currently unused apart from the
    commented-out labelAccusatoryPost call below.
    """
    # the announce must reference its object by url (a string)
    if not post_json_object.get('object'):
        return None
    if not isinstance(post_json_object['object'], str):
        return None
    # ignore self-boosts
    if post_json_object['actor'] in post_json_object['object']:
        return None

    # get the announced post
    announceCacheDir = base_dir + '/cache/announce/' + nickname
    if not os.path.isdir(announceCacheDir):
        os.mkdir(announceCacheDir)

    post_id = None
    if post_json_object.get('id'):
        post_id = remove_id_ending(post_json_object['id'])
    # cache filename derived from the announced object's url
    announceFilename = \
        announceCacheDir + '/' + \
        post_json_object['object'].replace('/', '#') + '.json'

    # previously rejected, so don't fetch again
    if os.path.isfile(announceFilename + '.reject'):
        return None

    if os.path.isfile(announceFilename):
        # return the previously cached copy
        if debug:
            print('Reading cached Announce content for ' +
                  post_json_object['object'])
        post_json_object = load_json(announceFilename)
        if post_json_object:
            return post_json_object
    else:
        profileStr = 'https://www.w3.org/ns/activitystreams'
        acceptStr = \
            'application/activity+json; ' + \
            'profile="' + profileStr + '"'
        asHeader = {
            'Accept': acceptStr
        }
        # some platforms (actor urls containing /channel/ or /accounts/)
        # expect ld+json rather than activity+json
        if '/channel/' in post_json_object['actor'] or \
           '/accounts/' in post_json_object['actor']:
            acceptStr = \
                'application/ld+json; ' + \
                'profile="' + profileStr + '"'
            asHeader = {
                'Accept': acceptStr
            }
        # the announcing actor must have a valid domain and must not
        # be blocked by this account or instance
        actorNickname = get_nickname_from_actor(post_json_object['actor'])
        actorDomain, actorPort = \
            get_domain_from_actor(post_json_object['actor'])
        if not actorDomain:
            print('Announce actor does not contain a ' +
                  'valid domain or port number: ' +
                  str(post_json_object['actor']))
            return None
        if is_blocked(base_dir, nickname, domain, actorNickname, actorDomain):
            print('Announce download blocked actor: ' +
                  actorNickname + '@' + actorDomain)
            return None
        # the announced object's author must also be unblocked
        objectNickname = get_nickname_from_actor(post_json_object['object'])
        objectDomain, objectPort = \
            get_domain_from_actor(post_json_object['object'])
        if not objectDomain:
            print('Announce object does not contain a ' +
                  'valid domain or port number: ' +
                  str(post_json_object['object']))
            return None
        if is_blocked(base_dir, nickname, domain, objectNickname,
                      objectDomain):
            if objectNickname and objectDomain:
                print('Announce download blocked object: ' +
                      objectNickname + '@' + objectDomain)
            else:
                print('Announce download blocked object: ' +
                      str(post_json_object['object']))
            return None
        if debug:
            print('Downloading Announce content for ' +
                  post_json_object['object'])
        # fetch the announced post from its home server
        announcedJson = \
            get_json(signing_priv_key_pem, session,
                     post_json_object['object'],
                     asHeader, None, debug, project_version,
                     http_prefix, domain)

        if not announcedJson:
            return None

        # validate the fetched post; any failure marks the announce
        # as rejected so it won't be fetched again
        if not isinstance(announcedJson, dict):
            print('WARN: announce json is not a dict - ' +
                  post_json_object['object'])
            _reject_announce(announceFilename,
                             base_dir, nickname, domain, post_id,
                             recent_posts_cache)
            return None
        if not announcedJson.get('id'):
            _reject_announce(announceFilename,
                             base_dir, nickname, domain, post_id,
                             recent_posts_cache)
            return None
        if not announcedJson.get('type'):
            _reject_announce(announceFilename,
                             base_dir, nickname, domain, post_id,
                             recent_posts_cache)
            return None
        if announcedJson['type'] == 'Video':
            # convert Video objects (e.g. PeerTube) into Note form
            convertedJson = \
                convert_video_to_note(base_dir, nickname, domain,
                                      system_language,
                                      announcedJson, blockedCache)
            if convertedJson:
                announcedJson = convertedJson
        if '/statuses/' not in announcedJson['id']:
            _reject_announce(announceFilename,
                             base_dir, nickname, domain, post_id,
                             recent_posts_cache)
            return None
        if not has_users_path(announcedJson['id']):
            _reject_announce(announceFilename,
                             base_dir, nickname, domain, post_id,
                             recent_posts_cache)
            return None
        if announcedJson['type'] != 'Note' and \
           announcedJson['type'] != 'Page' and \
           announcedJson['type'] != 'Article':
            # You can only announce Note or Article types
            _reject_announce(announceFilename,
                             base_dir, nickname, domain, post_id,
                             recent_posts_cache)
            return None
        if not announcedJson.get('content'):
            _reject_announce(announceFilename,
                             base_dir, nickname, domain, post_id,
                             recent_posts_cache)
            return None
        if not announcedJson.get('published'):
            _reject_announce(announceFilename,
                             base_dir, nickname, domain, post_id,
                             recent_posts_cache)
            return None
        # posts older than 90 days are not accepted
        if not valid_post_date(announcedJson['published'], 90, debug):
            _reject_announce(announceFilename,
                             base_dir, nickname, domain, post_id,
                             recent_posts_cache)
            return None
        # not understood languages are dropped, but not marked rejected
        if not understood_post_language(base_dir, nickname, domain,
                                        announcedJson, system_language,
                                        http_prefix, domain_full,
                                        person_cache):
            return None
        # Check the content of the announce
        contentStr = announcedJson['content']
        if dangerous_markup(contentStr, allow_local_network_access):
            _reject_announce(announceFilename,
                             base_dir, nickname, domain, post_id,
                             recent_posts_cache)
            return None

        if is_filtered(base_dir, nickname, domain, contentStr):
            _reject_announce(announceFilename,
                             base_dir, nickname, domain, post_id,
                             recent_posts_cache)
            return None

        if invalid_ciphertext(contentStr):
            _reject_announce(announceFilename,
                             base_dir, nickname, domain, post_id,
                             recent_posts_cache)
            print('WARN: Invalid ciphertext within announce ' +
                  str(announcedJson))
            return None

        # remove any long words
        contentStr = remove_long_words(contentStr, 40, [])

        # Prevent the same word from being repeated many times
        contentStr = limit_repeated_words(contentStr, 6)

        # remove text formatting, such as bold/italics
        contentStr = remove_text_formatting(contentStr)

        # set the content after santitization
        announcedJson['content'] = contentStr

        # wrap in create to be consistent with other posts
        announcedJson = \
            outbox_message_create_wrap(http_prefix,
                                       actorNickname, actorDomain, actorPort,
                                       announcedJson)
        if announcedJson['type'] != 'Create':
            # Create wrap failed
            _reject_announce(announceFilename,
                             base_dir, nickname, domain, post_id,
                             recent_posts_cache)
            return None

        # labelAccusatoryPost(post_json_object, translate)
        # set the id to the original status
        announcedJson['id'] = post_json_object['object']
        announcedJson['object']['id'] = post_json_object['object']
        # check that the repeat isn't for a blocked account
        attributedNickname = \
            get_nickname_from_actor(announcedJson['object']['id'])
        attributedDomain, attributedPort = \
            get_domain_from_actor(announcedJson['object']['id'])
        if attributedNickname and attributedDomain:
            attributedDomain = \
                get_full_domain(attributedDomain, attributedPort)
            if is_blocked(base_dir, nickname, domain,
                          attributedNickname, attributedDomain):
                _reject_announce(announceFilename,
                                 base_dir, nickname, domain, post_id,
                                 recent_posts_cache)
                return None
        post_json_object = announcedJson
        # apply any domain substitutions for youtube/twitter links
        replace_you_tube(post_json_object, yt_replace_domain, system_language)
        replace_twitter(post_json_object, twitter_replacement_domain,
                        system_language)
        # cache the sanitized post for future announce lookups
        if save_json(post_json_object, announceFilename):
            return post_json_object
    return None
|
2019-12-01 13:45:30 +00:00
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def is_muted_conv(base_dir: str, nickname: str, domain: str, post_id: str,
                  conversationId: str) -> bool:
    """Returns true if the given post is muted
    """
    # an entire conversation can be muted via a marker file
    if conversationId:
        convMarker = conversationId.replace('/', '#') + '.muted'
        convMutedFilename = \
            acct_dir(base_dir, nickname, domain) + \
            '/conversation/' + convMarker
        if os.path.isfile(convMutedFilename):
            return True
    # otherwise look for a mute marker beside the stored post
    locatedFilename = locate_post(base_dir, nickname, domain, post_id)
    if not locatedFilename:
        return False
    return os.path.isfile(locatedFilename + '.muted')
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def send_block_via_server(base_dir: str, session,
                          fromNickname: str, password: str,
                          fromDomain: str, fromPort: int,
                          http_prefix: str, blockedUrl: str,
                          cached_webfingers: {}, person_cache: {},
                          debug: bool, project_version: str,
                          signing_priv_key_pem: str) -> {}:
    """Creates a block via c2s.
    Posts a Block activity for blockedUrl to the sending account's
    outbox using basic auth.
    Returns the Block activity dict on success, or an integer error
    code: 6 no session, 1 webfinger failed, 3 no outbox found,
    4 no actor found.
    """
    if not session:
        print('WARN: No session for send_block_via_server')
        return 6

    fromDomainFull = get_full_domain(fromDomain, fromPort)

    blockActor = local_actor_url(http_prefix, fromNickname, fromDomainFull)
    # public addressing, cc'd to the blocker's followers
    toUrl = 'https://www.w3.org/ns/activitystreams#Public'
    ccUrl = blockActor + '/followers'

    newBlockJson = {
        "@context": "https://www.w3.org/ns/activitystreams",
        'type': 'Block',
        'actor': blockActor,
        'object': blockedUrl,
        'to': [toUrl],
        'cc': [ccUrl]
    }

    handle = http_prefix + '://' + fromDomainFull + '/@' + fromNickname

    # lookup the inbox for the To handle
    wfRequest = webfinger_handle(session, handle, http_prefix,
                                 cached_webfingers,
                                 fromDomain, project_version, debug, False,
                                 signing_priv_key_pem)
    if not wfRequest:
        if debug:
            print('DEBUG: block webfinger failed for ' + handle)
        return 1
    if not isinstance(wfRequest, dict):
        print('WARN: block Webfinger for ' + handle +
              ' did not return a dict. ' + str(wfRequest))
        return 1

    # c2s activities are delivered to the sender's own outbox
    postToBox = 'outbox'

    # get the actor inbox for the To handle
    originDomain = fromDomain
    (inboxUrl, pubKeyId, pubKey, fromPersonId, sharedInbox, avatarUrl,
     displayName, _) = get_person_box(signing_priv_key_pem,
                                      originDomain,
                                      base_dir, session, wfRequest,
                                      person_cache,
                                      project_version, http_prefix,
                                      fromNickname,
                                      fromDomain, postToBox, 72652)

    if not inboxUrl:
        if debug:
            print('DEBUG: block no ' + postToBox + ' was found for ' + handle)
        return 3
    if not fromPersonId:
        if debug:
            print('DEBUG: block no actor was found for ' + handle)
        return 4

    authHeader = create_basic_auth_header(fromNickname, password)

    headers = {
        'host': fromDomain,
        'Content-type': 'application/json',
        'Authorization': authHeader
    }
    # 30 second timeout for the post
    postResult = post_json(http_prefix, fromDomainFull,
                           session, newBlockJson, [], inboxUrl,
                           headers, 30, True)
    if not postResult:
        # best-effort: failure is logged but the activity is still returned
        print('WARN: block unable to post')

    if debug:
        print('DEBUG: c2s POST block success')

    return newBlockJson
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def send_mute_via_server(base_dir: str, session,
                         fromNickname: str, password: str,
                         fromDomain: str, fromPort: int,
                         http_prefix: str, mutedUrl: str,
                         cached_webfingers: {}, person_cache: {},
                         debug: bool, project_version: str,
                         signing_priv_key_pem: str) -> {}:
    """Creates a mute via c2s.
    Posts an Ignore activity for mutedUrl to the sending account's
    outbox using basic auth.
    Returns the Ignore activity dict on success, or an integer error
    code: 6 no session, 1 webfinger failed, 3 no outbox found,
    4 no actor found.
    """
    if not session:
        print('WARN: No session for send_mute_via_server')
        return 6

    fromDomainFull = get_full_domain(fromDomain, fromPort)

    actor = local_actor_url(http_prefix, fromNickname, fromDomainFull)
    handle = replace_users_with_at(actor)

    # mutes are private: addressed only to the muting actor
    newMuteJson = {
        "@context": "https://www.w3.org/ns/activitystreams",
        'type': 'Ignore',
        'actor': actor,
        'to': [actor],
        'object': mutedUrl
    }

    # lookup the inbox for the To handle
    wfRequest = webfinger_handle(session, handle, http_prefix,
                                 cached_webfingers,
                                 fromDomain, project_version, debug, False,
                                 signing_priv_key_pem)
    if not wfRequest:
        if debug:
            print('DEBUG: mute webfinger failed for ' + handle)
        return 1
    if not isinstance(wfRequest, dict):
        print('WARN: mute Webfinger for ' + handle +
              ' did not return a dict. ' + str(wfRequest))
        return 1

    # c2s activities are delivered to the sender's own outbox
    postToBox = 'outbox'

    # get the actor inbox for the To handle
    originDomain = fromDomain
    (inboxUrl, pubKeyId, pubKey, fromPersonId, sharedInbox, avatarUrl,
     displayName, _) = get_person_box(signing_priv_key_pem,
                                      originDomain,
                                      base_dir, session, wfRequest,
                                      person_cache,
                                      project_version, http_prefix,
                                      fromNickname,
                                      fromDomain, postToBox, 72652)

    if not inboxUrl:
        if debug:
            print('DEBUG: mute no ' + postToBox + ' was found for ' + handle)
        return 3
    if not fromPersonId:
        if debug:
            print('DEBUG: mute no actor was found for ' + handle)
        return 4

    authHeader = create_basic_auth_header(fromNickname, password)

    headers = {
        'host': fromDomain,
        'Content-type': 'application/json',
        'Authorization': authHeader
    }
    # 3 second timeout for the post
    postResult = post_json(http_prefix, fromDomainFull,
                           session, newMuteJson, [], inboxUrl,
                           headers, 3, True)
    if postResult is None:
        # best-effort: failure is logged but the activity is still returned
        print('WARN: mute unable to post')

    if debug:
        print('DEBUG: c2s POST mute success')

    return newMuteJson
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def send_undo_mute_via_server(base_dir: str, session,
                              fromNickname: str, password: str,
                              fromDomain: str, fromPort: int,
                              http_prefix: str, mutedUrl: str,
                              cached_webfingers: {}, person_cache: {},
                              debug: bool, project_version: str,
                              signing_priv_key_pem: str) -> {}:
    """Undoes a mute via c2s.
    Posts an Undo wrapping the original Ignore activity for mutedUrl
    to the sending account's outbox using basic auth.
    Returns the Undo activity dict on success, or an integer error
    code: 6 no session, 1 webfinger failed, 3 no outbox found,
    4 no actor found.
    """
    if not session:
        print('WARN: No session for send_undo_mute_via_server')
        return 6

    fromDomainFull = get_full_domain(fromDomain, fromPort)

    actor = local_actor_url(http_prefix, fromNickname, fromDomainFull)
    handle = replace_users_with_at(actor)

    # the Undo wraps an Ignore matching the one sent when muting
    undoMuteJson = {
        "@context": "https://www.w3.org/ns/activitystreams",
        'type': 'Undo',
        'actor': actor,
        'to': [actor],
        'object': {
            'type': 'Ignore',
            'actor': actor,
            'to': [actor],
            'object': mutedUrl
        }
    }

    # lookup the inbox for the To handle
    wfRequest = webfinger_handle(session, handle, http_prefix,
                                 cached_webfingers,
                                 fromDomain, project_version, debug, False,
                                 signing_priv_key_pem)
    if not wfRequest:
        if debug:
            print('DEBUG: undo mute webfinger failed for ' + handle)
        return 1
    if not isinstance(wfRequest, dict):
        print('WARN: undo mute Webfinger for ' + handle +
              ' did not return a dict. ' + str(wfRequest))
        return 1

    # c2s activities are delivered to the sender's own outbox
    postToBox = 'outbox'

    # get the actor inbox for the To handle
    originDomain = fromDomain
    (inboxUrl, pubKeyId, pubKey, fromPersonId, sharedInbox, avatarUrl,
     displayName, _) = get_person_box(signing_priv_key_pem,
                                      originDomain,
                                      base_dir, session, wfRequest,
                                      person_cache,
                                      project_version, http_prefix,
                                      fromNickname,
                                      fromDomain, postToBox, 72652)

    if not inboxUrl:
        if debug:
            print('DEBUG: undo mute no ' + postToBox +
                  ' was found for ' + handle)
        return 3
    if not fromPersonId:
        if debug:
            print('DEBUG: undo mute no actor was found for ' + handle)
        return 4

    authHeader = create_basic_auth_header(fromNickname, password)

    headers = {
        'host': fromDomain,
        'Content-type': 'application/json',
        'Authorization': authHeader
    }
    # 3 second timeout for the post
    postResult = post_json(http_prefix, fromDomainFull,
                           session, undoMuteJson, [], inboxUrl,
                           headers, 3, True)
    if postResult is None:
        # best-effort: failure is logged but the activity is still returned
        print('WARN: undo mute unable to post')

    if debug:
        print('DEBUG: c2s POST undo mute success')

    return undoMuteJson
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def send_undo_block_via_server(base_dir: str, session,
                               fromNickname: str, password: str,
                               fromDomain: str, fromPort: int,
                               http_prefix: str, blockedUrl: str,
                               cached_webfingers: {}, person_cache: {},
                               debug: bool, project_version: str,
                               signing_priv_key_pem: str) -> {}:
    """Undoes a block via c2s.
    Posts an Undo wrapping the original Block activity for blockedUrl
    to the sending account's outbox using basic auth.
    Returns the Undo activity dict on success, or an integer error
    code: 6 no session, 1 webfinger failed, 3 no outbox found,
    4 no actor found.
    """
    if not session:
        # fixed: previously reported the wrong function name
        # (send_block_via_server) in this warning
        print('WARN: No session for send_undo_block_via_server')
        return 6

    fromDomainFull = get_full_domain(fromDomain, fromPort)

    blockActor = local_actor_url(http_prefix, fromNickname, fromDomainFull)
    # public addressing, cc'd to the blocker's followers,
    # matching the original Block activity
    toUrl = 'https://www.w3.org/ns/activitystreams#Public'
    ccUrl = blockActor + '/followers'

    newBlockJson = {
        "@context": "https://www.w3.org/ns/activitystreams",
        'type': 'Undo',
        'actor': blockActor,
        'object': {
            'type': 'Block',
            'actor': blockActor,
            'object': blockedUrl,
            'to': [toUrl],
            'cc': [ccUrl]
        }
    }

    handle = http_prefix + '://' + fromDomainFull + '/@' + fromNickname

    # lookup the inbox for the To handle
    wfRequest = webfinger_handle(session, handle, http_prefix,
                                 cached_webfingers,
                                 fromDomain, project_version, debug, False,
                                 signing_priv_key_pem)
    if not wfRequest:
        if debug:
            print('DEBUG: unblock webfinger failed for ' + handle)
        return 1
    if not isinstance(wfRequest, dict):
        print('WARN: unblock webfinger for ' + handle +
              ' did not return a dict. ' + str(wfRequest))
        return 1

    # c2s activities are delivered to the sender's own outbox
    postToBox = 'outbox'

    # get the actor inbox for the To handle
    originDomain = fromDomain
    (inboxUrl, pubKeyId, pubKey, fromPersonId, sharedInbox, avatarUrl,
     displayName, _) = get_person_box(signing_priv_key_pem,
                                      originDomain,
                                      base_dir, session, wfRequest,
                                      person_cache,
                                      project_version, http_prefix,
                                      fromNickname,
                                      fromDomain, postToBox, 53892)

    if not inboxUrl:
        if debug:
            print('DEBUG: unblock no ' + postToBox +
                  ' was found for ' + handle)
        return 3
    if not fromPersonId:
        if debug:
            print('DEBUG: unblock no actor was found for ' + handle)
        return 4

    authHeader = create_basic_auth_header(fromNickname, password)

    headers = {
        'host': fromDomain,
        'Content-type': 'application/json',
        'Authorization': authHeader
    }
    # 30 second timeout for the post
    postResult = post_json(http_prefix, fromDomainFull,
                           session, newBlockJson, [], inboxUrl,
                           headers, 30, True)
    if not postResult:
        # best-effort: failure is logged but the activity is still returned
        print('WARN: unblock unable to post')

    if debug:
        print('DEBUG: c2s POST unblock success')

    return newBlockJson
|
2020-11-09 19:41:01 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def post_is_muted(base_dir: str, nickname: str, domain: str,
                  post_json_object: {}, messageId: str) -> bool:
    """ Returns true if the given post is muted
    """
    # honour an explicit boolean 'muted' field on the post itself
    mutedField = post_json_object.get('muted')
    if mutedField is True or mutedField is False:
        return mutedField

    # otherwise look for a mute marker file alongside the stored post
    accountDir = acct_dir(base_dir, nickname, domain)
    escapedId = messageId.replace('/', '#')
    candidateFilenames = (
        accountDir + '/inbox/' + escapedId + '.json.muted',
        accountDir + '/outbox/' + escapedId + '.json.muted',
        base_dir + '/accounts/cache/announce/' + nickname +
        '/' + escapedId + '.muted'
    )
    for muteFilename in candidateFilenames:
        if os.path.isfile(muteFilename):
            return True
    return False
|
2021-03-18 11:03:39 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def c2s_box_json(base_dir: str, session,
                 nickname: str, password: str,
                 domain: str, port: int,
                 http_prefix: str,
                 boxName: str, pageNumber: int,
                 debug: bool, signing_priv_key_pem: str) -> {}:
    """C2S Authenticated GET of posts for a timeline
    """
    if not session:
        print('WARN: No session for c2s_box_json')
        return None

    # actor url for the account owning the timeline
    fullDomain = get_full_domain(domain, port)
    accountActor = local_actor_url(http_prefix, nickname, fullDomain)

    # basic auth credentials for the c2s request
    basicAuth = create_basic_auth_header(nickname, password)

    activityProfile = 'https://www.w3.org/ns/activitystreams'
    requestHeaders = {
        'host': domain,
        'Content-type': 'application/json',
        'Authorization': basicAuth,
        'Accept': 'application/ld+json; profile="' + activityProfile + '"'
    }

    # GET json
    timelineUrl = accountActor + '/' + boxName + '?page=' + str(pageNumber)
    timelineJson = \
        get_json(signing_priv_key_pem, session, timelineUrl,
                 requestHeaders, None,
                 debug, __version__, http_prefix, None)

    if timelineJson is not None and debug:
        print('DEBUG: GET c2s_box_json success')

    return timelineJson
|
2021-10-14 15:12:35 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def seconds_between_published(published1: str, published2: str) -> int:
    """Returns the number of seconds between two published dates

    published1, published2 -- dates in '%Y-%m-%dT%H:%M:%SZ' format
    Returns the signed difference (published2 - published1) in whole
    seconds, or -1 if either date fails to parse.
    """
    try:
        published1Time = \
            datetime.datetime.strptime(published1, '%Y-%m-%dT%H:%M:%SZ')
    except BaseException:
        print('EX: seconds_between_published unable to parse date 1 ' +
              str(published1))
        return -1
    try:
        published2Time = \
            datetime.datetime.strptime(published2, '%Y-%m-%dT%H:%M:%SZ')
    except BaseException:
        print('EX: seconds_between_published unable to parse date 2 ' +
              str(published2))
        return -1
    # use total_seconds() rather than timedelta.seconds, which only
    # returns the seconds component and silently drops whole days
    return int((published2Time - published1Time).total_seconds())
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def edited_post_filename(base_dir: str, nickname: str, domain: str,
                         post_json_object: {}, debug: bool,
                         maxTimeDiffSeconds: int) -> str:
    """Returns the filename of the edited post

    The given post is treated as a possible edit if it has the same
    type and author as the most recent stored post from that author,
    was published within maxTimeDiffSeconds of it, and has
    sufficiently similar content. Returns an empty string when the
    post does not look like an edit.
    """
    if not has_object_dict(post_json_object):
        return ''
    if not post_json_object.get('type'):
        return ''
    if not post_json_object['object'].get('type'):
        return ''
    if not post_json_object['object'].get('published'):
        return ''
    if not post_json_object['object'].get('id'):
        return ''
    if not post_json_object['object'].get('content'):
        return ''
    if not post_json_object['object'].get('attributedTo'):
        return ''
    if not isinstance(post_json_object['object']['attributedTo'], str):
        return ''
    actor = post_json_object['object']['attributedTo']
    # file containing the id of the most recent post from this actor
    actorFilename = \
        acct_dir(base_dir, nickname, domain) + '/lastpost/' + \
        actor.replace('/', '#')
    if not os.path.isfile(actorFilename):
        return ''
    post_id = remove_id_ending(post_json_object['object']['id'])
    lastpost_id = None
    try:
        with open(actorFilename, 'r') as fp:
            lastpost_id = fp.read()
    except OSError:
        print('EX: edited_post_filename unable to read ' + actorFilename)
        return ''
    if not lastpost_id:
        return ''
    if lastpost_id == post_id:
        # same id, so this is the same post rather than an edit of it
        return ''
    lastpost_filename = \
        locate_post(base_dir, nickname, domain, lastpost_id, False)
    if not lastpost_filename:
        return ''
    lastpost_json = load_json(lastpost_filename, 0)
    if not lastpost_json:
        return ''
    if not lastpost_json.get('type'):
        return ''
    if lastpost_json['type'] != post_json_object['type']:
        return ''
    if not lastpost_json['object'].get('type'):
        return ''
    if lastpost_json['object']['type'] != post_json_object['object']['type']:
        # bug fix: previously a bare "return" (None), inconsistent with
        # the declared str return type and every other branch
        return ''
    if not lastpost_json['object'].get('published'):
        return ''
    if not lastpost_json['object'].get('id'):
        return ''
    if not lastpost_json['object'].get('content'):
        return ''
    if not lastpost_json['object'].get('attributedTo'):
        return ''
    if not isinstance(lastpost_json['object']['attributedTo'], str):
        return ''
    timeDiffSeconds = \
        seconds_between_published(lastpost_json['object']['published'],
                                  post_json_object['object']['published'])
    if timeDiffSeconds > maxTimeDiffSeconds:
        # too much time has elapsed for this to be an edit
        return ''
    if debug:
        print(post_id + ' might be an edit of ' + lastpost_id)
    # require at least 70% word similarity between the two contents
    if words_similarity(lastpost_json['object']['content'],
                        post_json_object['object']['content'], 10) < 70:
        return ''
    print(post_id + ' is an edit of ' + lastpost_id)
    return lastpost_filename
|
2021-10-17 10:02:55 +00:00
|
|
|
|
|
|
|
|
2021-12-28 18:13:52 +00:00
|
|
|
def get_original_post_from_announce_url(announceUrl: str, base_dir: str,
                                        nickname: str,
                                        domain: str) -> (str, str, str):
    """From the url of an announce this returns the actor, url and
    filename (if available) of the original post being announced
    """
    post_filename = locate_post(base_dir, nickname, domain, announceUrl)
    if not post_filename:
        return None, None, None

    # the stored post must be a valid Announce wrapping a post url
    announce_json = load_json(post_filename, 0, 1)
    announce_ok = \
        announce_json and \
        announce_json.get('type') == 'Announce' and \
        announce_json.get('object') and \
        isinstance(announce_json['object'], str)
    if not announce_ok:
        return None, None, post_filename

    actor = url = None
    orig_post_id = announce_json['object']
    # do we have the original post locally?
    orig_filename = locate_post(base_dir, nickname, domain, orig_post_id)
    if orig_filename:
        # the original post is stored on this instance
        orig_json = load_json(orig_filename, 0, 1)
        if orig_json and has_object_dict(orig_json):
            obj = orig_json['object']
            if obj.get('attributedTo'):
                if isinstance(obj['attributedTo'], str):
                    actor = obj['attributedTo']
                    url = orig_post_id
            elif obj.get('actor'):
                # NOTE(review): the condition checks the object's 'actor'
                # but this reads the top level 'actor' field - confirm
                # that this asymmetry is intentional
                actor = orig_json['actor']
                url = orig_post_id
    elif has_users_path(orig_post_id):
        # original post not stored locally, so derive the actor
        # from the original post url itself
        orig_nick = get_nickname_from_actor(orig_post_id)
        orig_domain, _ = get_domain_from_actor(orig_post_id)
        if orig_nick and orig_domain:
            actor = \
                orig_post_id.split('/' + orig_nick + '/')[0] + \
                '/' + orig_nick
            url = orig_post_id

    return actor, url, orig_filename
|