__filename__ = "posts.py"
__author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "ActivityPub"

import json
import html
import datetime
import os
import shutil
import sys
import time
import random
from socket import error as SocketError
from time import gmtime, strftime
from collections import OrderedDict
from threads import threadWithTrace
from cache import storePersonInCache
from cache import getPersonFromCache
from cache import expirePersonCache
from pprint import pprint
from session import createSession
from session import getJson
from session import postJson
from session import postJsonString
from session import postImage
from webfinger import webfingerHandle
from httpsig import createSignedHeader
from siteactive import siteIsActive
from languages import understoodPostLanguage
from utils import replaceUsersWithAt
from utils import hasGroupType
from utils import getBaseContentFromPost
from utils import removeDomainPort
from utils import getPortFromDomain
from utils import hasObjectDict
from utils import rejectPostId
from utils import removeInvalidChars
from utils import fileLastModified
from utils import isPublicPost
from utils import hasUsersPath
from utils import validPostDate
from utils import getFullDomain
from utils import getFollowersList
from utils import isEvil
from utils import getStatusNumber
from utils import createPersonDir
from utils import urlPermitted
from utils import getNicknameFromActor
from utils import getDomainFromActor
from utils import deletePost
from utils import validNickname
from utils import locatePost
from utils import loadJson
from utils import saveJson
from utils import getConfigParam
from utils import locateNewsVotes
from utils import locateNewsArrival
from utils import votesOnNewswireItem
from utils import removeHtml
from utils import dangerousMarkup
from utils import acctDir
from utils import localActorUrl
from media import attachMedia
from media import replaceYouTube
from content import limitRepeatedWords
from content import tagExists
from content import removeLongWords
from content import addHtmlTags
from content import replaceEmojiFromTags
from content import removeTextFormatting
from auth import createBasicAuthHeader
from blocking import isBlocked
from blocking import isBlockedDomain
from filters import isFiltered
from git import convertPostToPatch
from linked_data_sig import generateJsonSignature
from petnames import resolvePetnames


def isModerator(baseDir: str, nickname: str) -> bool:
    """Returns true if the given nickname is a moderator
    """
    moderatorsFile = baseDir + '/accounts/moderators.txt'

    if not os.path.isfile(moderatorsFile):
        adminName = getConfigParam(baseDir, 'admin')
        if not adminName:
            return False
        if adminName == nickname:
            return True
        return False

    with open(moderatorsFile, 'r') as f:
        lines = f.readlines()
        if len(lines) == 0:
            adminName = getConfigParam(baseDir, 'admin')
            if not adminName:
                return False
            if adminName == nickname:
                return True
        for moderator in lines:
            moderator = moderator.strip('\n').strip('\r')
            if moderator == nickname:
                return True
    return False
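
# Based on the parsing above, accounts/moderators.txt appears to hold one
# moderator nickname per line, for example (illustrative names only):
#   alice
#   bob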


def noOfFollowersOnDomain(baseDir: str, handle: str,
                          domain: str, followFile='followers.txt') -> int:
    """Returns the number of followers of the given handle from the given domain
    """
    filename = baseDir + '/accounts/' + handle + '/' + followFile
    if not os.path.isfile(filename):
        return 0

    ctr = 0
    with open(filename, 'r') as followersFilename:
        for followerHandle in followersFilename:
            if '@' in followerHandle:
                followerDomain = followerHandle.split('@')[1]
                followerDomain = followerDomain.replace('\n', '')
                followerDomain = followerDomain.replace('\r', '')
                if domain == followerDomain:
                    ctr += 1
    return ctr


def _getLocalPrivateKey(baseDir: str, nickname: str, domain: str) -> str:
    """Returns the private key for a local account
    """
    if not domain or not nickname:
        return None
    handle = nickname + '@' + domain
    keyFilename = baseDir + '/keys/private/' + handle.lower() + '.key'
    if not os.path.isfile(keyFilename):
        return None
    with open(keyFilename, 'r') as pemFile:
        return pemFile.read()
    return None


def getInstanceActorKey(baseDir: str, domain: str) -> str:
    """Returns the private key for the instance actor used for
    signing GET posts
    """
    return _getLocalPrivateKey(baseDir, 'inbox', domain)


def _getLocalPublicKey(baseDir: str, nickname: str, domain: str) -> str:
    """Returns the public key for a local account
    """
    if not domain or not nickname:
        return None
    handle = nickname + '@' + domain
    keyFilename = baseDir + '/keys/public/' + handle.lower() + '.key'
    if not os.path.isfile(keyFilename):
        return None
    with open(keyFilename, 'r') as pemFile:
        return pemFile.read()
    return None


def _getPersonKey(nickname: str, domain: str, baseDir: str,
                  keyType: str = 'public', debug: bool = False):
    """Returns the public or private key of a person
    """
    if keyType == 'private':
        keyPem = _getLocalPrivateKey(baseDir, nickname, domain)
    else:
        keyPem = _getLocalPublicKey(baseDir, nickname, domain)
    if not keyPem:
        if debug:
            print('DEBUG: ' + keyType + ' key file not found')
        return ''
    if len(keyPem) < 20:
        if debug:
            print('DEBUG: private key was too short: ' + keyPem)
        return ''
    return keyPem


def _cleanHtml(rawHtml: str) -> str:
    # text=BeautifulSoup(rawHtml, 'html.parser').get_text()
    text = rawHtml
    return html.unescape(text)


def getUserUrl(wfRequest: {}, sourceId: int = 0, debug: bool = False) -> str:
    """Gets the actor url from a webfinger request
    """
    if not wfRequest.get('links'):
        if sourceId == 72367:
            print('getUserUrl ' + str(sourceId) +
                  ' failed to get display name for webfinger ' +
                  str(wfRequest))
        else:
            print('getUserUrl webfinger activity+json contains no links ' +
                  str(sourceId) + ' ' + str(wfRequest))
        return None
    for link in wfRequest['links']:
        if not (link.get('type') and link.get('href')):
            continue
        if link['type'] != 'application/activity+json':
            continue
        if '/@' not in link['href']:
            if debug and not hasUsersPath(link['href']):
                print('getUserUrl webfinger activity+json ' +
                      'contains single user instance actor ' +
                      str(sourceId) + ' ' + str(link))
        else:
            return link['href'].replace('/@', '/users/')
        return link['href']
    return None


def parseUserFeed(signingPrivateKeyPem: str,
                  session, feedUrl: str, asHeader: {},
                  projectVersion: str, httpPrefix: str,
                  domain: str, debug: bool, depth: int = 0) -> []:
    if depth > 10:
        if debug:
            print('Maximum search depth reached')
        return None

    if debug:
        print('Getting user feed for ' + feedUrl)
        print('User feed header ' + str(asHeader))
        print('httpPrefix ' + str(httpPrefix))

    feedJson = getJson(signingPrivateKeyPem, session, feedUrl, asHeader, None,
                       debug, projectVersion, httpPrefix, domain)
    if not feedJson:
        profileStr = 'https://www.w3.org/ns/activitystreams'
        acceptStr = 'application/ld+json; profile="' + profileStr + '"'
        if asHeader['Accept'] != acceptStr:
            asHeader = {
                'Accept': acceptStr
            }
            feedJson = getJson(signingPrivateKeyPem, session, feedUrl,
                               asHeader, None, debug, projectVersion,
                               httpPrefix, domain)
    if not feedJson:
        if debug:
            print('No user feed was returned')
        return None

    if debug:
        print('User feed:')
        pprint(feedJson)

    if 'orderedItems' in feedJson:
        return feedJson['orderedItems']
    elif 'items' in feedJson:
        return feedJson['items']

    nextUrl = None
    if 'first' in feedJson:
        nextUrl = feedJson['first']
    elif 'next' in feedJson:
        nextUrl = feedJson['next']

    if debug:
        print('User feed next url: ' + str(nextUrl))

    if nextUrl:
        if isinstance(nextUrl, str):
            if '?max_id=0' not in nextUrl:
                userFeed = \
                    parseUserFeed(signingPrivateKeyPem,
                                  session, nextUrl, asHeader,
                                  projectVersion, httpPrefix,
                                  domain, debug, depth + 1)
                if userFeed:
                    return userFeed
        elif isinstance(nextUrl, dict):
            userFeed = nextUrl
            if userFeed.get('orderedItems'):
                return userFeed['orderedItems']
            elif userFeed.get('items'):
                return userFeed['items']
    return None


def _getPersonBoxActor(session, baseDir: str, actor: str,
                       profileStr: str, asHeader: {},
                       debug: bool, projectVersion: str,
                       httpPrefix: str, domain: str,
                       personCache: {},
                       signingPrivateKeyPem: str) -> {}:
    """Returns the actor json for the given actor url
    """
    personJson = \
        getPersonFromCache(baseDir, actor, personCache, True)
    if personJson:
        return personJson

    if '/channel/' in actor or '/accounts/' in actor:
        asHeader = {
            'Accept': 'application/ld+json; profile="' + profileStr + '"'
        }
    personJson = getJson(signingPrivateKeyPem, session, actor, asHeader, None,
                         debug, projectVersion, httpPrefix, domain)
    if personJson:
        return personJson
    asHeader = {
        'Accept': 'application/ld+json; profile="' + profileStr + '"'
    }
    personJson = getJson(signingPrivateKeyPem, session, actor, asHeader, None,
                         debug, projectVersion, httpPrefix, domain)
    if personJson:
        return personJson
    print('Unable to get actor for ' + actor)
    return None


def getPersonBox(signingPrivateKeyPem: str,
                 baseDir: str, session, wfRequest: {}, personCache: {},
                 projectVersion: str, httpPrefix: str,
                 nickname: str, domain: str,
                 boxName: str = 'inbox',
                 sourceId=0) -> (str, str, str, str, str, str, str):
    debug = False
    profileStr = 'https://www.w3.org/ns/activitystreams'
    asHeader = {
        'Accept': 'application/activity+json; profile="' + profileStr + '"'
    }
    if not wfRequest:
        print('No webfinger given')
        return None, None, None, None, None, None, None

    # get the actor / personUrl
    if not wfRequest.get('errors'):
        # get the actor url from webfinger links
        personUrl = getUserUrl(wfRequest, sourceId, debug)
    else:
        if nickname == 'dev':
            # try single user instance
            print('getPersonBox: Trying single user instance with ld+json')
            personUrl = httpPrefix + '://' + domain
            asHeader = {
                'Accept': 'application/ld+json; profile="' + profileStr + '"'
            }
        else:
            # the final fallback is a mastodon style url
            personUrl = localActorUrl(httpPrefix, nickname, domain)
    if not personUrl:
        return None, None, None, None, None, None, None

    # get the actor json from the url
    personJson = \
        _getPersonBoxActor(session, baseDir, personUrl,
                           profileStr, asHeader,
                           debug, projectVersion,
                           httpPrefix, domain,
                           personCache, signingPrivateKeyPem)
    if not personJson:
        return None, None, None, None, None, None, None

    # get the url for the box/collection
    boxJson = None
    if not personJson.get(boxName):
        if personJson.get('endpoints'):
            if personJson['endpoints'].get(boxName):
                boxJson = personJson['endpoints'][boxName]
    else:
        boxJson = personJson[boxName]
    if not boxJson:
        return None, None, None, None, None, None, None

    personId = None
    if personJson.get('id'):
        personId = personJson['id']
    pubKeyId = None
    pubKey = None
    if personJson.get('publicKey'):
        if personJson['publicKey'].get('id'):
            pubKeyId = personJson['publicKey']['id']
        if personJson['publicKey'].get('publicKeyPem'):
            pubKey = personJson['publicKey']['publicKeyPem']
    sharedInbox = None
    if personJson.get('sharedInbox'):
        sharedInbox = personJson['sharedInbox']
    else:
        if personJson.get('endpoints'):
            if personJson['endpoints'].get('sharedInbox'):
                sharedInbox = personJson['endpoints']['sharedInbox']
    avatarUrl = None
    if personJson.get('icon'):
        if personJson['icon'].get('url'):
            avatarUrl = personJson['icon']['url']
    displayName = None
    if personJson.get('name'):
        displayName = personJson['name']
        if dangerousMarkup(personJson['name'], False):
            displayName = '*ADVERSARY*'
        elif isFiltered(baseDir,
                        nickname, domain,
                        displayName):
            displayName = '*FILTERED*'
        # have they moved?
        if personJson.get('movedTo'):
            displayName += ' ⌂'

    storePersonInCache(baseDir, personUrl, personJson, personCache, True)

    return boxJson, pubKeyId, pubKey, personId, sharedInbox, \
        avatarUrl, displayName


def _isPublicFeedPost(item: {}, personPosts: {}, debug: bool) -> bool:
    """Is the given post a public feed post?
    """
    if not isinstance(item, dict):
        if debug:
            print('item object is not a dict')
            pprint(item)
        return False
    if not item.get('id'):
        if debug:
            print('No id')
        return False
    if not item.get('type'):
        if debug:
            print('No type')
        return False
    if item['type'] != 'Create' and item['type'] != 'Announce':
        if debug:
            print('Not Create type')
        return False
    if item.get('object'):
        if isinstance(item['object'], dict):
            if not item['object'].get('published'):
                if debug:
                    print('No published attribute')
                return False
        elif isinstance(item['object'], str):
            if not item.get('published'):
                if debug:
                    print('No published attribute')
                return False
        else:
            if debug:
                print('object is not a dict or string')
            return False
    if not personPosts.get(item['id']):
        # check that this is a public post
        # #Public should appear in the "to" list
        if isinstance(item['object'], dict):
            if item['object'].get('to'):
                isPublic = False
                for recipient in item['object']['to']:
                    if recipient.endswith('#Public'):
                        isPublic = True
                        break
                if not isPublic:
                    return False
        elif isinstance(item['object'], str):
            if item.get('to'):
                isPublic = False
                for recipient in item['to']:
                    if recipient.endswith('#Public'):
                        isPublic = True
                        break
                if not isPublic:
                    return False
    return True


def isCreateInsideAnnounce(item: {}) -> bool:
    """ is this a Create inside of an Announce?
    eg. lemmy feed item
    """
    if not isinstance(item, dict):
        return False
    if item['type'] != 'Announce':
        return False
    if not item.get('object'):
        return False
    if not isinstance(item['object'], dict):
        return False
    if not item['object'].get('type'):
        return False
    if item['object']['type'] != 'Create':
        return False
    return True


def _getPosts(session, outboxUrl: str, maxPosts: int,
              maxMentions: int,
              maxEmoji: int, maxAttachments: int,
              federationList: [],
              personCache: {}, raw: bool,
              simple: bool, debug: bool,
              projectVersion: str, httpPrefix: str,
              domain: str, systemLanguage: str,
              signingPrivateKeyPem: str) -> {}:
    """Gets public posts from an outbox
    """
    if debug:
        print('Getting outbox posts for ' + outboxUrl)
    personPosts = {}
    if not outboxUrl:
        return personPosts
    profileStr = 'https://www.w3.org/ns/activitystreams'
    acceptStr = \
        'application/activity+json, application/ld+json; ' + \
        'profile="' + profileStr + '"'
    asHeader = {
        'Accept': acceptStr
    }
    if '/outbox/' in outboxUrl:
        acceptStr = \
            'application/ld+json, application/activity+json; ' + \
            'profile="' + profileStr + '"'
        asHeader = {
            'Accept': acceptStr
        }
    if raw:
        if debug:
            print('Returning the raw feed')
        result = []
        i = 0
        userFeed = parseUserFeed(signingPrivateKeyPem,
                                 session, outboxUrl, asHeader,
                                 projectVersion, httpPrefix, domain, debug)
        for item in userFeed:
            result.append(item)
            i += 1
            if i == maxPosts:
                break
        pprint(result)
        return None

    if debug:
        print('Returning a human readable version of the feed')
    userFeed = parseUserFeed(signingPrivateKeyPem,
                             session, outboxUrl, asHeader,
                             projectVersion, httpPrefix, domain, debug)
    if not userFeed:
        return personPosts

    i = 0
    for item in userFeed:
        if isCreateInsideAnnounce(item):
            item = item['object']

        if not _isPublicFeedPost(item, personPosts, debug):
            continue

        content = getBaseContentFromPost(item, systemLanguage)
        content = content.replace('&apos;', "'")

        mentions = []
        emoji = {}
        summary = ''
        inReplyTo = ''
        attachment = []
        sensitive = False
        if isinstance(item['object'], dict):
            if item['object'].get('tag'):
                for tagItem in item['object']['tag']:
                    tagType = tagItem['type'].lower()
                    if tagType == 'emoji':
                        if tagItem.get('name') and tagItem.get('icon'):
                            if tagItem['icon'].get('url'):
                                # No emoji from non-permitted domains
                                if urlPermitted(tagItem['icon']['url'],
                                                federationList):
                                    emojiName = tagItem['name']
                                    emojiIcon = tagItem['icon']['url']
                                    emoji[emojiName] = emojiIcon
                                else:
                                    if debug:
                                        print('url not permitted ' +
                                              tagItem['icon']['url'])
                    if tagType == 'mention':
                        if tagItem.get('name'):
                            if tagItem['name'] not in mentions:
                                mentions.append(tagItem['name'])
                if len(mentions) > maxMentions:
                    if debug:
                        print('max mentions reached')
                    continue
                if len(emoji) > maxEmoji:
                    if debug:
                        print('max emojis reached')
                    continue

            if item['object'].get('summary'):
                if item['object']['summary']:
                    summary = item['object']['summary']

            if item['object'].get('inReplyTo'):
                if item['object']['inReplyTo']:
                    if isinstance(item['object']['inReplyTo'], str):
                        # No replies to non-permitted domains
                        if not urlPermitted(item['object']['inReplyTo'],
                                            federationList):
                            if debug:
                                print('url not permitted ' +
                                      item['object']['inReplyTo'])
                            continue
                        inReplyTo = item['object']['inReplyTo']

            if item['object'].get('attachment'):
                if item['object']['attachment']:
                    for attach in item['object']['attachment']:
                        if attach.get('name') and attach.get('url'):
                            # no attachments from non-permitted domains
                            if urlPermitted(attach['url'],
                                            federationList):
                                attachment.append([attach['name'],
                                                   attach['url']])
                            else:
                                if debug:
                                    print('url not permitted ' +
                                          attach['url'])

            sensitive = False
            if item['object'].get('sensitive'):
                sensitive = item['object']['sensitive']

            if content:
                if simple:
                    print(_cleanHtml(content) + '\n')
                else:
                    pprint(item)
                    personPosts[item['id']] = {
                        "sensitive": sensitive,
                        "inreplyto": inReplyTo,
                        "summary": summary,
                        "html": content,
                        "plaintext": _cleanHtml(content),
                        "attachment": attachment,
                        "mentions": mentions,
                        "emoji": emoji
                    }
                i += 1

        if i == maxPosts:
            break
    return personPosts


def _getCommonWords() -> []:
    """Returns a list of common words
    """
    return (
        'that', 'some', 'about', 'then', 'they', 'were',
        'also', 'from', 'with', 'this', 'have', 'more',
        'need', 'here', 'would', 'these', 'into', 'very',
        'well', 'when', 'what', 'your', 'there', 'which',
        'even', 'there', 'such', 'just', 'those', 'only',
        'will', 'much', 'than', 'them', 'each', 'goes',
        'been', 'over', 'their', 'where', 'could', 'though',
        'like', 'think', 'same', 'maybe', 'really', 'thing',
        'something', 'possible', 'actual', 'actually',
        'because', 'around', 'having', 'especially', 'other',
        'making', 'made', 'make', 'makes', 'including',
        'includes', 'know', 'knowing', 'knows', 'things',
        'say', 'says', 'saying', 'many', 'somewhat',
        'problem', 'problems', 'idea', 'ideas',
        'using', 'uses', 'https', 'still', 'want', 'wants'
    )


def _updateWordFrequency(content: str, wordFrequency: {}) -> None:
    """Creates a dictionary containing words and the number of times
    that they appear
    """
    plainText = removeHtml(content)
    removeChars = ('.', ';', '?', '\n', ':')
    for ch in removeChars:
        plainText = plainText.replace(ch, ' ')
    wordsList = plainText.split(' ')
    commonWords = _getCommonWords()
    for word in wordsList:
        wordLen = len(word)
        if wordLen < 3:
            continue
        if wordLen < 4:
            if word.upper() != word:
                continue
        if '&' in word or \
           '"' in word or \
           '@' in word or \
           "'" in word or \
           "--" in word or \
           '//' in word:
            continue
        if word.lower() in commonWords:
            continue
        if wordFrequency.get(word):
            wordFrequency[word] += 1
        else:
            wordFrequency[word] = 1


def getPostDomains(session, outboxUrl: str, maxPosts: int,
                   maxMentions: int,
                   maxEmoji: int, maxAttachments: int,
                   federationList: [],
                   personCache: {},
                   debug: bool,
                   projectVersion: str, httpPrefix: str,
                   domain: str,
                   wordFrequency: {},
                   domainList: [], systemLanguage: str,
                   signingPrivateKeyPem: str) -> []:
    """Returns a list of domains referenced within public posts
    """
    if not outboxUrl:
        return []
    profileStr = 'https://www.w3.org/ns/activitystreams'
    acceptStr = \
        'application/activity+json, application/ld+json; ' + \
        'profile="' + profileStr + '"'
    asHeader = {
        'Accept': acceptStr
    }
    if '/outbox/' in outboxUrl:
        acceptStr = \
            'application/ld+json, application/activity+json; ' + \
            'profile="' + profileStr + '"'
        asHeader = {
            'Accept': acceptStr
        }

    postDomains = domainList

    i = 0
    userFeed = parseUserFeed(signingPrivateKeyPem,
                             session, outboxUrl, asHeader,
                             projectVersion, httpPrefix, domain, debug)
    for item in userFeed:
        i += 1
        if i > maxPosts:
            break
        if not hasObjectDict(item):
            continue
        contentStr = getBaseContentFromPost(item, systemLanguage)
        if contentStr:
            _updateWordFrequency(contentStr, wordFrequency)
        if item['object'].get('inReplyTo'):
            if isinstance(item['object']['inReplyTo'], str):
                postDomain, postPort = \
                    getDomainFromActor(item['object']['inReplyTo'])
                if postDomain not in postDomains:
                    postDomains.append(postDomain)

        if item['object'].get('tag'):
            for tagItem in item['object']['tag']:
                tagType = tagItem['type'].lower()
                if tagType == 'mention':
                    if tagItem.get('href'):
                        postDomain, postPort = \
                            getDomainFromActor(tagItem['href'])
                        if postDomain not in postDomains:
                            postDomains.append(postDomain)
    return postDomains


def _getPostsForBlockedDomains(baseDir: str,
                               session, outboxUrl: str, maxPosts: int,
                               maxMentions: int,
                               maxEmoji: int, maxAttachments: int,
                               federationList: [],
                               personCache: {},
                               debug: bool,
                               projectVersion: str, httpPrefix: str,
                               domain: str,
                               signingPrivateKeyPem: str) -> {}:
    """Returns a dictionary of posts for blocked domains
    """
    if not outboxUrl:
        return {}
    profileStr = 'https://www.w3.org/ns/activitystreams'
    acceptStr = \
        'application/activity+json, application/ld+json; ' + \
        'profile="' + profileStr + '"'
    asHeader = {
        'Accept': acceptStr
    }
    if '/outbox/' in outboxUrl:
        acceptStr = \
            'application/ld+json, application/activity+json; ' + \
            'profile="' + profileStr + '"'
        asHeader = {
            'Accept': acceptStr
        }

    blockedPosts = {}

    i = 0
    userFeed = parseUserFeed(signingPrivateKeyPem,
                             session, outboxUrl, asHeader,
                             projectVersion, httpPrefix, domain, debug)
    for item in userFeed:
        i += 1
        if i > maxPosts:
            break
        if not hasObjectDict(item):
            continue
        if item['object'].get('inReplyTo'):
            if isinstance(item['object']['inReplyTo'], str):
                postDomain, postPort = \
                    getDomainFromActor(item['object']['inReplyTo'])
                if isBlockedDomain(baseDir, postDomain):
                    if item['object'].get('url'):
                        url = item['object']['url']
                    else:
                        url = item['object']['id']
                    if not blockedPosts.get(postDomain):
                        blockedPosts[postDomain] = [url]
                    else:
                        if url not in blockedPosts[postDomain]:
                            blockedPosts[postDomain].append(url)

        if item['object'].get('tag'):
            for tagItem in item['object']['tag']:
                tagType = tagItem['type'].lower()
                if tagType == 'mention':
                    if tagItem.get('href'):
                        postDomain, postPort = \
                            getDomainFromActor(tagItem['href'])
                        if isBlockedDomain(baseDir, postDomain):
                            if item['object'].get('url'):
                                url = item['object']['url']
                            else:
                                url = item['object']['id']
                            if not blockedPosts.get(postDomain):
                                blockedPosts[postDomain] = [url]
                            else:
                                if url not in blockedPosts[postDomain]:
                                    blockedPosts[postDomain].append(url)
    return blockedPosts


def deleteAllPosts(baseDir: str,
                   nickname: str, domain: str, boxname: str) -> None:
    """Deletes all posts for a person from inbox or outbox
    """
    if boxname != 'inbox' and boxname != 'outbox' and \
       boxname != 'tlblogs' and boxname != 'tlnews':
        return
    boxDir = createPersonDir(nickname, domain, baseDir, boxname)
    for deleteFilename in os.scandir(boxDir):
        deleteFilename = deleteFilename.name
        filePath = os.path.join(boxDir, deleteFilename)
        try:
            if os.path.isfile(filePath):
                os.unlink(filePath)
            elif os.path.isdir(filePath):
                shutil.rmtree(filePath)
        except Exception as e:
            print('ERROR: deleteAllPosts ' + str(e))


def savePostToBox(baseDir: str, httpPrefix: str, postId: str,
                  nickname: str, domain: str, postJsonObject: {},
                  boxname: str) -> str:
    """Saves the given json to the given box
    Returns the filename
    """
    if boxname != 'inbox' and boxname != 'outbox' and \
       boxname != 'tlblogs' and boxname != 'tlnews' and \
       boxname != 'scheduled':
        return None
    originalDomain = domain
    domain = removeDomainPort(domain)

    if not postId:
        statusNumber, published = getStatusNumber()
        postId = \
            localActorUrl(httpPrefix, nickname, originalDomain) + \
            '/statuses/' + statusNumber
        postJsonObject['id'] = postId + '/activity'
    if hasObjectDict(postJsonObject):
        postJsonObject['object']['id'] = postId
        postJsonObject['object']['atomUri'] = postId

    boxDir = createPersonDir(nickname, domain, baseDir, boxname)
    filename = boxDir + '/' + postId.replace('/', '#') + '.json'

    saveJson(postJsonObject, filename)
    return filename


def _updateHashtagsIndex(baseDir: str, tag: {}, newPostId: str) -> None:
    """Writes the post url for hashtags to a file
    This allows posts for a hashtag to be quickly looked up
    """
    if tag['type'] != 'Hashtag':
        return

    # create hashtags directory
    tagsDir = baseDir + '/tags'
    if not os.path.isdir(tagsDir):
        os.mkdir(tagsDir)
    tagName = tag['name']
    tagsFilename = tagsDir + '/' + tagName[1:] + '.txt'
    tagline = newPostId + '\n'

    if not os.path.isfile(tagsFilename):
        # create a new tags index file
        with open(tagsFilename, 'w+') as tagsFile:
            tagsFile.write(tagline)
    else:
        # prepend to tags index file
        if tagline not in open(tagsFilename).read():
            try:
                with open(tagsFilename, 'r+') as tagsFile:
                    content = tagsFile.read()
                    if tagline not in content:
                        tagsFile.seek(0, 0)
                        tagsFile.write(tagline + content)
            except Exception as e:
                print('WARN: Failed to write entry to tags file ' +
                      tagsFilename + ' ' + str(e))
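
# As written above, each tags/<tagname>.txt index holds one post id per line,
# newest first, for example (illustrative path and ids):
#   https://domain/users/alice/statuses/123457
#   https://domain/users/bob/statuses/123456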


def _addSchedulePost(baseDir: str, nickname: str, domain: str,
                     eventDateStr: str, postId: str) -> None:
    """Adds a scheduled post to the index
    """
    handle = nickname + '@' + domain
    scheduleIndexFilename = baseDir + '/accounts/' + handle + '/schedule.index'

    indexStr = eventDateStr + ' ' + postId.replace('/', '#')
    if os.path.isfile(scheduleIndexFilename):
        if indexStr not in open(scheduleIndexFilename).read():
            try:
                with open(scheduleIndexFilename, 'r+') as scheduleFile:
                    content = scheduleFile.read()
                    if indexStr + '\n' not in content:
                        scheduleFile.seek(0, 0)
                        scheduleFile.write(indexStr + '\n' + content)
                        print('DEBUG: scheduled post added to index')
            except Exception as e:
                print('WARN: Failed to write entry to scheduled posts index ' +
                      scheduleIndexFilename + ' ' + str(e))
    else:
        with open(scheduleIndexFilename, 'w+') as scheduleFile:
            scheduleFile.write(indexStr + '\n')
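
# Each schedule.index line pairs an event date with a post id in which '/'
# has been replaced by '#', for example (illustrative values):
#   2021-12-31T10:00:00Z https:##domain#users#alice#statuses#123456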


def validContentWarning(cw: str) -> str:
    """Returns a validated content warning
    """
    cw = removeHtml(cw)
    # hashtags within content warnings apparently cause a lot of trouble
    # so remove them
    if '#' in cw:
        cw = cw.replace('#', '').replace('  ', ' ')
    return removeInvalidChars(cw)


def _loadAutoCW(baseDir: str, nickname: str, domain: str) -> []:
    """Loads automatic CWs file and returns a list containing
    the lines of the file
    """
    filename = acctDir(baseDir, nickname, domain) + '/autocw.txt'
    if not os.path.isfile(filename):
        return []
    with open(filename, 'r') as f:
        return f.readlines()
    return []


def _addAutoCW(baseDir: str, nickname: str, domain: str,
               subject: str, content: str) -> str:
    """Appends any automatic CW to the subject line
    and returns the new subject line
    """
    newSubject = subject
    autoCWList = _loadAutoCW(baseDir, nickname, domain)
    for cwRule in autoCWList:
        if '->' not in cwRule:
            continue
        match = cwRule.split('->')[0].strip()
        if match not in content:
            continue
        cwStr = cwRule.split('->')[1].strip()
        if newSubject:
            if cwStr not in newSubject:
                newSubject += ', ' + cwStr
        else:
            newSubject = cwStr
    return newSubject
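
# Each autocw.txt rule takes the form "match -> content warning", as parsed
# above, for example (illustrative rule):
#   spider -> arachnophobia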


def _createPostCWFromReply(baseDir: str, nickname: str, domain: str,
                           inReplyTo: str,
                           sensitive: bool, summary: str) -> (bool, str):
    """If this is a reply and the original post has a CW
    then use the same CW
    """
    if inReplyTo and not sensitive:
        # locate the post which this is a reply to and check if
        # it has a content warning. If it does then reproduce
        # the same warning
        replyPostFilename = \
            locatePost(baseDir, nickname, domain, inReplyTo)
        if replyPostFilename:
            replyToJson = loadJson(replyPostFilename)
            if replyToJson:
                if replyToJson.get('object'):
                    if replyToJson['object'].get('sensitive'):
                        if replyToJson['object']['sensitive']:
                            sensitive = True
                            if replyToJson['object'].get('summary'):
                                summary = replyToJson['object']['summary']
    return sensitive, summary


def _createPostS2S(baseDir: str, nickname: str, domain: str, port: int,
                   httpPrefix: str, content: str, statusNumber: str,
                   published: str, newPostId: str, postContext: {},
                   toRecipients: [], toCC: [], inReplyTo: str,
                   sensitive: bool, commentsEnabled: bool,
                   tags: [], attachImageFilename: str,
                   mediaType: str, imageDescription: str, city: str,
                   postObjectType: str, summary: str,
                   inReplyToAtomUri: str, systemLanguage: str,
                   conversationId: str, lowBandwidth: bool) -> {}:
    """Creates a new server-to-server post
    """
    actorUrl = localActorUrl(httpPrefix, nickname, domain)
    idStr = \
        localActorUrl(httpPrefix, nickname, domain) + \
        '/statuses/' + statusNumber + '/replies'
    newPostUrl = \
        httpPrefix + '://' + domain + '/@' + nickname + '/' + statusNumber
    newPostAttributedTo = \
        localActorUrl(httpPrefix, nickname, domain)
    if not conversationId:
        conversationId = newPostId
    newPost = {
        '@context': postContext,
        'id': newPostId + '/activity',
        'type': 'Create',
        'actor': actorUrl,
        'published': published,
        'to': toRecipients,
        'cc': toCC,
        'object': {
            'id': newPostId,
            'conversation': conversationId,
            'type': postObjectType,
            'summary': summary,
            'inReplyTo': inReplyTo,
            'published': published,
            'url': newPostUrl,
            'attributedTo': newPostAttributedTo,
            'to': toRecipients,
            'cc': toCC,
            'sensitive': sensitive,
            'atomUri': newPostId,
            'inReplyToAtomUri': inReplyToAtomUri,
            'commentsEnabled': commentsEnabled,
            'rejectReplies': not commentsEnabled,
            'mediaType': 'text/html',
            'content': content,
            'contentMap': {
                systemLanguage: content
            },
            'attachment': [],
            'tag': tags,
            'replies': {
                'id': idStr,
                'type': 'Collection',
                'first': {
                    'type': 'CollectionPage',
                    'partOf': idStr,
                    'items': []
                }
            }
        }
    }
    if attachImageFilename:
        newPost['object'] = \
            attachMedia(baseDir, httpPrefix, nickname, domain, port,
                        newPost['object'], attachImageFilename,
                        mediaType, imageDescription, city, lowBandwidth)
    return newPost


def _createPostC2S(baseDir: str, nickname: str, domain: str, port: int,
                   httpPrefix: str, content: str, statusNumber: str,
                   published: str, newPostId: str, postContext: {},
                   toRecipients: [], toCC: [], inReplyTo: str,
                   sensitive: bool, commentsEnabled: bool,
                   tags: [], attachImageFilename: str,
                   mediaType: str, imageDescription: str, city: str,
                   postObjectType: str, summary: str,
                   inReplyToAtomUri: str, systemLanguage: str,
                   conversationId: str, lowBandwidth: bool) -> {}:
    """Creates a new client-to-server post
    """
    domainFull = getFullDomain(domain, port)
    idStr = \
        localActorUrl(httpPrefix, nickname, domainFull) + \
        '/statuses/' + statusNumber + '/replies'
    newPostUrl = \
        httpPrefix + '://' + domain + '/@' + nickname + '/' + statusNumber
    if not conversationId:
        conversationId = newPostId
    newPost = {
        "@context": postContext,
        'id': newPostId,
        'conversation': conversationId,
        'type': postObjectType,
        'summary': summary,
        'inReplyTo': inReplyTo,
        'published': published,
        'url': newPostUrl,
        'attributedTo': localActorUrl(httpPrefix, nickname, domainFull),
        'to': toRecipients,
        'cc': toCC,
        'sensitive': sensitive,
        'atomUri': newPostId,
        'inReplyToAtomUri': inReplyToAtomUri,
        'commentsEnabled': commentsEnabled,
        'rejectReplies': not commentsEnabled,
        'mediaType': 'text/html',
        'content': content,
        'contentMap': {
            systemLanguage: content
        },
        'attachment': [],
        'tag': tags,
        'replies': {
            'id': idStr,
            'type': 'Collection',
            'first': {
                'type': 'CollectionPage',
                'partOf': idStr,
                'items': []
            }
        }
    }
    if attachImageFilename:
        newPost = \
            attachMedia(baseDir, httpPrefix, nickname, domain, port,
                        newPost, attachImageFilename,
                        mediaType, imageDescription, city, lowBandwidth)
    return newPost


def _createPostPlaceAndTime(eventDate: str, endDate: str,
                            eventTime: str, endTime: str,
                            summary: str, content: str,
                            schedulePost: bool,
                            eventUUID: str,
                            location: str,
                            tags: []) -> str:
    """Adds a place and time to the tags on a new post
    """
    endDateStr = None
    if endDate:
        eventName = summary
        if not eventName:
            eventName = content
        endDateStr = endDate
        if endTime:
            if endTime.endswith('Z'):
                endDateStr = endDate + 'T' + endTime
            else:
                endDateStr = endDate + 'T' + endTime + \
                    ':00' + strftime("%z", gmtime())
        else:
            endDateStr = endDate + 'T12:00:00Z'

    # get the starting date and time
    eventDateStr = None
    if eventDate:
        eventName = summary
        if not eventName:
            eventName = content
        eventDateStr = eventDate
        if eventTime:
            if eventTime.endswith('Z'):
                eventDateStr = eventDate + 'T' + eventTime
            else:
                eventDateStr = eventDate + 'T' + eventTime + \
                    ':00' + strftime("%z", gmtime())
        else:
            eventDateStr = eventDate + 'T12:00:00Z'
        if not endDateStr:
            endDateStr = eventDateStr
        if not schedulePost and not eventUUID:
            tags.append({
                "@context": "https://www.w3.org/ns/activitystreams",
                "type": "Event",
                "name": eventName,
                "startTime": eventDateStr,
                "endTime": endDateStr
            })
    if location and not eventUUID:
        tags.append({
            "@context": "https://www.w3.org/ns/activitystreams",
            "type": "Place",
            "name": location
        })
    return eventDateStr


def _createPostMentions(ccUrl: str, newPost: {},
                        toRecipients: [], tags: []) -> None:
    """Updates mentions for a new post
    """
    if not ccUrl:
        return
    if len(ccUrl) == 0:
        return
    newPost['cc'] = [ccUrl]
    if newPost.get('object'):
        newPost['object']['cc'] = [ccUrl]

        # if this is a public post then include any mentions in cc
        toCC = newPost['object']['cc']
        if len(toRecipients) != 1:
            return
        if toRecipients[0].endswith('#Public') and \
           ccUrl.endswith('/followers'):
            for tag in tags:
                if tag['type'] != 'Mention':
                    continue
                if tag['href'] not in toCC:
                    newPost['object']['cc'].append(tag['href'])


def _createPostModReport(baseDir: str,
                         isModerationReport: bool, newPost: {},
                         newPostId: str) -> None:
    """ if this is a moderation report then add a status
    """
    if not isModerationReport:
        return
    # add status
    if newPost.get('object'):
        newPost['object']['moderationStatus'] = 'pending'
    else:
        newPost['moderationStatus'] = 'pending'
    # save to index file
    moderationIndexFile = baseDir + '/accounts/moderation.txt'
    with open(moderationIndexFile, 'a+') as modFile:
        modFile.write(newPostId + '\n')
|
|
|
|
|
|
|
|
|
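

# The moderation index is a plain text file containing one post id per
# line, for example (hypothetical id):
#   https://mydomain/users/alice/statuses/1632320472012345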


def _createPostBase(baseDir: str, nickname: str, domain: str, port: int,
                    toUrl: str, ccUrl: str, httpPrefix: str, content: str,
                    followersOnly: bool, saveToFile: bool,
                    clientToServer: bool, commentsEnabled: bool,
                    attachImageFilename: str,
                    mediaType: str, imageDescription: str, city: str,
                    isModerationReport: bool,
                    isArticle: bool,
                    inReplyTo: str,
                    inReplyToAtomUri: str,
                    subject: str, schedulePost: bool,
                    eventDate: str, eventTime: str,
                    location: str,
                    eventUUID: str, category: str,
                    joinMode: str,
                    endDate: str, endTime: str,
                    maximumAttendeeCapacity: int,
                    repliesModerationOption: str,
                    anonymousParticipationEnabled: bool,
                    eventStatus: str, ticketUrl: str,
                    systemLanguage: str,
                    conversationId: str, lowBandwidth: bool) -> {}:
    """Creates a message
    """
    content = removeInvalidChars(content)

    subject = _addAutoCW(baseDir, nickname, domain, subject, content)

    if nickname != 'news':
        mentionedRecipients = \
            getMentionedPeople(baseDir, httpPrefix, content, domain, False)
    else:
        mentionedRecipients = ''

    tags = []
    hashtagsDict = {}

    domain = getFullDomain(domain, port)

    # add tags
    if nickname != 'news':
        content = \
            addHtmlTags(baseDir, httpPrefix,
                        nickname, domain, content,
                        mentionedRecipients,
                        hashtagsDict, True)

    # replace emoji with unicode
    tags = []
    for tagName, tag in hashtagsDict.items():
        tags.append(tag)

    # get list of tags
    if nickname != 'news':
        content = replaceEmojiFromTags(content, tags, 'content')
    # remove replaced emoji
    hashtagsDictCopy = hashtagsDict.copy()
    for tagName, tag in hashtagsDictCopy.items():
        if tag.get('name'):
            if tag['name'].startswith(':'):
                if tag['name'] not in content:
                    del hashtagsDict[tagName]

    statusNumber, published = getStatusNumber()
    newPostId = \
        localActorUrl(httpPrefix, nickname, domain) + \
        '/statuses/' + statusNumber

    sensitive = False
    summary = None
    if subject:
        summary = removeInvalidChars(validContentWarning(subject))
        sensitive = True

    toRecipients = []
    toCC = []
    if toUrl:
        if not isinstance(toUrl, str):
            print('ERROR: toUrl is not a string')
            return None
        toRecipients = [toUrl]

    # who to send to
    if mentionedRecipients:
        for mention in mentionedRecipients:
            if mention not in toCC:
                toCC.append(mention)

    # create a list of hashtags
    # Only posts which are #Public are searchable by hashtag
    if hashtagsDict:
        isPublic = False
        for recipient in toRecipients:
            if recipient.endswith('#Public'):
                isPublic = True
                break
        for tagName, tag in hashtagsDict.items():
            if not tagExists(tag['type'], tag['name'], tags):
                tags.append(tag)
                if isPublic:
                    _updateHashtagsIndex(baseDir, tag, newPostId)
        # print('Content tags: ' + str(tags))

    sensitive, summary = \
        _createPostCWFromReply(baseDir, nickname, domain,
                               inReplyTo, sensitive, summary)

    eventDateStr = \
        _createPostPlaceAndTime(eventDate, endDate,
                                eventTime, endTime,
                                summary, content, schedulePost,
                                eventUUID, location, tags)

    postContext = [
        'https://www.w3.org/ns/activitystreams',
        {
            'Hashtag': 'as:Hashtag',
            'sensitive': 'as:sensitive',
            'toot': 'http://joinmastodon.org/ns#',
            'votersCount': 'toot:votersCount'
        }
    ]

    # make sure that CC doesn't also contain a To address
    # eg. To: [ "https://mydomain/users/foo/followers" ]
    # CC: [ "X", "Y", "https://mydomain/users/foo", "Z" ]
    removeFromCC = []
    for ccRecipient in toCC:
        for sendToActor in toRecipients:
            if ccRecipient in sendToActor and \
               ccRecipient not in removeFromCC:
                removeFromCC.append(ccRecipient)
                break
    for ccRemoval in removeFromCC:
        toCC.remove(ccRemoval)

    # the type of post to be made
    postObjectType = 'Note'
    if isArticle:
        postObjectType = 'Article'

    if not clientToServer:
        newPost = \
            _createPostS2S(baseDir, nickname, domain, port,
                           httpPrefix, content, statusNumber,
                           published, newPostId, postContext,
                           toRecipients, toCC, inReplyTo,
                           sensitive, commentsEnabled,
                           tags, attachImageFilename,
                           mediaType, imageDescription, city,
                           postObjectType, summary,
                           inReplyToAtomUri, systemLanguage,
                           conversationId, lowBandwidth)
    else:
        newPost = \
            _createPostC2S(baseDir, nickname, domain, port,
                           httpPrefix, content, statusNumber,
                           published, newPostId, postContext,
                           toRecipients, toCC, inReplyTo,
                           sensitive, commentsEnabled,
                           tags, attachImageFilename,
                           mediaType, imageDescription, city,
                           postObjectType, summary,
                           inReplyToAtomUri, systemLanguage,
                           conversationId, lowBandwidth)

    _createPostMentions(ccUrl, newPost, toRecipients, tags)

    _createPostModReport(baseDir, isModerationReport, newPost, newPostId)

    # If a patch has been posted - i.e. the output from
    # git format-patch - then convert the activitypub type
    convertPostToPatch(baseDir, nickname, domain, newPost)

    if schedulePost:
        if eventDate and eventTime:
            # add an item to the scheduled post index file
            _addSchedulePost(baseDir, nickname, domain,
                             eventDateStr, newPostId)
            savePostToBox(baseDir, httpPrefix, newPostId,
                          nickname, domain, newPost, 'scheduled')
        else:
            print('Unable to create scheduled post without ' +
                  'date and time values')
            return newPost
    elif saveToFile:
        if isArticle:
            savePostToBox(baseDir, httpPrefix, newPostId,
                          nickname, domain, newPost, 'tlblogs')
        else:
            savePostToBox(baseDir, httpPrefix, newPostId,
                          nickname, domain, newPost, 'outbox')
    return newPost
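

# The post id produced by _createPostBase has the form
# (hypothetical example):
#   https://mydomain/users/alice/statuses/1632320472012345
# i.e. the local actor url followed by '/statuses/' and the status number
# returned by getStatusNumber().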


def outboxMessageCreateWrap(httpPrefix: str,
                            nickname: str, domain: str, port: int,
                            messageJson: {}) -> {}:
    """Wraps a received message in a Create
    https://www.w3.org/TR/activitypub/#object-without-create
    """
    domain = getFullDomain(domain, port)
    statusNumber, published = getStatusNumber()
    if messageJson.get('published'):
        published = messageJson['published']
    newPostId = \
        localActorUrl(httpPrefix, nickname, domain) + \
        '/statuses/' + statusNumber
    cc = []
    if messageJson.get('cc'):
        cc = messageJson['cc']
    newPost = {
        "@context": "https://www.w3.org/ns/activitystreams",
        'id': newPostId + '/activity',
        'type': 'Create',
        'actor': localActorUrl(httpPrefix, nickname, domain),
        'published': published,
        'to': messageJson['to'],
        'cc': cc,
        'object': messageJson
    }
    newPost['object']['id'] = newPost['id']
    newPost['object']['url'] = \
        httpPrefix + '://' + domain + '/@' + nickname + '/' + statusNumber
    newPost['object']['atomUri'] = \
        localActorUrl(httpPrefix, nickname, domain) + \
        '/statuses/' + statusNumber
    return newPost
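

# The wrapped activity therefore has the shape (hypothetical values):
#   {'@context': 'https://www.w3.org/ns/activitystreams',
#    'id': 'https://mydomain/users/alice/statuses/123/activity',
#    'type': 'Create',
#    'actor': 'https://mydomain/users/alice',
#    'published': '2021-09-15T19:00:00Z',
#    'to': [...], 'cc': [...],
#    'object': <the original message with id, url and atomUri filled in>}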


def _postIsAddressedToFollowers(baseDir: str,
                                nickname: str, domain: str, port: int,
                                httpPrefix: str,
                                postJsonObject: {}) -> bool:
    """Returns true if the given post is addressed to followers of the nickname
    """
    domainFull = getFullDomain(domain, port)

    if not postJsonObject.get('object'):
        return False
    toList = []
    ccList = []
    if postJsonObject['type'] != 'Update' and \
       hasObjectDict(postJsonObject):
        if postJsonObject['object'].get('to'):
            toList = postJsonObject['object']['to']
        if postJsonObject['object'].get('cc'):
            ccList = postJsonObject['object']['cc']
    else:
        if postJsonObject.get('to'):
            toList = postJsonObject['to']
        if postJsonObject.get('cc'):
            ccList = postJsonObject['cc']

    followersUrl = \
        localActorUrl(httpPrefix, nickname, domainFull) + '/followers'

    # does the followers url exist in 'to' or 'cc' lists?
    addressedToFollowers = False
    if followersUrl in toList:
        addressedToFollowers = True
    elif followersUrl in ccList:
        addressedToFollowers = True
    return addressedToFollowers


def pinPost(baseDir: str, nickname: str, domain: str,
            pinnedContent: str) -> None:
    """Pins the given content to the profile of the given account
    """
    accountDir = acctDir(baseDir, nickname, domain)
    pinnedFilename = accountDir + '/pinToProfile.txt'
    with open(pinnedFilename, 'w+') as pinFile:
        pinFile.write(pinnedContent)


def undoPinnedPost(baseDir: str, nickname: str, domain: str) -> None:
    """Removes pinned content for the given account
    """
    accountDir = acctDir(baseDir, nickname, domain)
    pinnedFilename = accountDir + '/pinToProfile.txt'
    if os.path.isfile(pinnedFilename):
        try:
            os.remove(pinnedFilename)
        except BaseException:
            pass


def getPinnedPostAsJson(baseDir: str, httpPrefix: str,
                        nickname: str, domain: str,
                        domainFull: str, systemLanguage: str) -> {}:
    """Returns the pinned profile post as json
    """
    accountDir = acctDir(baseDir, nickname, domain)
    pinnedFilename = accountDir + '/pinToProfile.txt'
    pinnedPostJson = {}
    actor = localActorUrl(httpPrefix, nickname, domainFull)
    if os.path.isfile(pinnedFilename):
        pinnedContent = None
        with open(pinnedFilename, 'r') as pinFile:
            pinnedContent = pinFile.read()
        if pinnedContent:
            pinnedPostJson = {
                'atomUri': actor + '/pinned',
                'attachment': [],
                'attributedTo': actor,
                'cc': [
                    actor + '/followers'
                ],
                'content': pinnedContent,
                'contentMap': {
                    systemLanguage: pinnedContent
                },
                'id': actor + '/pinned',
                'inReplyTo': None,
                'inReplyToAtomUri': None,
                'published': fileLastModified(pinnedFilename),
                'replies': {},
                'sensitive': False,
                'summary': None,
                'tag': [],
                'to': ['https://www.w3.org/ns/activitystreams#Public'],
                'type': 'Note',
                'url': replaceUsersWithAt(actor) + '/pinned'
            }
    return pinnedPostJson


def jsonPinPost(baseDir: str, httpPrefix: str,
                nickname: str, domain: str,
                domainFull: str, systemLanguage: str) -> {}:
    """Returns a pinned post as json
    """
    pinnedPostJson = \
        getPinnedPostAsJson(baseDir, httpPrefix,
                            nickname, domain,
                            domainFull, systemLanguage)
    itemsList = []
    if pinnedPostJson:
        itemsList = [pinnedPostJson]

    actor = localActorUrl(httpPrefix, nickname, domainFull)
    return {
        '@context': [
            'https://www.w3.org/ns/activitystreams',
            {
                'atomUri': 'ostatus:atomUri',
                'conversation': 'ostatus:conversation',
                'inReplyToAtomUri': 'ostatus:inReplyToAtomUri',
                'ostatus': 'http://ostatus.org#',
                'sensitive': 'as:sensitive',
                'toot': 'http://joinmastodon.org/ns#',
                'votersCount': 'toot:votersCount'
            }
        ],
        'id': actor + '/collections/featured',
        'orderedItems': itemsList,
        'totalItems': len(itemsList),
        'type': 'OrderedCollection'
    }


def regenerateIndexForBox(baseDir: str,
                          nickname: str, domain: str, boxName: str) -> None:
    """Generates an index for the given box if it doesn't exist
    Used by unit tests to artificially create an index
    """
    boxDir = acctDir(baseDir, nickname, domain) + '/' + boxName
    boxIndexFilename = boxDir + '.index'

    if not os.path.isdir(boxDir):
        return
    if os.path.isfile(boxIndexFilename):
        return

    indexLines = []
    for subdir, dirs, files in os.walk(boxDir):
        for f in files:
            if ':##' not in f:
                continue
            indexLines.append(f)
        break

    indexLines.sort(reverse=True)

    result = ''
    with open(boxIndexFilename, 'w+') as fp:
        for line in indexLines:
            result += line + '\n'
            fp.write(line + '\n')
    print('Index generated for ' + boxName + '\n' + result)
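

# Note: only filenames containing ':##' are indexed. Post filenames in a
# box directory are assumed to be the post id with '/' replaced by '#',
# e.g. (hypothetical) 'https:##mydomain#users#alice#statuses#123456.json',
# which is why the ':##' substring identifies them.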


def createPublicPost(baseDir: str,
                     nickname: str, domain: str, port: int, httpPrefix: str,
                     content: str, followersOnly: bool, saveToFile: bool,
                     clientToServer: bool, commentsEnabled: bool,
                     attachImageFilename: str, mediaType: str,
                     imageDescription: str, city: str,
                     inReplyTo: str,
                     inReplyToAtomUri: str, subject: str,
                     schedulePost: bool,
                     eventDate: str, eventTime: str,
                     location: str,
                     isArticle: bool,
                     systemLanguage: str,
                     conversationId: str, lowBandwidth: bool) -> {}:
    """Public post
    """
    domainFull = getFullDomain(domain, port)
    isModerationReport = False
    eventUUID = None
    category = None
    joinMode = None
    endDate = None
    endTime = None
    maximumAttendeeCapacity = None
    repliesModerationOption = None
    anonymousParticipationEnabled = None
    eventStatus = None
    ticketUrl = None
    localActor = localActorUrl(httpPrefix, nickname, domainFull)
    return _createPostBase(baseDir, nickname, domain, port,
                           'https://www.w3.org/ns/activitystreams#Public',
                           localActor + '/followers',
                           httpPrefix, content, followersOnly, saveToFile,
                           clientToServer, commentsEnabled,
                           attachImageFilename, mediaType,
                           imageDescription, city,
                           isModerationReport, isArticle,
                           inReplyTo, inReplyToAtomUri, subject,
                           schedulePost, eventDate, eventTime, location,
                           eventUUID, category, joinMode, endDate, endTime,
                           maximumAttendeeCapacity,
                           repliesModerationOption,
                           anonymousParticipationEnabled,
                           eventStatus, ticketUrl, systemLanguage,
                           conversationId, lowBandwidth)
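

# Example call (hypothetical arguments, for illustration only):
#   post = createPublicPost(baseDir, 'alice', 'mydomain', 443, 'https',
#                           'Hello world', False, True, False, True,
#                           None, None, None, 'London',
#                           None, None, None,
#                           False, None, None, None,
#                           False, 'en', None, False)
# This creates a Note addressed to #Public, cc'd to alice's followers,
# and saves it to her outbox because saveToFile is True.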


def _appendCitationsToBlogPost(baseDir: str,
                               nickname: str, domain: str,
                               blogJson: {}) -> None:
    """Appends any citations to a new blog post
    """
    # append citations tags, stored in a file
    citationsFilename = \
        acctDir(baseDir, nickname, domain) + '/.citations.txt'
    if not os.path.isfile(citationsFilename):
        return
    citationsSeparator = '#####'
    with open(citationsFilename, 'r') as f:
        citations = f.readlines()
        for line in citations:
            if citationsSeparator not in line:
                continue
            sections = line.strip().split(citationsSeparator)
            if len(sections) != 3:
                continue
            # dateStr = sections[0]
            title = sections[1]
            link = sections[2]
            tagJson = {
                "type": "Article",
                "name": title,
                "url": link
            }
            blogJson['object']['tag'].append(tagJson)
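

# Each line of the .citations.txt file is expected to contain a date,
# a title and a link separated by '#####', for example (hypothetical):
#   2021-09-15T19:00:00Z#####Some Article Title#####https://example.com/a
# and each such line becomes an Article tag on the blog post.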


def createBlogPost(baseDir: str,
                   nickname: str, domain: str, port: int, httpPrefix: str,
                   content: str, followersOnly: bool, saveToFile: bool,
                   clientToServer: bool, commentsEnabled: bool,
                   attachImageFilename: str, mediaType: str,
                   imageDescription: str, city: str,
                   inReplyTo: str, inReplyToAtomUri: str,
                   subject: str, schedulePost: bool,
                   eventDate: str, eventTime: str,
                   location: str, systemLanguage: str,
                   conversationId: str, lowBandwidth: bool) -> {}:
    """Blog post
    """
    blogJson = \
        createPublicPost(baseDir,
                         nickname, domain, port, httpPrefix,
                         content, followersOnly, saveToFile,
                         clientToServer, commentsEnabled,
                         attachImageFilename, mediaType,
                         imageDescription, city,
                         inReplyTo, inReplyToAtomUri, subject,
                         schedulePost,
                         eventDate, eventTime, location,
                         True, systemLanguage, conversationId,
                         lowBandwidth)
    blogJson['object']['url'] = \
        blogJson['object']['url'].replace('/@', '/users/')
    _appendCitationsToBlogPost(baseDir, nickname, domain, blogJson)

    return blogJson


def createNewsPost(baseDir: str,
                   domain: str, port: int, httpPrefix: str,
                   content: str, followersOnly: bool, saveToFile: bool,
                   attachImageFilename: str, mediaType: str,
                   imageDescription: str, city: str,
                   subject: str, systemLanguage: str,
                   conversationId: str, lowBandwidth: bool) -> {}:
    """News post, published by the 'news' account
    """
    clientToServer = False
    inReplyTo = None
    inReplyToAtomUri = None
    schedulePost = False
    eventDate = None
    eventTime = None
    location = None
    blog = \
        createPublicPost(baseDir,
                         'news', domain, port, httpPrefix,
                         content, followersOnly, saveToFile,
                         clientToServer, False,
                         attachImageFilename, mediaType,
                         imageDescription, city,
                         inReplyTo, inReplyToAtomUri, subject,
                         schedulePost,
                         eventDate, eventTime, location,
                         True, systemLanguage, conversationId,
                         lowBandwidth)
    blog['object']['type'] = 'Article'
    return blog


def createQuestionPost(baseDir: str,
                       nickname: str, domain: str, port: int, httpPrefix: str,
                       content: str, qOptions: [],
                       followersOnly: bool, saveToFile: bool,
                       clientToServer: bool, commentsEnabled: bool,
                       attachImageFilename: str, mediaType: str,
                       imageDescription: str, city: str,
                       subject: str, durationDays: int,
                       systemLanguage: str, lowBandwidth: bool) -> {}:
    """Question post with multiple choice options
    """
    domainFull = getFullDomain(domain, port)
    localActor = localActorUrl(httpPrefix, nickname, domainFull)
    messageJson = \
        _createPostBase(baseDir, nickname, domain, port,
                        'https://www.w3.org/ns/activitystreams#Public',
                        localActor + '/followers',
                        httpPrefix, content, followersOnly, saveToFile,
                        clientToServer, commentsEnabled,
                        attachImageFilename, mediaType,
                        imageDescription, city,
                        False, False, None, None, subject,
                        False, None, None, None, None, None,
                        None, None, None,
                        None, None, None, None, None, systemLanguage,
                        None, lowBandwidth)
    messageJson['object']['type'] = 'Question'
    messageJson['object']['oneOf'] = []
    messageJson['object']['votersCount'] = 0
    currTime = datetime.datetime.utcnow()
    daysSinceEpoch = \
        int((currTime - datetime.datetime(1970, 1, 1)).days + durationDays)
    endTime = datetime.datetime(1970, 1, 1) + \
        datetime.timedelta(daysSinceEpoch)
    messageJson['object']['endTime'] = endTime.strftime("%Y-%m-%dT%H:%M:%SZ")
    for questionOption in qOptions:
        messageJson['object']['oneOf'].append({
            "type": "Note",
            "name": questionOption,
            "replies": {
                "type": "Collection",
                "totalItems": 0
            }
        })
    return messageJson
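

# The resulting Question object contains, for example (hypothetical
# values):
#   'type': 'Question',
#   'endTime': '2021-09-22T00:00:00Z',
#   'votersCount': 0,
#   'oneOf': [{'type': 'Note', 'name': 'option 1',
#              'replies': {'type': 'Collection', 'totalItems': 0}}, ...]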


def createUnlistedPost(baseDir: str,
                       nickname: str, domain: str, port: int, httpPrefix: str,
                       content: str, followersOnly: bool, saveToFile: bool,
                       clientToServer: bool, commentsEnabled: bool,
                       attachImageFilename: str, mediaType: str,
                       imageDescription: str, city: str,
                       inReplyTo: str, inReplyToAtomUri: str,
                       subject: str, schedulePost: bool,
                       eventDate: str, eventTime: str,
                       location: str, systemLanguage: str,
                       conversationId: str, lowBandwidth: bool) -> {}:
    """Unlisted post. This has the #Public and followers links inverted.
    """
    domainFull = getFullDomain(domain, port)
    localActor = localActorUrl(httpPrefix, nickname, domainFull)
    return _createPostBase(baseDir, nickname, domain, port,
                           localActor + '/followers',
                           'https://www.w3.org/ns/activitystreams#Public',
                           httpPrefix, content, followersOnly, saveToFile,
                           clientToServer, commentsEnabled,
                           attachImageFilename, mediaType,
                           imageDescription, city,
                           False, False,
                           inReplyTo, inReplyToAtomUri, subject,
                           schedulePost, eventDate, eventTime, location,
                           None, None, None, None, None,
                           None, None, None, None, None, systemLanguage,
                           conversationId, lowBandwidth)


def createFollowersOnlyPost(baseDir: str,
                            nickname: str, domain: str, port: int,
                            httpPrefix: str,
                            content: str, followersOnly: bool,
                            saveToFile: bool,
                            clientToServer: bool, commentsEnabled: bool,
                            attachImageFilename: str, mediaType: str,
                            imageDescription: str, city: str,
                            inReplyTo: str,
                            inReplyToAtomUri: str,
                            subject: str, schedulePost: bool,
                            eventDate: str, eventTime: str,
                            location: str, systemLanguage: str,
                            conversationId: str, lowBandwidth: bool) -> {}:
    """Followers only post
    """
    domainFull = getFullDomain(domain, port)
    localActor = localActorUrl(httpPrefix, nickname, domainFull)
    return _createPostBase(baseDir, nickname, domain, port,
                           localActor + '/followers',
                           None,
                           httpPrefix, content, followersOnly, saveToFile,
                           clientToServer, commentsEnabled,
                           attachImageFilename, mediaType,
                           imageDescription, city,
                           False, False,
                           inReplyTo, inReplyToAtomUri, subject,
                           schedulePost, eventDate, eventTime, location,
                           None, None, None, None, None,
                           None, None, None, None, None, systemLanguage,
                           conversationId, lowBandwidth)


def getMentionedPeople(baseDir: str, httpPrefix: str,
                       content: str, domain: str, debug: bool) -> []:
    """Extracts a list of mentioned actors from the given message content
    """
    if '@' not in content:
        return None
    mentions = []
    words = content.split(' ')
    for wrd in words:
        if not wrd.startswith('@'):
            continue
        handle = wrd[1:]
        if debug:
            print('DEBUG: mentioned handle ' + handle)
        if '@' not in handle:
            handle = handle + '@' + domain
            if not os.path.isdir(baseDir + '/accounts/' + handle):
                continue
        else:
            externalDomain = handle.split('@')[1]
            if not ('.' in externalDomain or
                    externalDomain == 'localhost'):
                continue
        mentionedNickname = handle.split('@')[0]
        mentionedDomain = handle.split('@')[1].strip('\n').strip('\r')
        if ':' in mentionedDomain:
            mentionedDomain = removeDomainPort(mentionedDomain)
        if not validNickname(mentionedDomain, mentionedNickname):
            continue
        actor = \
            localActorUrl(httpPrefix, mentionedNickname, handle.split('@')[1])
        mentions.append(actor)
    return mentions
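

# Example (hypothetical): with content '@alice@social.example hello' and
# domain 'mydomain', the returned list would contain the single actor
#   https://social.example/users/alice
# assuming that localActorUrl() builds actor urls of the /users/<nickname>
# form used elsewhere in this module.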


def createDirectMessagePost(baseDir: str,
                            nickname: str, domain: str, port: int,
                            httpPrefix: str,
                            content: str, followersOnly: bool,
                            saveToFile: bool, clientToServer: bool,
                            commentsEnabled: bool,
                            attachImageFilename: str, mediaType: str,
                            imageDescription: str, city: str,
                            inReplyTo: str,
                            inReplyToAtomUri: str,
                            subject: str, debug: bool,
                            schedulePost: bool,
                            eventDate: str, eventTime: str,
                            location: str, systemLanguage: str,
                            conversationId: str, lowBandwidth: bool) -> {}:
    """Direct Message post
    """
    content = resolvePetnames(baseDir, nickname, domain, content)
    mentionedPeople = \
        getMentionedPeople(baseDir, httpPrefix, content, domain, debug)
    if debug:
        print('mentionedPeople: ' + str(mentionedPeople))
    if not mentionedPeople:
        return None
    postTo = None
    postCc = None
    messageJson = \
        _createPostBase(baseDir, nickname, domain, port,
                        postTo, postCc,
                        httpPrefix, content, followersOnly, saveToFile,
                        clientToServer, commentsEnabled,
                        attachImageFilename, mediaType,
                        imageDescription, city,
                        False, False,
                        inReplyTo, inReplyToAtomUri, subject,
                        schedulePost, eventDate, eventTime, location,
                        None, None, None, None, None,
                        None, None, None, None, None, systemLanguage,
                        conversationId, lowBandwidth)
    # mentioned recipients go into To rather than Cc
    messageJson['to'] = messageJson['object']['cc']
    messageJson['object']['to'] = messageJson['to']
    messageJson['cc'] = []
    messageJson['object']['cc'] = []
    if schedulePost:
        savePostToBox(baseDir, httpPrefix, messageJson['object']['id'],
                      nickname, domain, messageJson, 'scheduled')
    return messageJson


def createReportPost(baseDir: str,
                     nickname: str, domain: str, port: int, httpPrefix: str,
                     content: str, followersOnly: bool, saveToFile: bool,
                     clientToServer: bool, commentsEnabled: bool,
                     attachImageFilename: str, mediaType: str,
                     imageDescription: str, city: str,
                     debug: bool, subject: str, systemLanguage: str,
                     lowBandwidth: bool) -> {}:
    """Send a report to moderators
    """
    domainFull = getFullDomain(domain, port)

    # add a title to distinguish moderation reports from other posts
    reportTitle = 'Moderation Report'
    if not subject:
        subject = reportTitle
    else:
        if not subject.startswith(reportTitle):
            subject = reportTitle + ': ' + subject

    # create the list of moderators from the moderators file
    moderatorsList = []
    moderatorsFile = baseDir + '/accounts/moderators.txt'
    if os.path.isfile(moderatorsFile):
        with open(moderatorsFile, 'r') as fileHandler:
            for line in fileHandler:
                line = line.strip('\n').strip('\r')
                if line.startswith('#'):
                    continue
                if line.startswith('/users/'):
                    line = line.replace('users', '')
                if line.startswith('@'):
                    line = line[1:]
                if '@' in line:
                    nick = line.split('@')[0]
                    moderatorActor = \
                        localActorUrl(httpPrefix, nick, domainFull)
                    if moderatorActor not in moderatorsList:
                        moderatorsList.append(moderatorActor)
                    continue
                if line.startswith('http') or line.startswith('hyper'):
                    # must be a local address - no remote moderators
                    if '://' + domainFull + '/' in line:
                        if line not in moderatorsList:
                            moderatorsList.append(line)
                else:
                    if '/' not in line:
                        moderatorActor = \
                            localActorUrl(httpPrefix, line, domainFull)
                        if moderatorActor not in moderatorsList:
                            moderatorsList.append(moderatorActor)
    if len(moderatorsList) == 0:
        # if there are no moderators then the admin becomes the moderator
        adminNickname = getConfigParam(baseDir, 'admin')
        if adminNickname:
            localActor = localActorUrl(httpPrefix, adminNickname, domainFull)
            moderatorsList.append(localActor)
    if not moderatorsList:
        return None
    if debug:
        print('DEBUG: Sending report to moderators')
        print(str(moderatorsList))
    postTo = moderatorsList
    postCc = None
    postJsonObject = None
    for toUrl in postTo:
        # who is this report going to?
        toNickname = toUrl.split('/users/')[1]
        handle = toNickname + '@' + domain

        postJsonObject = \
            _createPostBase(baseDir, nickname, domain, port,
                            toUrl, postCc,
                            httpPrefix, content, followersOnly, saveToFile,
                            clientToServer, commentsEnabled,
                            attachImageFilename, mediaType,
                            imageDescription, city,
                            True, False, None, None, subject,
                            False, None, None, None, None, None,
                            None, None, None,
                            None, None, None, None, None, systemLanguage,
                            None, lowBandwidth)
        if not postJsonObject:
            continue

        # save a notification file so that the moderator
        # knows something new has appeared
        newReportFile = baseDir + '/accounts/' + handle + '/.newReport'
        if os.path.isfile(newReportFile):
            continue
        try:
            with open(newReportFile, 'w+') as fp:
                fp.write(toUrl + '/moderation')
        except BaseException:
            pass

    return postJsonObject
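

# The moderators file is a plain text list with one moderator per line.
# The parsing above accepts, for example (hypothetical values):
#   bob
#   @bob@mydomain
#   https://mydomain/users/bob
# Lines beginning with '#' are comments and remote addresses are ignored.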


def threadSendPost(session, postJsonStr: str, federationList: [],
                   inboxUrl: str, baseDir: str,
                   signatureHeaderJson: {}, postLog: [],
                   debug: bool) -> None:
    """Sends a post with retries
    """
    tries = 0
    sendIntervalSec = 30
    for attempt in range(20):
        postResult = None
        unauthorized = False
        if debug:
            print('Getting postJsonString for ' + inboxUrl)
        try:
            postResult, unauthorized = \
                postJsonString(session, postJsonStr, federationList,
                               inboxUrl, signatureHeaderJson,
                               debug)
            if debug:
                print('Obtained postJsonString for ' + inboxUrl +
                      ' unauthorized: ' + str(unauthorized))
        except Exception as e:
            print('ERROR: postJsonString failed ' + str(e))
        if unauthorized:
            print(postJsonStr)
            print('threadSendPost: Post is unauthorized')
            break
        if postResult:
            logStr = 'Success on try ' + str(tries) + ': ' + postJsonStr
        else:
            logStr = 'Retry ' + str(tries) + ': ' + postJsonStr
        postLog.append(logStr)
        # keep the length of the log finite
        # Don't accumulate massive files on systems with limited resources
        while len(postLog) > 16:
            postLog.pop(0)
        if debug:
            # save the log file
            postLogFilename = baseDir + '/post.log'
            if os.path.isfile(postLogFilename):
                with open(postLogFilename, 'a+') as logFile:
                    logFile.write(logStr + '\n')
            else:
                with open(postLogFilename, 'w+') as logFile:
                    logFile.write(logStr + '\n')

        if postResult:
            if debug:
                print('DEBUG: successful json post to ' + inboxUrl)
            # our work here is done
            break
        if debug:
            print(postJsonStr)
            print('DEBUG: json post to ' + inboxUrl +
                  ' failed. Waiting for ' +
                  str(sendIntervalSec) + ' seconds.')
        time.sleep(sendIntervalSec)
        tries += 1


def sendPost(signingPrivateKeyPem: str, projectVersion: str,
             session, baseDir: str, nickname: str, domain: str, port: int,
             toNickname: str, toDomain: str, toPort: int, cc: str,
             httpPrefix: str, content: str, followersOnly: bool,
             saveToFile: bool, clientToServer: bool,
             commentsEnabled: bool,
             attachImageFilename: str, mediaType: str,
             imageDescription: str, city: str,
             federationList: [], sendThreads: [], postLog: [],
             cachedWebfingers: {}, personCache: {},
             isArticle: bool, systemLanguage: str,
             sharedItemsFederatedDomains: [],
             sharedItemFederationTokens: {},
             lowBandwidth: bool,
             debug: bool = False, inReplyTo: str = None,
             inReplyToAtomUri: str = None, subject: str = None) -> int:
    """Post to another inbox. Used by unit tests.
    """
    withDigest = True
    conversationId = None

    if toNickname == 'inbox':
        # shared inbox actor on @domain@domain
        toNickname = toDomain

    toDomain = getFullDomain(toDomain, toPort)

    handle = httpPrefix + '://' + toDomain + '/@' + toNickname

    # lookup the inbox for the To handle
    wfRequest = webfingerHandle(session, handle, httpPrefix,
                                cachedWebfingers,
                                domain, projectVersion, debug, False,
                                signingPrivateKeyPem)
    if not wfRequest:
        return 1
    if not isinstance(wfRequest, dict):
        print('WARN: Webfinger for ' + handle + ' did not return a dict. ' +
              str(wfRequest))
        return 1

    if not clientToServer:
        postToBox = 'inbox'
    else:
        postToBox = 'outbox'
    if isArticle:
        postToBox = 'tlblogs'

    # get the actor inbox for the To handle
    (inboxUrl, pubKeyId, pubKey,
     toPersonId, sharedInbox,
     avatarUrl, displayName) = getPersonBox(signingPrivateKeyPem,
                                            baseDir, session, wfRequest,
                                            personCache,
                                            projectVersion, httpPrefix,
                                            nickname, domain, postToBox,
                                            72533)

    if not inboxUrl:
        return 3
    if not pubKey:
        return 4
    if not toPersonId:
        return 5
    # sharedInbox is optional

    postJsonObject = \
        _createPostBase(baseDir, nickname, domain, port,
                        toPersonId, cc, httpPrefix, content,
                        followersOnly, saveToFile, clientToServer,
                        commentsEnabled,
                        attachImageFilename, mediaType,
                        imageDescription, city,
                        False, isArticle, inReplyTo,
                        inReplyToAtomUri, subject,
                        False, None, None, None, None, None,
                        None, None, None,
                        None, None, None, None, None, systemLanguage,
                        conversationId, lowBandwidth)

    # get the senders private key
    privateKeyPem = _getPersonKey(nickname, domain, baseDir, 'private')
    if len(privateKeyPem) == 0:
        return 6

    if toDomain not in inboxUrl:
        return 7
    postPath = inboxUrl.split(toDomain, 1)[1]

    if not postJsonObject.get('signature'):
        try:
            signedPostJsonObject = postJsonObject.copy()
            generateJsonSignature(signedPostJsonObject, privateKeyPem)
            postJsonObject = signedPostJsonObject
        except Exception as e:
            print('WARN: failed to JSON-LD sign post, ' + str(e))
            pass

    # convert json to string so that there are no
    # subsequent conversions after creating message body digest
    postJsonStr = json.dumps(postJsonObject)

    # construct the http header, including the message body digest
    signatureHeaderJson = \
        createSignedHeader(privateKeyPem, nickname, domain, port,
                           toDomain, toPort,
                           postPath, httpPrefix, withDigest, postJsonStr)

    # if the "to" domain is within the shared items
    # federation list then send the token for this domain
    # so that it can request a catalog
    if toDomain in sharedItemsFederatedDomains:
        domainFull = getFullDomain(domain, port)
        if sharedItemFederationTokens.get(domainFull):
            signatureHeaderJson['Origin'] = domainFull
            signatureHeaderJson['SharesCatalog'] = \
                sharedItemFederationTokens[domainFull]
            if debug:
                print('SharesCatalog added to header')
        elif debug:
            print(domainFull + ' not in sharedItemFederationTokens')
    elif debug:
        print(toDomain + ' not in sharedItemsFederatedDomains ' +
              str(sharedItemsFederatedDomains))

    if debug:
        print('signatureHeaderJson: ' + str(signatureHeaderJson))

    # Keep the number of threads being used small
    while len(sendThreads) > 1000:
        print('WARN: Maximum threads reached - killing send thread')
        sendThreads[0].kill()
        sendThreads.pop(0)
        print('WARN: thread killed')
    thr = \
        threadWithTrace(target=threadSendPost,
                        args=(session,
                              postJsonStr,
                              federationList,
                              inboxUrl, baseDir,
                              signatureHeaderJson.copy(),
                              postLog,
                              debug), daemon=True)
    sendThreads.append(thr)
    thr.start()
    return 0
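

# Return codes for sendPost (non-zero indicates the step which failed):
#   0 post handed to a send thread   1 webfinger lookup failed
#   3 no inbox found                 4 no public key found
#   5 no actor/person id found       6 no sender private key
#   7 the inbox url is not on the destination domain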
2021-08-31 14:17:11 +00:00
|
|
|
def sendPostViaServer(signingPrivateKeyPem: str, projectVersion: str,
|
2020-04-04 10:05:27 +00:00
|
|
|
baseDir: str, session, fromNickname: str, password: str,
|
|
|
|
fromDomain: str, fromPort: int,
|
|
|
|
toNickname: str, toDomain: str, toPort: int, cc: str,
|
|
|
|
httpPrefix: str, content: str, followersOnly: bool,
|
2020-08-21 16:10:47 +00:00
|
|
|
commentsEnabled: bool,
|
2020-04-04 10:05:27 +00:00
|
|
|
attachImageFilename: str, mediaType: str,
|
2021-05-09 19:11:05 +00:00
|
|
|
imageDescription: str, city: str,
|
2020-04-04 10:05:27 +00:00
|
|
|
cachedWebfingers: {}, personCache: {},
|
2021-07-18 09:55:49 +00:00
|
|
|
isArticle: bool, systemLanguage: str,
|
2021-08-13 17:08:50 +00:00
|
|
|
lowBandwidth: bool,
|
2021-07-18 09:55:49 +00:00
|
|
|
debug: bool = False,
|
2021-06-20 11:28:35 +00:00
|
|
|
inReplyTo: str = None,
|
|
|
|
inReplyToAtomUri: str = None,
|
2021-08-08 16:52:32 +00:00
|
|
|
conversationId: str = None,
|
2021-06-20 11:28:35 +00:00
|
|
|
subject: str = None) -> int:
|
2019-07-16 10:19:04 +00:00
|
|
|
"""Send a post via a proxy (c2s)
|
|
|
|
"""
|
2019-07-16 11:33:40 +00:00
|
|
|
if not session:
|
|
|
|
print('WARN: No session for sendPostViaServer')
|
|
|
|
return 6
|
2019-07-16 10:19:04 +00:00
|
|
|
|
2021-08-05 11:24:24 +00:00
|
|
|
fromDomainFull = getFullDomain(fromDomain, fromPort)
|
2019-07-16 10:19:04 +00:00
|
|
|
|
2021-08-05 11:24:24 +00:00
|
|
|
handle = httpPrefix + '://' + fromDomainFull + '/@' + fromNickname
|
2019-07-16 10:19:04 +00:00
|
|
|
|
|
|
|
# lookup the inbox for the To handle
|
2020-04-04 10:05:27 +00:00
|
|
|
wfRequest = \
|
|
|
|
webfingerHandle(session, handle, httpPrefix, cachedWebfingers,
|
2021-08-31 14:17:11 +00:00
|
|
|
fromDomainFull, projectVersion, debug, False,
|
|
|
|
signingPrivateKeyPem)
|
2019-07-16 10:19:04 +00:00
|
|
|
if not wfRequest:
|
|
|
|
if debug:
|
2021-03-18 10:01:01 +00:00
|
|
|
print('DEBUG: post webfinger failed for ' + handle)
|
2019-07-16 10:19:04 +00:00
|
|
|
return 1
|
2020-06-23 10:41:12 +00:00
|
|
|
if not isinstance(wfRequest, dict):
|
2021-03-18 10:01:01 +00:00
|
|
|
print('WARN: post webfinger for ' + handle +
|
|
|
|
              ' did not return a dict. ' + str(wfRequest))
        return 1

    postToBox = 'outbox'
    if isArticle:
        postToBox = 'tlblogs'

    # get the actor inbox for the To handle
    (inboxUrl, pubKeyId, pubKey,
     fromPersonId, sharedInbox,
     avatarUrl, displayName) = getPersonBox(signingPrivateKeyPem,
                                            baseDir, session, wfRequest,
                                            personCache,
                                            projectVersion, httpPrefix,
                                            fromNickname,
                                            fromDomainFull, postToBox,
                                            82796)
    if not inboxUrl:
        if debug:
            print('DEBUG: post no ' + postToBox +
                  ' was found for ' + handle)
        return 3
    if not fromPersonId:
        if debug:
            print('DEBUG: post no actor was found for ' + handle)
        return 4

    # Get the json for the c2s post, not saving anything to file
    # Note that baseDir is set to None
    saveToFile = False
    clientToServer = True
    if toDomain.lower().endswith('public'):
        toPersonId = 'https://www.w3.org/ns/activitystreams#Public'
        cc = localActorUrl(httpPrefix, fromNickname, fromDomainFull) + \
            '/followers'
    else:
        if toDomain.lower().endswith('followers') or \
           toDomain.lower().endswith('followersonly'):
            toPersonId = \
                localActorUrl(httpPrefix, fromNickname, fromDomainFull) + \
                '/followers'
        else:
            toDomainFull = getFullDomain(toDomain, toPort)
            toPersonId = localActorUrl(httpPrefix, toNickname, toDomainFull)

    postJsonObject = \
        _createPostBase(baseDir,
                        fromNickname, fromDomain, fromPort,
                        toPersonId, cc, httpPrefix, content,
                        followersOnly, saveToFile, clientToServer,
                        commentsEnabled,
                        attachImageFilename, mediaType,
                        imageDescription, city,
                        False, isArticle, inReplyTo,
                        inReplyToAtomUri, subject,
                        False, None, None, None, None, None,
                        None, None, None,
                        None, None, None, None, None, systemLanguage,
                        conversationId, lowBandwidth)

    authHeader = createBasicAuthHeader(fromNickname, password)

    if attachImageFilename:
        headers = {
            'host': fromDomainFull,
            'Authorization': authHeader
        }
        postResult = \
            postImage(session, attachImageFilename, [],
                      inboxUrl, headers)
        if not postResult:
            if debug:
                print('DEBUG: post failed to upload image')
            # return 9

    headers = {
        'host': fromDomainFull,
        'Content-type': 'application/json',
        'Authorization': authHeader
    }
    postDumps = json.dumps(postJsonObject)
    postResult = \
        postJsonString(session, postDumps, [],
                       inboxUrl, headers, debug, 5, True)
    if not postResult:
        if debug:
            print('DEBUG: POST failed for c2s to ' + inboxUrl)
        return 5

    if debug:
        print('DEBUG: c2s POST success')
    return 0


def groupFollowersByDomain(baseDir: str, nickname: str, domain: str) -> {}:
    """Returns a dictionary with followers grouped by domain
    """
    handle = nickname + '@' + domain
    followersFilename = baseDir + '/accounts/' + handle + '/followers.txt'
    if not os.path.isfile(followersFilename):
        return None
    grouped = {}
    with open(followersFilename, 'r') as f:
        for followerHandle in f:
            if '@' not in followerHandle:
                continue
            fHandle = \
                followerHandle.strip().replace('\n', '').replace('\r', '')
            followerDomain = fHandle.split('@')[1]
            if not grouped.get(followerDomain):
                grouped[followerDomain] = [fHandle]
            else:
                grouped[followerDomain].append(fHandle)
    return grouped
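
# Illustrative sketch (not part of the module): how the grouping above is
# typically consumed. The base directory and account names are hypothetical.
#
#   grouped = groupFollowersByDomain('/opt/epicyon', 'alice', 'example.org')
#   if grouped:
#       # e.g. {'remote.social': ['bob@remote.social', 'carol@remote.social']}
#       for followerDomain, handles in grouped.items():
#           print(followerDomain + ': ' + str(len(handles)) + ' followers')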


def _addFollowersToPublicPost(postJsonObject: {}) -> None:
    """Adds followers entry to cc if it doesn't exist
    """
    if not postJsonObject.get('actor'):
        return

    if isinstance(postJsonObject['object'], str):
        if not postJsonObject.get('to'):
            return
        if len(postJsonObject['to']) > 1:
            return
        if len(postJsonObject['to']) == 0:
            return
        if not postJsonObject['to'][0].endswith('#Public'):
            return
        if postJsonObject.get('cc'):
            return
        postJsonObject['cc'] = postJsonObject['actor'] + '/followers'
    elif hasObjectDict(postJsonObject):
        if not postJsonObject['object'].get('to'):
            return
        if len(postJsonObject['object']['to']) > 1:
            return
        elif len(postJsonObject['object']['to']) == 0:
            return
        elif not postJsonObject['object']['to'][0].endswith('#Public'):
            return
        if postJsonObject['object'].get('cc'):
            return
        postJsonObject['object']['cc'] = postJsonObject['actor'] + '/followers'
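
# Illustrative sketch (hypothetical data): a post addressed only to #Public
# and with no cc gains the actor's followers collection as cc.
#
#   post = {
#       'actor': 'https://example.org/users/alice',
#       'object': {'to': ['https://www.w3.org/ns/activitystreams#Public']}
#   }
#   _addFollowersToPublicPost(post)
#   # post['object']['cc'] == 'https://example.org/users/alice/followers'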


def sendSignedJson(postJsonObject: {}, session, baseDir: str,
                   nickname: str, domain: str, port: int,
                   toNickname: str, toDomain: str, toPort: int, cc: str,
                   httpPrefix: str, saveToFile: bool, clientToServer: bool,
                   federationList: [],
                   sendThreads: [], postLog: [], cachedWebfingers: {},
                   personCache: {}, debug: bool, projectVersion: str,
                   sharedItemsToken: str, groupAccount: bool,
                   signingPrivateKeyPem: str) -> int:
    """Sends a signed json object to an inbox/outbox
    """
    if debug:
        print('DEBUG: sendSignedJson start')
    if not session:
        print('WARN: No session specified for sendSignedJson')
        return 8
    withDigest = True

    if toDomain.endswith('.onion') or toDomain.endswith('.i2p'):
        httpPrefix = 'http'

    if toNickname == 'inbox':
        # shared inbox actor on @domain@domain
        toNickname = toDomain

    toDomain = getFullDomain(toDomain, toPort)

    toDomainUrl = httpPrefix + '://' + toDomain
    if not siteIsActive(toDomainUrl):
        print('Domain is inactive: ' + toDomainUrl)
        return 9
    print('Domain is active: ' + toDomainUrl)
    handleBase = toDomainUrl + '/@'
    if toNickname:
        handle = handleBase + toNickname
    else:
        singleUserInstanceNickname = 'dev'
        handle = handleBase + singleUserInstanceNickname

    if debug:
        print('DEBUG: handle - ' + handle + ' toPort ' + str(toPort))

    # lookup the inbox for the To handle
    wfRequest = webfingerHandle(session, handle, httpPrefix, cachedWebfingers,
                                domain, projectVersion, debug, groupAccount,
                                signingPrivateKeyPem)
    if not wfRequest:
        if debug:
            print('DEBUG: webfinger for ' + handle + ' failed')
        return 1
    if not isinstance(wfRequest, dict):
        print('WARN: Webfinger for ' + handle + ' did not return a dict. ' +
              str(wfRequest))
        return 1

    if wfRequest.get('errors'):
        if debug:
            print('DEBUG: webfinger for ' + handle +
                  ' failed with errors ' + str(wfRequest['errors']))

    if not clientToServer:
        postToBox = 'inbox'
    else:
        postToBox = 'outbox'

    # get the actor inbox/outbox for the To handle
    (inboxUrl, pubKeyId, pubKey, toPersonId, sharedInboxUrl, avatarUrl,
     displayName) = getPersonBox(signingPrivateKeyPem,
                                 baseDir, session, wfRequest,
                                 personCache,
                                 projectVersion, httpPrefix,
                                 nickname, domain, postToBox,
                                 30873)

    print("inboxUrl: " + str(inboxUrl))
    print("toPersonId: " + str(toPersonId))
    print("sharedInboxUrl: " + str(sharedInboxUrl))
    if inboxUrl:
        if inboxUrl.endswith('/actor/inbox'):
            inboxUrl = sharedInboxUrl

    if not inboxUrl:
        if debug:
            print('DEBUG: missing inboxUrl')
        return 3

    if debug:
        print('DEBUG: Sending to endpoint ' + inboxUrl)

    if not pubKey:
        if debug:
            print('DEBUG: missing pubkey')
        return 4
    if not toPersonId:
        if debug:
            print('DEBUG: missing personId')
        return 5
    # sharedInbox is optional

    # get the senders private key
    privateKeyPem = _getPersonKey(nickname, domain, baseDir, 'private', debug)
    if len(privateKeyPem) == 0:
        if debug:
            print('DEBUG: Private key not found for ' +
                  nickname + '@' + domain +
                  ' in ' + baseDir + '/keys/private')
        return 6

    if toDomain not in inboxUrl:
        if debug:
            print('DEBUG: ' + toDomain + ' is not in ' + inboxUrl)
        return 7
    postPath = inboxUrl.split(toDomain, 1)[1]

    _addFollowersToPublicPost(postJsonObject)

    if not postJsonObject.get('signature'):
        try:
            signedPostJsonObject = postJsonObject.copy()
            generateJsonSignature(signedPostJsonObject, privateKeyPem)
            postJsonObject = signedPostJsonObject
        except Exception as e:
            print('WARN: failed to JSON-LD sign post, ' + str(e))
            pass

    # convert json to string so that there are no
    # subsequent conversions after creating message body digest
    postJsonStr = json.dumps(postJsonObject)

    # construct the http header, including the message body digest
    signatureHeaderJson = \
        createSignedHeader(privateKeyPem, nickname, domain, port,
                           toDomain, toPort,
                           postPath, httpPrefix, withDigest, postJsonStr)
    # optionally add a token so that the receiving instance may access
    # your shared items catalog
    if sharedItemsToken:
        signatureHeaderJson['Origin'] = getFullDomain(domain, port)
        signatureHeaderJson['SharesCatalog'] = sharedItemsToken
    elif debug:
        print('Not sending shared items federation token')

    # Keep the number of threads being used small
    while len(sendThreads) > 1000:
        print('WARN: Maximum threads reached - killing send thread')
        sendThreads[0].kill()
        sendThreads.pop(0)
        print('WARN: thread killed')

    if debug:
        print('DEBUG: starting thread to send post')
        pprint(postJsonObject)
    thr = \
        threadWithTrace(target=threadSendPost,
                        args=(session,
                              postJsonStr,
                              federationList,
                              inboxUrl, baseDir,
                              signatureHeaderJson.copy(),
                              postLog,
                              debug), daemon=True)
    sendThreads.append(thr)
    # thr.start()
    return 0
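
# Reference note derived from the function above: sendSignedJson returns
#   0 when the send thread has been queued,
#   1 when webfinger failed or returned a non-dict,
#   3 when no inbox could be found,
#   4 when the recipient public key is missing,
#   5 when the recipient actor id is missing,
#   6 when the sender's private key cannot be loaded,
#   7 when the inbox url does not contain the destination domain,
#   8 when no session was supplied,
#   9 when the destination domain appears to be inactive.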


def addToField(activityType: str, postJsonObject: {},
               debug: bool) -> ({}, bool):
    """The Follow/Add/Remove activity doesn't have a 'to' field and so one
    needs to be added so that activity distribution happens in a consistent way
    Returns true if a 'to' field exists or was added
    """
    if postJsonObject.get('to'):
        return postJsonObject, True

    if debug:
        pprint(postJsonObject)
        print('DEBUG: no "to" field when sending to named addresses 2')

    isSameType = False
    toFieldAdded = False
    if postJsonObject.get('object'):
        if isinstance(postJsonObject['object'], str):
            if postJsonObject.get('type'):
                if postJsonObject['type'] == activityType:
                    isSameType = True
                    if debug:
                        print('DEBUG: "to" field assigned to ' + activityType)
                    toAddress = postJsonObject['object']
                    if '/statuses/' in toAddress:
                        toAddress = toAddress.split('/statuses/')[0]
                    postJsonObject['to'] = [toAddress]
                    toFieldAdded = True
        elif hasObjectDict(postJsonObject):
            # add a to field to bookmark add or remove
            if postJsonObject.get('type') and \
               postJsonObject.get('actor') and \
               postJsonObject['object'].get('type'):
                if postJsonObject['type'] == 'Add' or \
                   postJsonObject['type'] == 'Remove':
                    if postJsonObject['object']['type'] == 'Document':
                        postJsonObject['to'] = \
                            [postJsonObject['actor']]
                        postJsonObject['object']['to'] = \
                            [postJsonObject['actor']]
                        toFieldAdded = True

            if not toFieldAdded and \
               postJsonObject['object'].get('type'):
                if postJsonObject['object']['type'] == activityType:
                    isSameType = True
                    if isinstance(postJsonObject['object']['object'], str):
                        if debug:
                            print('DEBUG: "to" field assigned to ' +
                                  activityType)
                        toAddress = postJsonObject['object']['object']
                        if '/statuses/' in toAddress:
                            toAddress = toAddress.split('/statuses/')[0]
                        postJsonObject['object']['to'] = [toAddress]
                        postJsonObject['to'] = \
                            [postJsonObject['object']['object']]
                        toFieldAdded = True

    if not isSameType:
        return postJsonObject, True
    if toFieldAdded:
        return postJsonObject, True
    return postJsonObject, False
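
# Illustrative sketch (hypothetical activity): a Follow whose object is an
# actor url gains a matching 'to' list.
#
#   followActivity = {
#       'type': 'Follow',
#       'actor': 'https://example.org/users/alice',
#       'object': 'https://remote.social/users/bob'
#   }
#   followActivity, added = addToField('Follow', followActivity, False)
#   # added is True and
#   # followActivity['to'] == ['https://remote.social/users/bob']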


def _isProfileUpdate(postJsonObject: {}) -> bool:
    """Is the given post a profile update?
    for actor updates there is no 'to' within the object
    """
    if postJsonObject['object'].get('type') and postJsonObject.get('type'):
        if (postJsonObject['type'] == 'Update' and
            (postJsonObject['object']['type'] == 'Person' or
             postJsonObject['object']['type'] == 'Application' or
             postJsonObject['object']['type'] == 'Group' or
             postJsonObject['object']['type'] == 'Service')):
            return True
    return False
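
# Illustrative sketch (hypothetical activity): an Update wrapping a Person
# object is treated as a profile update.
#
#   _isProfileUpdate({'type': 'Update', 'object': {'type': 'Person'}})
#   # returns True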


def sendToNamedAddresses(session, baseDir: str,
                         nickname: str, domain: str,
                         onionDomain: str, i2pDomain: str, port: int,
                         httpPrefix: str, federationList: [],
                         sendThreads: [], postLog: [],
                         cachedWebfingers: {}, personCache: {},
                         postJsonObject: {}, debug: bool,
                         projectVersion: str,
                         sharedItemsFederatedDomains: [],
                         sharedItemFederationTokens: {},
                         signingPrivateKeyPem: str) -> None:
    """sends a post to the specific named addresses in to/cc
    """
    if not session:
        print('WARN: No session for sendToNamedAddresses')
        return
    if not postJsonObject.get('object'):
        return
    isProfileUpdate = False
    if hasObjectDict(postJsonObject):
        if _isProfileUpdate(postJsonObject):
            # use the original object, which has a 'to'
            recipientsObject = postJsonObject
            isProfileUpdate = True

        if not isProfileUpdate:
            if not postJsonObject['object'].get('to'):
                if debug:
                    pprint(postJsonObject)
                    print('DEBUG: ' +
                          'no "to" field when sending to named addresses')
                if postJsonObject['object'].get('type'):
                    if postJsonObject['object']['type'] == 'Follow' or \
                       postJsonObject['object']['type'] == 'Join':
                        if isinstance(postJsonObject['object']['object'],
                                      str):
                            if debug:
                                print('DEBUG: "to" field assigned to Follow')
                            postJsonObject['object']['to'] = \
                                [postJsonObject['object']['object']]
                if not postJsonObject['object'].get('to'):
                    return
            recipientsObject = postJsonObject['object']
    else:
        postJsonObject, fieldAdded = \
            addToField('Follow', postJsonObject, debug)
        if not fieldAdded:
            return
        postJsonObject, fieldAdded = addToField('Like', postJsonObject, debug)
        if not fieldAdded:
            return
        recipientsObject = postJsonObject

    recipients = []
    recipientType = ('to', 'cc')
    for rType in recipientType:
        if not recipientsObject.get(rType):
            continue
        if isinstance(recipientsObject[rType], list):
            if debug:
                pprint(recipientsObject)
                print('recipientsObject: ' + str(recipientsObject[rType]))
            for address in recipientsObject[rType]:
                if not address:
                    continue
                if '/' not in address:
                    continue
                if address.endswith('#Public'):
                    continue
                if address.endswith('/followers'):
                    continue
                recipients.append(address)
        elif isinstance(recipientsObject[rType], str):
            address = recipientsObject[rType]
            if address:
                if '/' in address:
                    if address.endswith('#Public'):
                        continue
                    if address.endswith('/followers'):
                        continue
                    recipients.append(address)
    if not recipients:
        if debug:
            print('DEBUG: no individual recipients')
        return
    if debug:
        print('DEBUG: Sending individually addressed posts: ' +
              str(recipients))
    # this is after the message has arrived at the server
    clientToServer = False
    for address in recipients:
        toNickname = getNicknameFromActor(address)
        if not toNickname:
            continue
        toDomain, toPort = getDomainFromActor(address)
        if not toDomain:
            continue
        # Don't send profile/actor updates to yourself
        if isProfileUpdate:
            domainFull = getFullDomain(domain, port)
            toDomainFull = getFullDomain(toDomain, toPort)
            if nickname == toNickname and \
               domainFull == toDomainFull:
                if debug:
                    print('Not sending profile update to self. ' +
                          nickname + '@' + domainFull)
                continue
        if debug:
            domainFull = getFullDomain(domain, port)
            toDomainFull = getFullDomain(toDomain, toPort)
            print('DEBUG: Post sending s2s: ' +
                  nickname + '@' + domainFull +
                  ' to ' + toNickname + '@' + toDomainFull)

        # if we have an alt onion domain and we are sending to
        # another onion domain then switch the clearnet
        # domain for the onion one
        fromDomain = domain
        fromDomainFull = getFullDomain(domain, port)
        fromHttpPrefix = httpPrefix
        if onionDomain:
            if toDomain.endswith('.onion'):
                fromDomain = onionDomain
                fromDomainFull = onionDomain
                fromHttpPrefix = 'http'
        elif i2pDomain:
            if toDomain.endswith('.i2p'):
                fromDomain = i2pDomain
                fromDomainFull = i2pDomain
                fromHttpPrefix = 'http'
        cc = []

        # if the "to" domain is within the shared items
        # federation list then send the token for this domain
        # so that it can request a catalog
        sharedItemsToken = None
        if toDomain in sharedItemsFederatedDomains:
            if sharedItemFederationTokens.get(fromDomainFull):
                sharedItemsToken = sharedItemFederationTokens[fromDomainFull]

        groupAccount = hasGroupType(baseDir, address, personCache)

        sendSignedJson(postJsonObject, session, baseDir,
                       nickname, fromDomain, port,
                       toNickname, toDomain, toPort,
                       cc, fromHttpPrefix, True, clientToServer,
                       federationList,
                       sendThreads, postLog, cachedWebfingers,
                       personCache, debug, projectVersion,
                       sharedItemsToken, groupAccount,
                       signingPrivateKeyPem)
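
# Note on the recipient loop above: addresses ending in '#Public' or
# '/followers' are skipped here because public and follower delivery are
# handled by sendToFollowers, so only individually named actors remain in
# 'recipients'.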


def _hasSharedInbox(session, httpPrefix: str, domain: str,
                    debug: bool, signingPrivateKeyPem: str) -> bool:
    """Returns true if the given domain has a shared inbox
    This tries the new and the old way of webfingering the shared inbox
    """
    tryHandles = []
    if ':' not in domain:
        tryHandles.append(domain + '@' + domain)
        tryHandles.append('inbox@' + domain)
    for handle in tryHandles:
        wfRequest = webfingerHandle(session, handle, httpPrefix, {},
                                    domain, __version__, debug, False,
                                    signingPrivateKeyPem)
        if wfRequest:
            if isinstance(wfRequest, dict):
                if not wfRequest.get('errors'):
                    return True
    return False
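
# Illustrative note: for a domain such as 'remote.social' (hypothetical) the
# probe above webfingers 'remote.social@remote.social' (the newer convention)
# and then 'inbox@remote.social' (the older one), returning True on the first
# response without errors.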


def _sendingProfileUpdate(postJsonObject: {}) -> bool:
    """Returns true if the given json is a profile update
    """
    if postJsonObject['type'] != 'Update':
        return False
    if not hasObjectDict(postJsonObject):
        return False
    if not postJsonObject['object'].get('type'):
        return False
    activityType = postJsonObject['object']['type']
    if activityType == 'Person' or \
       activityType == 'Application' or \
       activityType == 'Group' or \
       activityType == 'Service':
        return True
    return False


def sendToFollowers(session, baseDir: str,
                    nickname: str,
                    domain: str,
                    onionDomain: str, i2pDomain: str, port: int,
                    httpPrefix: str, federationList: [],
                    sendThreads: [], postLog: [],
                    cachedWebfingers: {}, personCache: {},
                    postJsonObject: {}, debug: bool,
                    projectVersion: str,
                    sharedItemsFederatedDomains: [],
                    sharedItemFederationTokens: {},
                    signingPrivateKeyPem: str) -> None:
    """sends a post to the followers of the given nickname
    """
    print('sendToFollowers')
    if not session:
        print('WARN: No session for sendToFollowers')
        return
    if not _postIsAddressedToFollowers(baseDir, nickname, domain,
                                       port, httpPrefix,
                                       postJsonObject):
        if debug:
            print('Post is not addressed to followers')
        return
    print('Post is addressed to followers')

    grouped = groupFollowersByDomain(baseDir, nickname, domain)
    if not grouped:
        if debug:
            print('Post to followers did not resolve any domains')
        return
    print('Post to followers resolved domains')
    # print(str(grouped))

    # this is after the message has arrived at the server
    clientToServer = False

    # for each instance
    sendingStartTime = datetime.datetime.utcnow()
    print('Sending post to followers begins ' +
          sendingStartTime.strftime("%Y-%m-%dT%H:%M:%SZ"))
    sendingCtr = 0
    for followerDomain, followerHandles in grouped.items():
        print('Sending post to followers progress ' +
              str(int(sendingCtr * 100 / len(grouped.items()))) + '% ' +
              followerDomain)
        sendingCtr += 1

        if debug:
            pprint(followerHandles)

        # if the followers domain is within the shared items
        # federation list then send the token for this domain
        # so that it can request a catalog
        sharedItemsToken = None
        if followerDomain in sharedItemsFederatedDomains:
            domainFull = getFullDomain(domain, port)
            if sharedItemFederationTokens.get(domainFull):
                sharedItemsToken = sharedItemFederationTokens[domainFull]

        # check that the follower's domain is active
        followerDomainUrl = httpPrefix + '://' + followerDomain
        if not siteIsActive(followerDomainUrl):
            print('Sending post to followers domain is inactive: ' +
                  followerDomainUrl)
            continue
        print('Sending post to followers domain is active: ' +
              followerDomainUrl)

        withSharedInbox = \
            _hasSharedInbox(session, httpPrefix, followerDomain, debug,
                            signingPrivateKeyPem)
        if debug:
            if withSharedInbox:
                print(followerDomain + ' has shared inbox')
        if not withSharedInbox:
            print('Sending post to followers, ' + followerDomain +
                  ' does not have a shared inbox')

        toPort = port
        index = 0
        toDomain = followerHandles[index].split('@')[1]
        if ':' in toDomain:
            toPort = getPortFromDomain(toDomain)
            toDomain = removeDomainPort(toDomain)

        cc = ''

        # if we are sending to an onion domain and we
        # have an alt onion domain then use the alt
        fromDomain = domain
        fromHttpPrefix = httpPrefix
        if onionDomain:
            if toDomain.endswith('.onion'):
                fromDomain = onionDomain
                fromHttpPrefix = 'http'
        elif i2pDomain:
            if toDomain.endswith('.i2p'):
                fromDomain = i2pDomain
                fromHttpPrefix = 'http'

        if withSharedInbox:
            toNickname = followerHandles[index].split('@')[0]

            groupAccount = False
            if toNickname.startswith('!'):
                groupAccount = True
                toNickname = toNickname[1:]

            # if there are more than one followers on the domain
            # then send the post to the shared inbox
            if len(followerHandles) > 1:
                toNickname = 'inbox'

            if toNickname != 'inbox' and postJsonObject.get('type'):
                if _sendingProfileUpdate(postJsonObject):
                    print('Sending post to followers ' +
                          'shared inbox of ' + toDomain)
                    toNickname = 'inbox'

            print('Sending post to followers from ' +
                  nickname + '@' + domain +
                  ' to ' + toNickname + '@' + toDomain)

            sendSignedJson(postJsonObject, session, baseDir,
                           nickname, fromDomain, port,
                           toNickname, toDomain, toPort,
                           cc, fromHttpPrefix, True, clientToServer,
                           federationList,
                           sendThreads, postLog, cachedWebfingers,
                           personCache, debug, projectVersion,
                           sharedItemsToken, groupAccount,
                           signingPrivateKeyPem)
        else:
            # send to individual followers without using a shared inbox
            for handle in followerHandles:
                print('Sending post to followers ' + handle)
                toNickname = handle.split('@')[0]

                groupAccount = False
                if toNickname.startswith('!'):
                    groupAccount = True
                    toNickname = toNickname[1:]

                if postJsonObject['type'] != 'Update':
                    print('Sending post to followers from ' +
                          nickname + '@' + domain + ' to ' +
                          toNickname + '@' + toDomain)
                else:
                    print('Sending post to followers profile update from ' +
                          nickname + '@' + domain + ' to ' +
                          toNickname + '@' + toDomain)

                sendSignedJson(postJsonObject, session, baseDir,
                               nickname, fromDomain, port,
                               toNickname, toDomain, toPort,
                               cc, fromHttpPrefix, True, clientToServer,
                               federationList,
                               sendThreads, postLog, cachedWebfingers,
                               personCache, debug, projectVersion,
                               sharedItemsToken, groupAccount,
                               signingPrivateKeyPem)

        time.sleep(4)

    if debug:
        print('DEBUG: End of sendToFollowers')

    sendingEndTime = datetime.datetime.utcnow()
    sendingMins = \
        int((sendingEndTime - sendingStartTime).total_seconds() / 60)
    print('Sending post to followers ends ' + str(sendingMins) + ' mins')
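
# Delivery strategy used above, summarized: followers are grouped by their
# domain; if the domain advertises a shared inbox and either has more than
# one follower or the post is a profile update, the post goes once to that
# shared inbox, otherwise it is sent to each follower individually, with a
# four second pause between domains.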


def sendToFollowersThread(session, baseDir: str,
                          nickname: str,
                          domain: str,
                          onionDomain: str, i2pDomain: str, port: int,
                          httpPrefix: str, federationList: [],
                          sendThreads: [], postLog: [],
                          cachedWebfingers: {}, personCache: {},
                          postJsonObject: {}, debug: bool,
                          projectVersion: str,
                          sharedItemsFederatedDomains: [],
                          sharedItemFederationTokens: {},
                          signingPrivateKeyPem: str):
    """Returns a thread used to send a post to followers
    """
    sendThread = \
        threadWithTrace(target=sendToFollowers,
                        args=(session, baseDir,
                              nickname, domain,
                              onionDomain, i2pDomain, port,
                              httpPrefix, federationList,
                              sendThreads, postLog,
                              cachedWebfingers, personCache,
                              postJsonObject.copy(), debug,
                              projectVersion,
                              sharedItemsFederatedDomains,
                              sharedItemFederationTokens,
                              signingPrivateKeyPem), daemon=True)
    try:
        sendThread.start()
    except SocketError as e:
        print('WARN: socket error while starting ' +
              'thread to send to followers. ' + str(e))
        return None
    except ValueError as e:
        print('WARN: error while starting ' +
              'thread to send to followers. ' + str(e))
        return None
    return sendThread
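
# Illustrative sketch (hypothetical arguments): callers can fire and forget
# the returned thread, e.g.
#
#   thr = sendToFollowersThread(session, baseDir, 'alice', 'example.org',
#                               None, None, 443, 'https', [], [], [], {}, {},
#                               postJsonObject, False, '1.2.0', [], {},
#                               signingPrivateKeyPem)
#   # thr is already started, or None if the thread could not be started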


def createInbox(recentPostsCache: {},
                session, baseDir: str, nickname: str, domain: str, port: int,
                httpPrefix: str, itemsPerPage: int, headerOnly: bool,
                pageNumber: int = None) -> {}:
    return _createBoxIndexed(recentPostsCache,
                             session, baseDir, 'inbox',
                             nickname, domain, port, httpPrefix,
                             itemsPerPage, headerOnly, True,
                             0, False, 0, pageNumber)


def createBookmarksTimeline(session, baseDir: str, nickname: str, domain: str,
                            port: int, httpPrefix: str, itemsPerPage: int,
                            headerOnly: bool, pageNumber: int = None) -> {}:
    return _createBoxIndexed({}, session, baseDir, 'tlbookmarks',
                             nickname, domain,
                             port, httpPrefix, itemsPerPage, headerOnly,
                             True, 0, False, 0, pageNumber)


def createDMTimeline(recentPostsCache: {},
                     session, baseDir: str, nickname: str, domain: str,
                     port: int, httpPrefix: str, itemsPerPage: int,
                     headerOnly: bool, pageNumber: int = None) -> {}:
    return _createBoxIndexed(recentPostsCache,
                             session, baseDir, 'dm', nickname,
                             domain, port, httpPrefix, itemsPerPage,
                             headerOnly, True, 0, False, 0, pageNumber)


def createRepliesTimeline(recentPostsCache: {},
                          session, baseDir: str, nickname: str, domain: str,
                          port: int, httpPrefix: str, itemsPerPage: int,
                          headerOnly: bool, pageNumber: int = None) -> {}:
    return _createBoxIndexed(recentPostsCache, session, baseDir, 'tlreplies',
                             nickname, domain, port, httpPrefix,
                             itemsPerPage, headerOnly, True,
                             0, False, 0, pageNumber)


def createBlogsTimeline(session, baseDir: str, nickname: str, domain: str,
                        port: int, httpPrefix: str, itemsPerPage: int,
                        headerOnly: bool, pageNumber: int = None) -> {}:
    return _createBoxIndexed({}, session, baseDir, 'tlblogs', nickname,
                             domain, port, httpPrefix,
                             itemsPerPage, headerOnly, True,
                             0, False, 0, pageNumber)


def createFeaturesTimeline(session, baseDir: str, nickname: str, domain: str,
                           port: int, httpPrefix: str, itemsPerPage: int,
                           headerOnly: bool, pageNumber: int = None) -> {}:
    return _createBoxIndexed({}, session, baseDir, 'tlfeatures', nickname,
                             domain, port, httpPrefix,
                             itemsPerPage, headerOnly, True,
                             0, False, 0, pageNumber)


def createMediaTimeline(session, baseDir: str, nickname: str, domain: str,
                        port: int, httpPrefix: str, itemsPerPage: int,
                        headerOnly: bool, pageNumber: int = None) -> {}:
    return _createBoxIndexed({}, session, baseDir, 'tlmedia', nickname,
                             domain, port, httpPrefix,
                             itemsPerPage, headerOnly, True,
                             0, False, 0, pageNumber)


def createNewsTimeline(session, baseDir: str, nickname: str, domain: str,
                       port: int, httpPrefix: str, itemsPerPage: int,
                       headerOnly: bool, newswireVotesThreshold: int,
                       positiveVoting: bool, votingTimeMins: int,
                       pageNumber: int = None) -> {}:
    return _createBoxIndexed({}, session, baseDir, 'outbox', 'news',
                             domain, port, httpPrefix,
                             itemsPerPage, headerOnly, True,
                             newswireVotesThreshold, positiveVoting,
                             votingTimeMins, pageNumber)


def createOutbox(session, baseDir: str, nickname: str, domain: str,
                 port: int, httpPrefix: str,
                 itemsPerPage: int, headerOnly: bool, authorized: bool,
                 pageNumber: int = None) -> {}:
    return _createBoxIndexed({}, session, baseDir, 'outbox',
                             nickname, domain, port, httpPrefix,
                             itemsPerPage, headerOnly, authorized,
                             0, False, 0, pageNumber)
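
# Illustrative sketch (hypothetical arguments): each of the wrappers above
# simply selects a box name for _createBoxIndexed, e.g.
#
#   inboxPage = createInbox({}, session, baseDir, 'alice', 'example.org',
#                           443, 'https', 12, False, pageNumber=1)
#   # inboxPage is an OrderedCollectionPage dict for the first 12 inbox items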


def createModeration(baseDir: str, nickname: str, domain: str, port: int,
                     httpPrefix: str, itemsPerPage: int, headerOnly: bool,
                     pageNumber: int = None) -> {}:
    boxDir = createPersonDir(nickname, domain, baseDir, 'inbox')
    boxname = 'moderation'

    domain = getFullDomain(domain, port)

    if not pageNumber:
        pageNumber = 1

    pageStr = '?page=' + str(pageNumber)
    boxUrl = localActorUrl(httpPrefix, nickname, domain) + '/' + boxname
    boxHeader = {
        '@context': 'https://www.w3.org/ns/activitystreams',
        'first': boxUrl + '?page=true',
        'id': boxUrl,
        'last': boxUrl + '?page=true',
        'totalItems': 0,
        'type': 'OrderedCollection'
    }
    boxItems = {
        '@context': 'https://www.w3.org/ns/activitystreams',
        'id': boxUrl + pageStr,
        'orderedItems': [
        ],
        'partOf': boxUrl,
        'type': 'OrderedCollectionPage'
    }

    if isModerator(baseDir, nickname):
        moderationIndexFile = baseDir + '/accounts/moderation.txt'
        if os.path.isfile(moderationIndexFile):
            with open(moderationIndexFile, 'r') as f:
                lines = f.readlines()
            boxHeader['totalItems'] = len(lines)
            if headerOnly:
                return boxHeader

            pageLines = []
            if len(lines) > 0:
                endLineNumber = \
                    len(lines) - 1 - int(itemsPerPage * pageNumber)
                if endLineNumber < 0:
                    endLineNumber = 0
                startLineNumber = \
                    len(lines) - 1 - int(itemsPerPage * (pageNumber - 1))
                if startLineNumber < 0:
                    startLineNumber = 0
                lineNumber = startLineNumber
                while lineNumber >= endLineNumber:
                    pageLines.append(lines[lineNumber].strip('\n').strip('\r'))
                    lineNumber -= 1

            for postUrl in pageLines:
                postFilename = \
                    boxDir + '/' + postUrl.replace('/', '#') + '.json'
                if os.path.isfile(postFilename):
                    postJsonObject = loadJson(postFilename)
                    if postJsonObject:
                        boxItems['orderedItems'].append(postJsonObject)

    if headerOnly:
        return boxHeader
    return boxItems


def isImageMedia(session, baseDir: str, httpPrefix: str,
                 nickname: str, domain: str,
                 postJsonObject: {}, translate: {},
                 YTReplacementDomain: str,
                 allowLocalNetworkAccess: bool,
                 recentPostsCache: {}, debug: bool,
                 systemLanguage: str,
                 domainFull: str, personCache: {},
                 signingPrivateKeyPem: str) -> bool:
    """Returns true if the given post has attached image media
    """
    if postJsonObject['type'] == 'Announce':
        postJsonAnnounce = \
            downloadAnnounce(session, baseDir, httpPrefix,
                             nickname, domain, postJsonObject,
                             __version__, translate,
                             YTReplacementDomain,
                             allowLocalNetworkAccess,
                             recentPostsCache, debug,
                             systemLanguage,
                             domainFull, personCache,
                             signingPrivateKeyPem)
        if postJsonAnnounce:
            postJsonObject = postJsonAnnounce
    if postJsonObject['type'] != 'Create':
        return False
    if not hasObjectDict(postJsonObject):
        return False
    if postJsonObject['object'].get('moderationStatus'):
        return False
    if postJsonObject['object']['type'] != 'Note' and \
       postJsonObject['object']['type'] != 'Event' and \
       postJsonObject['object']['type'] != 'Article':
        return False
    if not postJsonObject['object'].get('attachment'):
        return False
    if not isinstance(postJsonObject['object']['attachment'], list):
        return False
    for attach in postJsonObject['object']['attachment']:
        if attach.get('mediaType') and attach.get('url'):
            if attach['mediaType'].startswith('image/') or \
               attach['mediaType'].startswith('audio/') or \
               attach['mediaType'].startswith('video/'):
                return True
    return False
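
# Note on the check above: 'image media' here is broader than images; any
# attachment whose mediaType starts with 'image/', 'audio/' or 'video/'
# qualifies, and announced (boosted) posts are first resolved via
# downloadAnnounce before being inspected.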


def _addPostStringToTimeline(postStr: str, boxname: str,
                             postsInBox: [], boxActor: str) -> bool:
    """ is this a valid timeline post?
    """
    # must be a recognized ActivityPub type
    if ('"Note"' in postStr or
        '"EncryptedMessage"' in postStr or
        '"Event"' in postStr or
        '"Article"' in postStr or
        '"Patch"' in postStr or
        '"Announce"' in postStr or
        ('"Question"' in postStr and
         ('"Create"' in postStr or '"Update"' in postStr))):

        if boxname == 'dm':
            if '#Public' in postStr or '/followers' in postStr:
                return False
        elif boxname == 'tlreplies':
            if boxActor not in postStr:
                return False
        elif (boxname == 'tlblogs' or
              boxname == 'tlnews' or
              boxname == 'tlfeatures'):
            if '"Create"' not in postStr:
                return False
            if '"Article"' not in postStr:
                return False
        elif boxname == 'tlmedia':
            if '"Create"' in postStr:
                if ('mediaType' not in postStr or
                    ('image/' not in postStr and
                     'video/' not in postStr and
                     'audio/' not in postStr)):
                    return False
        # add the post to the dictionary
        postsInBox.append(postStr)
        return True
    return False
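
# Illustrative examples (hypothetical strings): a post string containing
# '#Public' is rejected for the 'dm' box, and a 'tlmedia' Create post must
# carry a mediaType of image/, video/ or audio/ to be appended to postsInBox.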


def _addPostToTimeline(filePath: str, boxname: str,
                       postsInBox: [], boxActor: str) -> bool:
    """ Reads a post from file and decides whether it is valid
    """
    with open(filePath, 'r') as postFile:
        postStr = postFile.read()

        if filePath.endswith('.json'):
            repliesFilename = filePath.replace('.json', '.replies')
            if os.path.isfile(repliesFilename):
                # append a replies identifier, which will later be removed
                postStr += '<hasReplies>'

        return _addPostStringToTimeline(postStr, boxname,
                                        postsInBox, boxActor)
    return False


def removePostInteractions(postJsonObject: {}, force: bool) -> bool:
    """ Don't show likes, replies, bookmarks, DMs or shares (announces) to
    unauthorized viewers. This makes the timeline less useful to
    marketers and other surveillance-oriented organizations.
    Returns False if this is a private post
    """
    hasObject = False
    if hasObjectDict(postJsonObject):
        hasObject = True
    if hasObject:
        postObj = postJsonObject['object']
        if not force:
            # If not authorized and it's a private post
            # then just don't show it within timelines
            if not isPublicPost(postJsonObject):
                return False
    else:
        postObj = postJsonObject

    # clear the likes
    if postObj.get('likes'):
        postObj['likes'] = {
            'items': []
        }
    # remove other collections
    removeCollections = (
        'replies', 'shares', 'bookmarks', 'ignores'
    )
    for removeName in removeCollections:
        if postObj.get(removeName):
            postObj[removeName] = {}
    return True
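
# Illustrative sketch (hypothetical post): for an unauthorized viewer the
# likes collection is emptied and replies/shares/bookmarks/ignores are
# cleared, while a non-public post is filtered out entirely.
#
#   if not removePostInteractions(postJsonObject, force=False):
#       pass  # private post: omit it from the timeline being built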


def _passedNewswireVoting(newswireVotesThreshold: int,
                          baseDir: str, domain: str,
                          postFilename: str,
                          positiveVoting: bool,
                          votingTimeMins: int) -> bool:
    """Returns true if the post has passed through newswire voting
    """
    # apply votes within this timeline
    if newswireVotesThreshold <= 0:
        return True
    # note that the presence of an arrival file also indicates
    # that this post is moderated
    arrivalDate = \
        locateNewsArrival(baseDir, domain, postFilename)
    if not arrivalDate:
        return True
    # how long has elapsed since this post arrived?
    currDate = datetime.datetime.utcnow()
    timeDiffMins = \
        int((currDate - arrivalDate).total_seconds() / 60)
    # has the voting time elapsed?
    if timeDiffMins < votingTimeMins:
        # voting is still happening, so don't add this
        # post to the timeline
        return False
    # is there a votes file for this post?
    votesFilename = \
        locateNewsVotes(baseDir, domain, postFilename)
    if not votesFilename:
        return True
    # load the votes file and count the votes
    votesJson = loadJson(votesFilename, 0, 2)
    if not votesJson:
        return True
    if not positiveVoting:
        if votesOnNewswireItem(votesJson) >= \
           newswireVotesThreshold:
            # Too many veto votes.
            # Continue without incrementing
            # the posts counter
            return False
    else:
        if votesOnNewswireItem(votesJson) < \
           newswireVotesThreshold:
            # Not enough votes.
            # Continue without incrementing
            # the posts counter
            return False
    return True


def _createBoxIndexed(recentPostsCache: {},
                      session, baseDir: str, boxname: str,
                      nickname: str, domain: str, port: int, httpPrefix: str,
                      itemsPerPage: int, headerOnly: bool, authorized: bool,
                      newswireVotesThreshold: int, positiveVoting: bool,
                      votingTimeMins: int, pageNumber: int = None) -> {}:
    """Constructs the box feed for a person with the given nickname
    """
    if not authorized or not pageNumber:
        pageNumber = 1

    if boxname != 'inbox' and boxname != 'dm' and \
       boxname != 'tlreplies' and boxname != 'tlmedia' and \
       boxname != 'tlblogs' and boxname != 'tlnews' and \
       boxname != 'tlfeatures' and \
       boxname != 'outbox' and boxname != 'tlbookmarks' and \
       boxname != 'bookmarks':
        print('ERROR: invalid boxname ' + boxname)
        return None

    # bookmarks and events timelines are like the inbox
    # but have their own separate index
    indexBoxName = boxname
    timelineNickname = nickname
    if boxname == "tlbookmarks":
        boxname = "bookmarks"
        indexBoxName = boxname
    elif boxname == "tlfeatures":
        boxname = "tlblogs"
        indexBoxName = boxname
        timelineNickname = 'news'

    originalDomain = domain
    domain = getFullDomain(domain, port)

    boxActor = localActorUrl(httpPrefix, nickname, domain)

    pageStr = '?page=true'
    if pageNumber:
        if pageNumber < 1:
            pageNumber = 1
        try:
            pageStr = '?page=' + str(pageNumber)
        except BaseException:
            pass
    boxUrl = localActorUrl(httpPrefix, nickname, domain) + '/' + boxname
    boxHeader = {
        '@context': 'https://www.w3.org/ns/activitystreams',
        'first': boxUrl + '?page=true',
        'id': boxUrl,
        'last': boxUrl + '?page=true',
        'totalItems': 0,
        'type': 'OrderedCollection'
    }
    boxItems = {
        '@context': 'https://www.w3.org/ns/activitystreams',
        'id': boxUrl + pageStr,
        'orderedItems': [
        ],
        'partOf': boxUrl,
        'type': 'OrderedCollectionPage'
    }

    postsInBox = []
    postUrlsInBox = []

    indexFilename = \
        acctDir(baseDir, timelineNickname, originalDomain) + \
        '/' + indexBoxName + '.index'
    totalPostsCount = 0
    postsAddedToTimeline = 0
    if os.path.isfile(indexFilename):
        with open(indexFilename, 'r') as indexFile:
            postsAddedToTimeline = 0
            while postsAddedToTimeline < itemsPerPage:
                postFilename = indexFile.readline()

                if not postFilename:
                    break

                # Has this post passed through the newswire voting stage?
                if not _passedNewswireVoting(newswireVotesThreshold,
                                             baseDir, domain,
                                             postFilename,
                                             positiveVoting,
                                             votingTimeMins):
                    continue

                # Skip through any posts previous to the current page
                if totalPostsCount < int((pageNumber - 1) * itemsPerPage):
                    totalPostsCount += 1
                    continue

                # if this is a full path then remove the directories
                if '/' in postFilename:
                    postFilename = postFilename.split('/')[-1]

                # filename of the post without any extension or path
                # This should also correspond to any index entry in
                # the posts cache
                postUrl = \
                    postFilename.replace('\n', '').replace('\r', '')
                postUrl = postUrl.replace('.json', '').strip()

                if postUrl in postUrlsInBox:
                    continue

                # is the post cached in memory?
                if recentPostsCache.get('index'):
                    if postUrl in recentPostsCache['index']:
                        if recentPostsCache['json'].get(postUrl):
                            url = recentPostsCache['json'][postUrl]
                            if _addPostStringToTimeline(url,
                                                        boxname, postsInBox,
                                                        boxActor):
                                totalPostsCount += 1
                                postsAddedToTimeline += 1
                                postUrlsInBox.append(postUrl)
                                continue
                            else:
                                print('Post not added to timeline')

                # read the post from file
                fullPostFilename = \
                    locatePost(baseDir, nickname,
                               originalDomain, postUrl, False)
                if fullPostFilename:
                    # has the post been rejected?
                    if os.path.isfile(fullPostFilename + '.reject'):
                        continue

                    if _addPostToTimeline(fullPostFilename, boxname,
                                          postsInBox, boxActor):
                        postsAddedToTimeline += 1
                        totalPostsCount += 1
                        postUrlsInBox.append(postUrl)
                    else:
                        print('WARN: Unable to add post ' + postUrl +
                              ' nickname ' + nickname +
                              ' timeline ' + boxname)
|
2020-05-21 19:28:09 +00:00
|
|
|
else:
|
2020-11-28 13:04:30 +00:00
|
|
|
if timelineNickname != nickname:
|
2021-02-17 14:01:45 +00:00
|
|
|
# if this is the features timeline
|
2020-11-28 13:04:30 +00:00
|
|
|
fullPostFilename = \
|
|
|
|
locatePost(baseDir, timelineNickname,
|
2021-08-01 19:19:45 +00:00
|
|
|
originalDomain, postUrl, False)
|
2020-11-28 13:04:30 +00:00
|
|
|
if fullPostFilename:
|
2021-03-05 15:18:32 +00:00
|
|
|
if _addPostToTimeline(fullPostFilename, boxname,
|
|
|
|
postsInBox, boxActor):
|
|
|
|
postsAddedToTimeline += 1
|
2021-04-30 09:17:22 +00:00
|
|
|
totalPostsCount += 1
|
2021-09-06 08:52:34 +00:00
|
|
|
postUrlsInBox.append(postUrl)
|
2021-04-30 09:24:56 +00:00
|
|
|
else:
|
|
|
|
print('WARN: Unable to add features post ' +
|
|
|
|
postUrl + ' nickname ' + nickname +
|
|
|
|
' timeline ' + boxname)
|
2020-11-28 13:04:30 +00:00
|
|
|
else:
|
2021-02-17 14:01:45 +00:00
|
|
|
print('WARN: features timeline. ' +
|
|
|
|
'Unable to locate post ' + postUrl)
|
2020-11-28 13:04:30 +00:00
|
|
|
else:
|
2021-02-17 14:01:45 +00:00
|
|
|
print('WARN: Unable to locate post ' + postUrl +
|
|
|
|
' nickname ' + nickname)
|
2019-11-24 12:12:29 +00:00
|
|
|
|
2021-04-30 09:17:22 +00:00
|
|
|
if totalPostsCount < 3:
|
2021-03-05 16:10:25 +00:00
|
|
|
print('Posts added to json timeline ' + boxname + ': ' +
|
|
|
|
str(postsAddedToTimeline))
|
2021-03-05 15:59:39 +00:00
|
|
|
|
2019-11-18 11:28:17 +00:00
|
|
|
# Generate first and last entries within header
|
2021-04-30 09:17:22 +00:00
|
|
|
if totalPostsCount > 0:
|
|
|
|
lastPage = int(totalPostsCount / itemsPerPage)
|
2020-04-04 10:05:27 +00:00
|
|
|
if lastPage < 1:
|
|
|
|
lastPage = 1
|
|
|
|
boxHeader['last'] = \
|
2021-08-14 11:13:39 +00:00
|
|
|
localActorUrl(httpPrefix, nickname, domain) + \
|
|
|
|
'/' + boxname + '?page=' + str(lastPage)
|
2019-11-18 11:28:17 +00:00
|
|
|
|
|
|
|
if headerOnly:
|
2020-04-04 10:05:27 +00:00
|
|
|
boxHeader['totalItems'] = len(postsInBox)
|
|
|
|
prevPageStr = 'true'
|
|
|
|
if pageNumber > 1:
|
|
|
|
prevPageStr = str(pageNumber - 1)
|
|
|
|
boxHeader['prev'] = \
|
2021-08-14 11:13:39 +00:00
|
|
|
localActorUrl(httpPrefix, nickname, domain) + \
|
|
|
|
'/' + boxname + '?page=' + prevPageStr
|
2020-04-04 10:05:27 +00:00
|
|
|
|
|
|
|
nextPageStr = str(pageNumber + 1)
|
|
|
|
boxHeader['next'] = \
|
2021-08-14 11:13:39 +00:00
|
|
|
localActorUrl(httpPrefix, nickname, domain) + \
|
|
|
|
'/' + boxname + '?page=' + nextPageStr
|
2019-11-18 11:28:17 +00:00
|
|
|
return boxHeader
|
|
|
|
|
2019-11-18 11:55:27 +00:00
|
|
|
for postStr in postsInBox:
|
2020-11-28 19:39:37 +00:00
|
|
|
# Check if the post has replies
|
|
|
|
hasReplies = False
|
|
|
|
if postStr.endswith('<hasReplies>'):
|
|
|
|
hasReplies = True
|
|
|
|
# remove the replies identifier
|
|
|
|
postStr = postStr.replace('<hasReplies>', '')
|
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
p = None
|
2019-11-18 11:28:17 +00:00
|
|
|
try:
|
2020-04-04 10:05:27 +00:00
|
|
|
p = json.loads(postStr)
|
|
|
|
except BaseException:
|
2019-11-18 11:28:17 +00:00
|
|
|
continue
|
|
|
|
|
2020-11-28 19:39:37 +00:00
|
|
|
# Does this post have replies?
|
|
|
|
# This will be used to indicate that replies exist within the html
|
|
|
|
# created by individualPostAsHtml
|
|
|
|
p['hasReplies'] = hasReplies
|
|
|
|
|
2021-04-30 11:45:46 +00:00
|
|
|
if not authorized:
|
|
|
|
if not removePostInteractions(p, False):
|
|
|
|
continue
|
2019-11-18 11:28:17 +00:00
|
|
|
|
2019-11-18 12:02:55 +00:00
|
|
|
boxItems['orderedItems'].append(p)
|
2019-11-18 11:28:17 +00:00
|
|
|
|
|
|
|
return boxItems
|
|
|
|
|
2020-04-04 10:05:27 +00:00
|
|
|
|
|
|
|
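# Illustrative sketch (not part of the original code): how a caller might
# request the first page of an authorized user's inbox timeline as an
# OrderedCollectionPage. The literal values (12 items per page, no newswire
# voting threshold, 10 minute voting period) are assumptions for this
# example only; this helper is never called by the module itself.
def _exampleFirstInboxPage(recentPostsCache: {}, session, baseDir: str,
                           nickname: str, domain: str, port: int,
                           httpPrefix: str) -> {}:
    return _createBoxIndexed(recentPostsCache, session, baseDir, 'inbox',
                             nickname, domain, port, httpPrefix,
                             12, False, True, 0, True, 10, 1)

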
def expireCache(baseDir: str, personCache: {},
                httpPrefix: str, archiveDir: str,
                recentPostsCache: {},
                maxPostsInBox=32000):
    """Thread used to expire actors from the cache and archive old posts
    """
    while True:
        # once per day
        time.sleep(60 * 60 * 24)
        expirePersonCache(personCache)
        archivePosts(baseDir, httpPrefix, archiveDir, recentPostsCache,
                     maxPostsInBox)


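# Illustrative sketch (not part of the original code): expireCache is a
# blocking loop, so it would normally be run on its own thread. This assumes
# that threadWithTrace (imported at the top of this module) accepts the same
# keyword arguments as threading.Thread; the 32000 post limit is the
# function's own default, repeated here only for clarity.
def _exampleStartCacheExpiryThread(baseDir: str, personCache: {},
                                   httpPrefix: str, archiveDir: str,
                                   recentPostsCache: {}):
    thrCache = threadWithTrace(target=expireCache,
                               args=(baseDir, personCache, httpPrefix,
                                     archiveDir, recentPostsCache, 32000),
                               daemon=True)
    thrCache.start()
    return thrCache

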
def archivePosts(baseDir: str, httpPrefix: str, archiveDir: str,
                 recentPostsCache: {},
                 maxPostsInBox=32000) -> None:
    """Archives posts for all accounts
    """
    if maxPostsInBox == 0:
        return

    if archiveDir:
        if not os.path.isdir(archiveDir):
            os.mkdir(archiveDir)

    if archiveDir:
        if not os.path.isdir(archiveDir + '/accounts'):
            os.mkdir(archiveDir + '/accounts')

    for subdir, dirs, files in os.walk(baseDir + '/accounts'):
        for handle in dirs:
            if '@' in handle:
                nickname = handle.split('@')[0]
                domain = handle.split('@')[1]
                archiveSubdir = None
                if archiveDir:
                    if not os.path.isdir(archiveDir + '/accounts/' + handle):
                        os.mkdir(archiveDir + '/accounts/' + handle)
                    if not os.path.isdir(archiveDir + '/accounts/' +
                                         handle + '/inbox'):
                        os.mkdir(archiveDir + '/accounts/' +
                                 handle + '/inbox')
                    if not os.path.isdir(archiveDir + '/accounts/' +
                                         handle + '/outbox'):
                        os.mkdir(archiveDir + '/accounts/' +
                                 handle + '/outbox')
                    archiveSubdir = archiveDir + '/accounts/' + \
                        handle + '/inbox'
                archivePostsForPerson(httpPrefix, nickname, domain, baseDir,
                                      'inbox', archiveSubdir,
                                      recentPostsCache, maxPostsInBox)
                if archiveDir:
                    archiveSubdir = archiveDir + '/accounts/' + \
                        handle + '/outbox'
                archivePostsForPerson(httpPrefix, nickname, domain, baseDir,
                                      'outbox', archiveSubdir,
                                      recentPostsCache, maxPostsInBox)
        break


def archivePostsForPerson(httpPrefix: str, nickname: str, domain: str,
                          baseDir: str,
                          boxname: str, archiveDir: str,
                          recentPostsCache: {},
                          maxPostsInBox=32000) -> None:
    """Retain a maximum number of posts within the given box
    Move any others to an archive directory
    """
    if boxname != 'inbox' and boxname != 'outbox':
        return
    if archiveDir:
        if not os.path.isdir(archiveDir):
            os.mkdir(archiveDir)
    boxDir = createPersonDir(nickname, domain, baseDir, boxname)
    postsInBox = os.scandir(boxDir)
    noOfPosts = 0
    for f in postsInBox:
        noOfPosts += 1
    if noOfPosts <= maxPostsInBox:
        print('Checked ' + str(noOfPosts) + ' ' + boxname +
              ' posts for ' + nickname + '@' + domain)
        return

    # remove entries from the index
    handle = nickname + '@' + domain
    indexFilename = baseDir + '/accounts/' + handle + '/' + boxname + '.index'
    if os.path.isfile(indexFilename):
        indexCtr = 0
        # get the existing index entries as a string
        newIndex = ''
        with open(indexFilename, 'r') as indexFile:
            for postId in indexFile:
                newIndex += postId
                indexCtr += 1
                if indexCtr >= maxPostsInBox:
                    break
        # save the new index file
        if len(newIndex) > 0:
            with open(indexFilename, 'w+') as indexFile:
                indexFile.write(newIndex)

    postsInBoxDict = {}
    postsCtr = 0
    postsInBox = os.scandir(boxDir)
    for postFilename in postsInBox:
        postFilename = postFilename.name
        if not postFilename.endswith('.json'):
            continue
        # Time of file creation
        fullFilename = os.path.join(boxDir, postFilename)
        if os.path.isfile(fullFilename):
            content = open(fullFilename).read()
            if '"published":' in content:
                publishedStr = content.split('"published":')[1]
                if '"' in publishedStr:
                    publishedStr = publishedStr.split('"')[1]
                    if publishedStr.endswith('Z'):
                        postsInBoxDict[publishedStr] = postFilename
                        postsCtr += 1

    noOfPosts = postsCtr
    if noOfPosts <= maxPostsInBox:
        print('Checked ' + str(noOfPosts) + ' ' + boxname +
              ' posts for ' + nickname + '@' + domain)
        return

    # sort the list in ascending order of date
    postsInBoxSorted = \
        OrderedDict(sorted(postsInBoxDict.items(), reverse=False))

    # directory containing cached html posts
    postCacheDir = boxDir.replace('/' + boxname, '/postcache')

    removeCtr = 0
    for publishedStr, postFilename in postsInBoxSorted.items():
        filePath = os.path.join(boxDir, postFilename)
        if not os.path.isfile(filePath):
            continue
        if archiveDir:
            archivePath = os.path.join(archiveDir, postFilename)
            os.rename(filePath, archivePath)

            extensions = ('replies', 'votes', 'arrived', 'muted')
            for ext in extensions:
                extPath = filePath.replace('.json', '.' + ext)
                if os.path.isfile(extPath):
                    os.rename(extPath,
                              archivePath.replace('.json', '.' + ext))
                else:
                    extPath = filePath.replace('.json',
                                               '.json.' + ext)
                    if os.path.isfile(extPath):
                        os.rename(extPath,
                                  archivePath.replace('.json',
                                                      '.json.' + ext))
        else:
            deletePost(baseDir, httpPrefix, nickname, domain,
                       filePath, False, recentPostsCache)

        # remove cached html posts
        postCacheFilename = \
            os.path.join(postCacheDir, postFilename).replace('.json', '.html')
        if os.path.isfile(postCacheFilename):
            try:
                os.remove(postCacheFilename)
            except BaseException:
                pass

        noOfPosts -= 1
        removeCtr += 1
        if noOfPosts <= maxPostsInBox:
            break
    if archiveDir:
        print('Archived ' + str(removeCtr) + ' ' + boxname +
              ' posts for ' + nickname + '@' + domain)
    else:
        print('Removed ' + str(removeCtr) + ' ' + boxname +
              ' posts for ' + nickname + '@' + domain)
    print(nickname + '@' + domain + ' has ' + str(noOfPosts) +
          ' in ' + boxname)


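# Illustrative sketch (not part of the original code): archive a single
# account's inbox into a separate directory, keeping at most 4000 posts.
# The archive path layout mirrors the one built by archivePosts above, the
# 4000 limit is an assumption for the example, and the per-account archive
# directories are assumed to already exist.
def _exampleArchiveInbox(httpPrefix: str, nickname: str, domain: str,
                         baseDir: str, archiveDir: str,
                         recentPostsCache: {}) -> None:
    archiveSubdir = archiveDir + '/accounts/' + \
        nickname + '@' + domain + '/inbox'
    archivePostsForPerson(httpPrefix, nickname, domain, baseDir,
                          'inbox', archiveSubdir,
                          recentPostsCache, 4000)

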
def getPublicPostsOfPerson(baseDir: str, nickname: str, domain: str,
                           raw: bool, simple: bool, proxyType: str,
                           port: int, httpPrefix: str,
                           debug: bool, projectVersion: str,
                           systemLanguage: str,
                           signingPrivateKeyPem: str,
                           originDomain: str) -> None:
    """ This is really just for test purposes
    """
    print('Starting new session for getting public posts')
    session = createSession(proxyType)
    if not session:
        if debug:
            print('Session was not created')
        return
    personCache = {}
    cachedWebfingers = {}
    federationList = []
    groupAccount = False
    if nickname.startswith('!'):
        nickname = nickname[1:]
        groupAccount = True
    domainFull = getFullDomain(domain, port)
    handle = httpPrefix + "://" + domainFull + "/@" + nickname

    wfRequest = \
        webfingerHandle(session, handle, httpPrefix, cachedWebfingers,
                        domain, projectVersion, debug, groupAccount,
                        signingPrivateKeyPem)
    if not wfRequest:
        if debug:
            print('No webfinger result was returned for ' + handle)
        sys.exit()
    if not isinstance(wfRequest, dict):
        print('Webfinger for ' + handle + ' did not return a dict. ' +
              str(wfRequest))
        sys.exit()

    if debug:
        print('Getting the outbox for ' + handle)
    (personUrl, pubKeyId, pubKey,
     personId, sharedInbox,
     avatarUrl, displayName) = getPersonBox(signingPrivateKeyPem,
                                            baseDir, session, wfRequest,
                                            personCache,
                                            projectVersion, httpPrefix,
                                            nickname, domain, 'outbox',
                                            62524)
    if debug:
        print('Actor url: ' + personId)

    maxMentions = 10
    maxEmoji = 10
    maxAttachments = 5
    _getPosts(session, personUrl, 30, maxMentions, maxEmoji,
              maxAttachments, federationList,
              personCache, raw, simple, debug,
              projectVersion, httpPrefix, originDomain, systemLanguage,
              signingPrivateKeyPem)


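# Illustrative sketch (not part of the original code): print the public
# posts of a remote account, the way a test or command line caller might.
# The handle, port, proxy and language values are assumptions for this
# example only.
def _exampleShowPublicPosts(baseDir: str, signingPrivateKeyPem: str) -> None:
    getPublicPostsOfPerson(baseDir, 'alice', 'example.org',
                           False, True, None,
                           443, 'https',
                           False, __version__,
                           'en',
                           signingPrivateKeyPem,
                           'example.org')

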
def getPublicPostDomains(session, baseDir: str, nickname: str, domain: str,
                         proxyType: str, port: int, httpPrefix: str,
                         debug: bool, projectVersion: str,
                         wordFrequency: {}, domainList: [],
                         systemLanguage: str,
                         signingPrivateKeyPem: str) -> []:
    """ Returns a list of domains referenced within public posts
    """
    if not session:
        session = createSession(proxyType)
    if not session:
        return domainList
    personCache = {}
    cachedWebfingers = {}
    federationList = []

    domainFull = getFullDomain(domain, port)
    handle = httpPrefix + "://" + domainFull + "/@" + nickname
    wfRequest = \
        webfingerHandle(session, handle, httpPrefix, cachedWebfingers,
                        domain, projectVersion, debug, False,
                        signingPrivateKeyPem)
    if not wfRequest:
        return domainList
    if not isinstance(wfRequest, dict):
        print('Webfinger for ' + handle + ' did not return a dict. ' +
              str(wfRequest))
        return domainList

    (personUrl, pubKeyId, pubKey,
     personId, sharedInbox,
     avatarUrl, displayName) = getPersonBox(signingPrivateKeyPem,
                                            baseDir, session, wfRequest,
                                            personCache,
                                            projectVersion, httpPrefix,
                                            nickname, domain, 'outbox',
                                            92522)
    maxMentions = 99
    maxEmoji = 99
    maxAttachments = 5
    postDomains = \
        getPostDomains(session, personUrl, 64, maxMentions, maxEmoji,
                       maxAttachments, federationList,
                       personCache, debug,
                       projectVersion, httpPrefix, domain,
                       wordFrequency, domainList, systemLanguage,
                       signingPrivateKeyPem)
    postDomains.sort()
    return postDomains


def downloadFollowCollection(signingPrivateKeyPem: str,
                             followType: str,
                             session, httpPrefix: str,
                             actor: str, pageNumber: int = 1,
                             noOfPages: int = 1, debug: bool = False) -> []:
    """Returns a list of following/followers for the given actor
    by downloading the json for their following/followers collection
    """
    prof = 'https://www.w3.org/ns/activitystreams'
    if '/channel/' not in actor or '/accounts/' not in actor:
        acceptStr = \
            'application/activity+json, application/ld+json; ' + \
            'profile="' + prof + '"'
        sessionHeaders = {
            'Accept': acceptStr
        }
    else:
        acceptStr = \
            'application/ld+json, application/activity+json; ' + \
            'profile="' + prof + '"'
        sessionHeaders = {
            'Accept': acceptStr
        }
    result = []
    for pageCtr in range(noOfPages):
        url = actor + '/' + followType + '?page=' + str(pageNumber + pageCtr)
        followersJson = \
            getJson(signingPrivateKeyPem, session, url, sessionHeaders, None,
                    debug, __version__, httpPrefix, None)
        if followersJson:
            if followersJson.get('orderedItems'):
                for followerActor in followersJson['orderedItems']:
                    if followerActor not in result:
                        result.append(followerActor)
            elif followersJson.get('items'):
                for followerActor in followersJson['items']:
                    if followerActor not in result:
                        result.append(followerActor)
            else:
                break
        else:
            break
    return result


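# Illustrative sketch (not part of the original code): fetch up to the first
# two pages of an actor's followers collection. The actor url and the page
# counts are assumptions for the example only.
def _exampleDownloadFollowers(signingPrivateKeyPem: str, session,
                              httpPrefix: str) -> []:
    actor = 'https://example.org/users/alice'
    return downloadFollowCollection(signingPrivateKeyPem, 'followers',
                                    session, httpPrefix, actor, 1, 2, False)

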
def getPublicPostInfo(session, baseDir: str, nickname: str, domain: str,
                      proxyType: str, port: int, httpPrefix: str,
                      debug: bool, projectVersion: str,
                      wordFrequency: {}, systemLanguage: str,
                      signingPrivateKeyPem: str) -> []:
    """ Returns a dict of domains referenced within public posts
    """
    if not session:
        session = createSession(proxyType)
    if not session:
        return {}
    personCache = {}
    cachedWebfingers = {}
    federationList = []

    domainFull = getFullDomain(domain, port)
    handle = httpPrefix + "://" + domainFull + "/@" + nickname
    wfRequest = \
        webfingerHandle(session, handle, httpPrefix, cachedWebfingers,
                        domain, projectVersion, debug, False,
                        signingPrivateKeyPem)
    if not wfRequest:
        return {}
    if not isinstance(wfRequest, dict):
        print('Webfinger for ' + handle + ' did not return a dict. ' +
              str(wfRequest))
        return {}

    (personUrl, pubKeyId, pubKey,
     personId, sharedInbox,
     avatarUrl, displayName) = getPersonBox(signingPrivateKeyPem,
                                            baseDir, session, wfRequest,
                                            personCache,
                                            projectVersion, httpPrefix,
                                            nickname, domain, 'outbox',
                                            13863)
    maxMentions = 99
    maxEmoji = 99
    maxAttachments = 5
    maxPosts = 64
    postDomains = \
        getPostDomains(session, personUrl, maxPosts, maxMentions, maxEmoji,
                       maxAttachments, federationList,
                       personCache, debug,
                       projectVersion, httpPrefix, domain,
                       wordFrequency, [], systemLanguage,
                       signingPrivateKeyPem)
    postDomains.sort()
    domainsInfo = {}
    for d in postDomains:
        if not domainsInfo.get(d):
            domainsInfo[d] = []

    blockedPosts = \
        _getPostsForBlockedDomains(baseDir, session, personUrl, maxPosts,
                                   maxMentions,
                                   maxEmoji, maxAttachments,
                                   federationList,
                                   personCache,
                                   debug,
                                   projectVersion, httpPrefix,
                                   domain, signingPrivateKeyPem)
    for blockedDomain, postUrlList in blockedPosts.items():
        domainsInfo[blockedDomain] += postUrlList

    return domainsInfo


def getPublicPostDomainsBlocked(session, baseDir: str,
                                nickname: str, domain: str,
                                proxyType: str, port: int, httpPrefix: str,
                                debug: bool, projectVersion: str,
                                wordFrequency: {}, domainList: [],
                                systemLanguage: str,
                                signingPrivateKeyPem: str) -> []:
    """ Returns a list of domains referenced within public posts which
    are globally blocked on this instance
    """
    postDomains = \
        getPublicPostDomains(session, baseDir, nickname, domain,
                             proxyType, port, httpPrefix,
                             debug, projectVersion,
                             wordFrequency, domainList, systemLanguage,
                             signingPrivateKeyPem)
    if not postDomains:
        return []

    blockingFilename = baseDir + '/accounts/blocking.txt'
    if not os.path.isfile(blockingFilename):
        return []

    # read the blocked domains as a single string
    blockedStr = ''
    with open(blockingFilename, 'r') as fp:
        blockedStr = fp.read()

    blockedDomains = []
    for domainName in postDomains:
        if '@' not in domainName:
            continue
        # get the domain after the @
        domainName = domainName.split('@')[1].strip()
        if isEvil(domainName):
            blockedDomains.append(domainName)
            continue
        if domainName in blockedStr:
            blockedDomains.append(domainName)

    return blockedDomains


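# Illustrative sketch (not part of the original code): list the globally
# blocked domains which a remote account has recently referenced in its
# public posts. The handle and connection details are assumptions for the
# example only.
def _exampleBlockedDomainsForHandle(session, baseDir: str,
                                    signingPrivateKeyPem: str) -> []:
    return getPublicPostDomainsBlocked(session, baseDir,
                                       'alice', 'example.org',
                                       None, 443, 'https',
                                       False, __version__,
                                       {}, [], 'en',
                                       signingPrivateKeyPem)

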
def _getNonMutualsOfPerson(baseDir: str,
                           nickname: str, domain: str) -> []:
    """Returns the followers who are not mutuals of a person
    i.e. accounts which follow you but you don't follow them
    """
    followers = \
        getFollowersList(baseDir, nickname, domain, 'followers.txt')
    following = \
        getFollowersList(baseDir, nickname, domain, 'following.txt')
    nonMutuals = []
    for handle in followers:
        if handle not in following:
            nonMutuals.append(handle)
    return nonMutuals


def checkDomains(session, baseDir: str,
                 nickname: str, domain: str,
                 proxyType: str, port: int, httpPrefix: str,
                 debug: bool, projectVersion: str,
                 maxBlockedDomains: int, singleCheck: bool,
                 systemLanguage: str,
                 signingPrivateKeyPem: str) -> None:
    """Checks follower accounts for references to globally blocked domains
    """
    wordFrequency = {}
    nonMutuals = _getNonMutualsOfPerson(baseDir, nickname, domain)
    if not nonMutuals:
        print('No non-mutual followers were found')
        return
    followerWarningFilename = baseDir + '/accounts/followerWarnings.txt'
    updateFollowerWarnings = False
    followerWarningStr = ''
    if os.path.isfile(followerWarningFilename):
        with open(followerWarningFilename, 'r') as fp:
            followerWarningStr = fp.read()

    if singleCheck:
        # checks a single random non-mutual
        index = random.randrange(0, len(nonMutuals))
        handle = nonMutuals[index]
        if '@' in handle:
            nonMutualNickname = handle.split('@')[0]
            nonMutualDomain = handle.split('@')[1].strip()
            blockedDomains = \
                getPublicPostDomainsBlocked(session, baseDir,
                                            nonMutualNickname,
                                            nonMutualDomain,
                                            proxyType, port, httpPrefix,
                                            debug, projectVersion,
                                            wordFrequency, [],
                                            systemLanguage,
                                            signingPrivateKeyPem)
            if blockedDomains:
                if len(blockedDomains) > maxBlockedDomains:
                    followerWarningStr += handle + '\n'
                    updateFollowerWarnings = True
    else:
        # checks all non-mutuals
        for handle in nonMutuals:
            if '@' not in handle:
                continue
            if handle in followerWarningStr:
                continue
            nonMutualNickname = handle.split('@')[0]
            nonMutualDomain = handle.split('@')[1].strip()
            blockedDomains = \
                getPublicPostDomainsBlocked(session, baseDir,
                                            nonMutualNickname,
                                            nonMutualDomain,
                                            proxyType, port, httpPrefix,
                                            debug, projectVersion,
                                            wordFrequency, [],
                                            systemLanguage,
                                            signingPrivateKeyPem)
            if blockedDomains:
                print(handle)
                for d in blockedDomains:
                    print(' ' + d)
                if len(blockedDomains) > maxBlockedDomains:
                    followerWarningStr += handle + '\n'
                    updateFollowerWarnings = True

    if updateFollowerWarnings and followerWarningStr:
        with open(followerWarningFilename, 'w+') as fp:
            fp.write(followerWarningStr)
        if not singleCheck:
            print(followerWarningStr)


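# Illustrative sketch (not part of the original code): run a single random
# spot-check of one non-mutual follower, flagging them in
# followerWarnings.txt if they reference more than 10 blocked domains.
# The numeric values and connection details are assumptions for the example.
def _exampleSingleDomainCheck(session, baseDir: str, nickname: str,
                              domain: str,
                              signingPrivateKeyPem: str) -> None:
    checkDomains(session, baseDir, nickname, domain,
                 None, 443, 'https',
                 False, __version__,
                 10, True, 'en',
                 signingPrivateKeyPem)

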
def populateRepliesJson(baseDir: str, nickname: str, domain: str,
                        postRepliesFilename: str, authorized: bool,
                        repliesJson: {}) -> None:
    pubStr = 'https://www.w3.org/ns/activitystreams#Public'
    # populate the items list with replies
    repliesBoxes = ('outbox', 'inbox')
    with open(postRepliesFilename, 'r') as repliesFile:
        for messageId in repliesFile:
            replyFound = False
            # examine inbox and outbox
            for boxname in repliesBoxes:
                messageId2 = messageId.replace('\n', '').replace('\r', '')
                searchFilename = \
                    acctDir(baseDir, nickname, domain) + '/' + \
                    boxname + '/' + \
                    messageId2.replace('/', '#') + '.json'
                if os.path.isfile(searchFilename):
                    if authorized or \
                       pubStr in open(searchFilename).read():
                        postJsonObject = loadJson(searchFilename)
                        if postJsonObject:
                            if postJsonObject['object'].get('cc'):
                                pjo = postJsonObject
                                if (authorized or
                                    (pubStr in pjo['object']['to'] or
                                     pubStr in pjo['object']['cc'])):
                                    repliesJson['orderedItems'].append(pjo)
                                    replyFound = True
                            else:
                                if authorized or \
                                   pubStr in postJsonObject['object']['to']:
                                    pjo = postJsonObject
                                    repliesJson['orderedItems'].append(pjo)
                                    replyFound = True
                    break
            # if not in either inbox or outbox then examine the shared inbox
            if not replyFound:
                messageId2 = messageId.replace('\n', '').replace('\r', '')
                searchFilename = \
                    baseDir + \
                    '/accounts/inbox@' + \
                    domain + '/inbox/' + \
                    messageId2.replace('/', '#') + '.json'
                if os.path.isfile(searchFilename):
                    if authorized or \
                       pubStr in open(searchFilename).read():
                        # get the json of the reply and append it to
                        # the collection
                        postJsonObject = loadJson(searchFilename)
                        if postJsonObject:
                            if postJsonObject['object'].get('cc'):
                                pjo = postJsonObject
                                if (authorized or
                                    (pubStr in pjo['object']['to'] or
                                     pubStr in pjo['object']['cc'])):
                                    pjo = postJsonObject
                                    repliesJson['orderedItems'].append(pjo)
                            else:
                                if authorized or \
                                   pubStr in postJsonObject['object']['to']:
                                    pjo = postJsonObject
                                    repliesJson['orderedItems'].append(pjo)


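# Illustrative sketch (not part of the original code): populateRepliesJson
# fills in the orderedItems list of a collection page supplied by the
# caller. The skeleton fields and urls shown here are assumptions for the
# example only; only the orderedItems list is touched by the function.
def _exampleRepliesCollection(baseDir: str, nickname: str, domain: str,
                              postRepliesFilename: str,
                              authorized: bool) -> {}:
    repliesUrl = 'https://example.org/users/alice/statuses/1/replies'
    repliesJson = {
        '@context': 'https://www.w3.org/ns/activitystreams',
        'id': repliesUrl + '?page=true',
        'orderedItems': [],
        'partOf': repliesUrl,
        'type': 'OrderedCollectionPage'
    }
    populateRepliesJson(baseDir, nickname, domain,
                        postRepliesFilename, authorized, repliesJson)
    return repliesJson

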
def _rejectAnnounce(announceFilename: str,
                    baseDir: str, nickname: str, domain: str,
                    announcePostId: str, recentPostsCache: {}):
    """Marks an announce as rejected
    """
    rejectPostId(baseDir, nickname, domain, announcePostId, recentPostsCache)

    # reject the post referenced by the announce activity object
    if not os.path.isfile(announceFilename + '.reject'):
        with open(announceFilename + '.reject', 'w+') as rejectAnnounceFile:
            rejectAnnounceFile.write('\n')


def downloadAnnounce(session, baseDir: str, httpPrefix: str,
                     nickname: str, domain: str,
                     postJsonObject: {}, projectVersion: str,
                     translate: {}, YTReplacementDomain: str,
                     allowLocalNetworkAccess: bool,
                     recentPostsCache: {}, debug: bool,
                     systemLanguage: str,
                     domainFull: str, personCache: {},
                     signingPrivateKeyPem: str) -> {}:
    """Download the post referenced by an announce
    """
    if not postJsonObject.get('object'):
        return None
    if not isinstance(postJsonObject['object'], str):
        return None
    # ignore self-boosts
    if postJsonObject['actor'] in postJsonObject['object']:
        return None

    # get the announced post
    announceCacheDir = baseDir + '/cache/announce/' + nickname
    if not os.path.isdir(announceCacheDir):
        os.mkdir(announceCacheDir)

    postId = None
    if postJsonObject.get('id'):
        postId = postJsonObject['id']
    announceFilename = \
        announceCacheDir + '/' + \
        postJsonObject['object'].replace('/', '#') + '.json'

    if os.path.isfile(announceFilename + '.reject'):
        return None

    if os.path.isfile(announceFilename):
        if debug:
            print('Reading cached Announce content for ' +
                  postJsonObject['object'])
        postJsonObject = loadJson(announceFilename)
        if postJsonObject:
            return postJsonObject
    else:
        profileStr = 'https://www.w3.org/ns/activitystreams'
        acceptStr = \
            'application/activity+json, application/ld+json; ' + \
            'profile="' + profileStr + '"'
        asHeader = {
            'Accept': acceptStr
        }
        if '/channel/' in postJsonObject['actor'] or \
           '/accounts/' in postJsonObject['actor']:
            acceptStr = \
                'application/ld+json, application/activity+json; ' + \
                'profile="' + profileStr + '"'
            asHeader = {
                'Accept': acceptStr
            }
        actorNickname = getNicknameFromActor(postJsonObject['actor'])
        actorDomain, actorPort = getDomainFromActor(postJsonObject['actor'])
        if not actorDomain:
            print('Announce actor does not contain a ' +
                  'valid domain or port number: ' +
                  str(postJsonObject['actor']))
            return None
        if isBlocked(baseDir, nickname, domain, actorNickname, actorDomain):
            print('Announce download blocked actor: ' +
                  actorNickname + '@' + actorDomain)
            return None
        objectNickname = getNicknameFromActor(postJsonObject['object'])
        objectDomain, objectPort = getDomainFromActor(postJsonObject['object'])
        if not objectDomain:
            print('Announce object does not contain a ' +
                  'valid domain or port number: ' +
                  str(postJsonObject['object']))
            return None
        if isBlocked(baseDir, nickname, domain, objectNickname, objectDomain):
            if objectNickname and objectDomain:
                print('Announce download blocked object: ' +
                      objectNickname + '@' + objectDomain)
            else:
                print('Announce download blocked object: ' +
                      str(postJsonObject['object']))
            return None
        if debug:
            print('Downloading Announce content for ' +
                  postJsonObject['object'])
        announcedJson = \
            getJson(signingPrivateKeyPem, session, postJsonObject['object'],
                    asHeader, None, debug, projectVersion, httpPrefix, domain)

        if not announcedJson:
            return None

        if not isinstance(announcedJson, dict):
            print('WARN: announce json is not a dict - ' +
                  postJsonObject['object'])
            _rejectAnnounce(announceFilename,
                            baseDir, nickname, domain, postId,
                            recentPostsCache)
            return None
        if not announcedJson.get('id'):
            _rejectAnnounce(announceFilename,
                            baseDir, nickname, domain, postId,
                            recentPostsCache)
            return None
        if '/statuses/' not in announcedJson['id']:
            _rejectAnnounce(announceFilename,
                            baseDir, nickname, domain, postId,
                            recentPostsCache)
            return None
        if not hasUsersPath(announcedJson['id']):
            _rejectAnnounce(announceFilename,
                            baseDir, nickname, domain, postId,
                            recentPostsCache)
            return None
        if not announcedJson.get('type'):
            _rejectAnnounce(announceFilename,
                            baseDir, nickname, domain, postId,
                            recentPostsCache)
            return None
        if announcedJson['type'] != 'Note' and \
           announcedJson['type'] != 'Article':
            # You can only announce Note or Article types
            _rejectAnnounce(announceFilename,
                            baseDir, nickname, domain, postId,
                            recentPostsCache)
            return None
        if not announcedJson.get('content'):
            _rejectAnnounce(announceFilename,
                            baseDir, nickname, domain, postId,
                            recentPostsCache)
            return None
        if not announcedJson.get('published'):
            _rejectAnnounce(announceFilename,
                            baseDir, nickname, domain, postId,
                            recentPostsCache)
            return None
        if not validPostDate(announcedJson['published'], 90, debug):
            _rejectAnnounce(announceFilename,
                            baseDir, nickname, domain, postId,
                            recentPostsCache)
            return None
        if not understoodPostLanguage(baseDir, nickname, domain,
                                      announcedJson, systemLanguage,
                                      httpPrefix, domainFull,
                                      personCache):
            return None
        # Check the content of the announce
        contentStr = announcedJson['content']
        if dangerousMarkup(contentStr, allowLocalNetworkAccess):
            _rejectAnnounce(announceFilename,
                            baseDir, nickname, domain, postId,
                            recentPostsCache)
            return None

        if isFiltered(baseDir, nickname, domain, contentStr):
            _rejectAnnounce(announceFilename,
                            baseDir, nickname, domain, postId,
                            recentPostsCache)
            return None

        # remove any long words
        contentStr = removeLongWords(contentStr, 40, [])

        # Prevent the same word from being repeated many times
        contentStr = limitRepeatedWords(contentStr, 6)

        # remove text formatting, such as bold/italics
        contentStr = removeTextFormatting(contentStr)

        # set the content after sanitization
        announcedJson['content'] = contentStr

        # wrap in create to be consistent with other posts
        announcedJson = \
            outboxMessageCreateWrap(httpPrefix,
                                    actorNickname, actorDomain, actorPort,
                                    announcedJson)
        if announcedJson['type'] != 'Create':
            # Create wrap failed
            _rejectAnnounce(announceFilename,
                            baseDir, nickname, domain, postId,
                            recentPostsCache)
            return None

        # labelAccusatoryPost(postJsonObject, translate)
        # set the id to the original status
        announcedJson['id'] = postJsonObject['object']
        announcedJson['object']['id'] = postJsonObject['object']
        # check that the repeat isn't for a blocked account
        attributedNickname = \
            getNicknameFromActor(announcedJson['object']['id'])
        attributedDomain, attributedPort = \
            getDomainFromActor(announcedJson['object']['id'])
        if attributedNickname and attributedDomain:
            attributedDomain = getFullDomain(attributedDomain, attributedPort)
            if isBlocked(baseDir, nickname, domain,
                         attributedNickname, attributedDomain):
                _rejectAnnounce(announceFilename,
                                baseDir, nickname, domain, postId,
                                recentPostsCache)
                return None
        postJsonObject = announcedJson
        replaceYouTube(postJsonObject, YTReplacementDomain, systemLanguage)
        if saveJson(postJsonObject, announceFilename):
            return postJsonObject
    return None


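# Illustrative sketch (not part of the original code): resolve an incoming
# Announce activity to the Create-wrapped post which it repeats. An empty
# YTReplacementDomain is passed and debug output is off; the 90 day
# validity window is applied inside downloadAnnounce itself. All of the
# argument choices here are assumptions for the example only.
def _exampleResolveAnnounce(session, baseDir: str, httpPrefix: str,
                            nickname: str, domain: str, domainFull: str,
                            announceJson: {}, translate: {},
                            recentPostsCache: {}, personCache: {},
                            signingPrivateKeyPem: str) -> {}:
    return downloadAnnounce(session, baseDir, httpPrefix,
                            nickname, domain,
                            announceJson, __version__,
                            translate, '',
                            False,
                            recentPostsCache, False,
                            'en',
                            domainFull, personCache,
                            signingPrivateKeyPem)

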
def isMuted(baseDir: str, nickname: str, domain: str, postId: str,
            conversationId: str) -> bool:
    """Returns true if the given post is muted
    """
    if conversationId:
        convMutedFilename = \
            acctDir(baseDir, nickname, domain) + '/conversation/' + \
            conversationId.replace('/', '#') + '.muted'
        if os.path.isfile(convMutedFilename):
            return True
    postFilename = locatePost(baseDir, nickname, domain, postId)
    if not postFilename:
        return False
    if os.path.isfile(postFilename + '.muted'):
        return True
    return False


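# Illustrative sketch (not part of the original code): a post is treated as
# muted if either its own .muted marker file exists or the conversation it
# belongs to has been muted. The post id below is an assumption for the
# example only; None is passed when no conversation id is known.
def _exampleIsPostMuted(baseDir: str, nickname: str, domain: str) -> bool:
    postId = 'https://example.org/users/alice/statuses/123456'
    return isMuted(baseDir, nickname, domain, postId, None)

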
def sendBlockViaServer(baseDir: str, session,
                       fromNickname: str, password: str,
                       fromDomain: str, fromPort: int,
                       httpPrefix: str, blockedUrl: str,
                       cachedWebfingers: {}, personCache: {},
                       debug: bool, projectVersion: str,
                       signingPrivateKeyPem: str) -> {}:
    """Creates a block via c2s
    """
    if not session:
        print('WARN: No session for sendBlockViaServer')
        return 6

    fromDomainFull = getFullDomain(fromDomain, fromPort)

    blockActor = localActorUrl(httpPrefix, fromNickname, fromDomainFull)
    toUrl = 'https://www.w3.org/ns/activitystreams#Public'
    ccUrl = blockActor + '/followers'

    newBlockJson = {
        "@context": "https://www.w3.org/ns/activitystreams",
        'type': 'Block',
        'actor': blockActor,
        'object': blockedUrl,
        'to': [toUrl],
        'cc': [ccUrl]
    }

    handle = httpPrefix + '://' + fromDomainFull + '/@' + fromNickname

    # lookup the inbox for the To handle
    wfRequest = webfingerHandle(session, handle, httpPrefix,
                                cachedWebfingers,
                                fromDomain, projectVersion, debug, False,
                                signingPrivateKeyPem)
    if not wfRequest:
        if debug:
            print('DEBUG: block webfinger failed for ' + handle)
        return 1
    if not isinstance(wfRequest, dict):
        print('WARN: block Webfinger for ' + handle +
              ' did not return a dict. ' + str(wfRequest))
        return 1

    postToBox = 'outbox'

    # get the actor inbox for the To handle
    (inboxUrl, pubKeyId, pubKey,
     fromPersonId, sharedInbox, avatarUrl,
     displayName) = getPersonBox(signingPrivateKeyPem,
                                 baseDir, session, wfRequest,
                                 personCache,
                                 projectVersion, httpPrefix, fromNickname,
                                 fromDomain, postToBox, 72652)

    if not inboxUrl:
        if debug:
            print('DEBUG: block no ' + postToBox + ' was found for ' + handle)
        return 3
    if not fromPersonId:
        if debug:
            print('DEBUG: block no actor was found for ' + handle)
        return 4

    authHeader = createBasicAuthHeader(fromNickname, password)

    headers = {
        'host': fromDomain,
        'Content-type': 'application/json',
        'Authorization': authHeader
    }
    postResult = postJson(httpPrefix, fromDomainFull,
                          session, newBlockJson, [], inboxUrl,
                          headers, 30, True)
    if not postResult:
        print('WARN: block unable to post')

    if debug:
        print('DEBUG: c2s POST block success')

    return newBlockJson


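# Illustrative sketch (not part of the original code): a c2s client blocking
# a remote actor through its home instance's outbox. The account details and
# blocked actor url are assumptions for the example only; on failure an
# integer error code is returned instead of the Block activity.
def _exampleClientBlock(baseDir: str, session, password: str,
                        cachedWebfingers: {}, personCache: {},
                        signingPrivateKeyPem: str) -> {}:
    return sendBlockViaServer(baseDir, session,
                              'alice', password,
                              'example.org', 443,
                              'https', 'https://other.site/users/spammer',
                              cachedWebfingers, personCache,
                              False, __version__,
                              signingPrivateKeyPem)

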
def sendMuteViaServer(baseDir: str, session,
                      fromNickname: str, password: str,
                      fromDomain: str, fromPort: int,
                      httpPrefix: str, mutedUrl: str,
                      cachedWebfingers: {}, personCache: {},
                      debug: bool, projectVersion: str,
                      signingPrivateKeyPem: str) -> {}:
    """Creates a mute via c2s
    """
    if not session:
        print('WARN: No session for sendMuteViaServer')
        return 6

    fromDomainFull = getFullDomain(fromDomain, fromPort)

    actor = localActorUrl(httpPrefix, fromNickname, fromDomainFull)
    handle = replaceUsersWithAt(actor)

    newMuteJson = {
        "@context": "https://www.w3.org/ns/activitystreams",
        'type': 'Ignore',
        'actor': actor,
        'to': [actor],
        'object': mutedUrl
    }

    # lookup the inbox for the To handle
    wfRequest = webfingerHandle(session, handle, httpPrefix,
                                cachedWebfingers,
                                fromDomain, projectVersion, debug, False,
                                signingPrivateKeyPem)
    if not wfRequest:
        if debug:
            print('DEBUG: mute webfinger failed for ' + handle)
        return 1
    if not isinstance(wfRequest, dict):
        print('WARN: mute Webfinger for ' + handle +
              ' did not return a dict. ' + str(wfRequest))
        return 1

    postToBox = 'outbox'

    # get the actor inbox for the To handle
    (inboxUrl, pubKeyId, pubKey,
     fromPersonId, sharedInbox, avatarUrl,
     displayName) = getPersonBox(signingPrivateKeyPem,
                                 baseDir, session, wfRequest,
                                 personCache,
                                 projectVersion, httpPrefix, fromNickname,
                                 fromDomain, postToBox, 72652)

    if not inboxUrl:
        if debug:
            print('DEBUG: mute no ' + postToBox + ' was found for ' + handle)
        return 3
    if not fromPersonId:
        if debug:
            print('DEBUG: mute no actor was found for ' + handle)
        return 4

    authHeader = createBasicAuthHeader(fromNickname, password)

    headers = {
        'host': fromDomain,
        'Content-type': 'application/json',
        'Authorization': authHeader
    }
    postResult = postJson(httpPrefix, fromDomainFull,
                          session, newMuteJson, [], inboxUrl,
                          headers, 3, True)
    if postResult is None:
        print('WARN: mute unable to post')

    if debug:
        print('DEBUG: c2s POST mute success')

    return newMuteJson


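# Illustrative sketch (not part of the original code): mute a post via c2s
# and then undo the mute using the two functions around this comment. The
# post url and account details are assumptions for the example only.
def _exampleMuteThenUnmute(baseDir: str, session, password: str,
                           cachedWebfingers: {}, personCache: {},
                           signingPrivateKeyPem: str) -> None:
    mutedUrl = 'https://other.site/users/bob/statuses/987654'
    sendMuteViaServer(baseDir, session, 'alice', password,
                      'example.org', 443, 'https', mutedUrl,
                      cachedWebfingers, personCache,
                      False, __version__, signingPrivateKeyPem)
    sendUndoMuteViaServer(baseDir, session, 'alice', password,
                          'example.org', 443, 'https', mutedUrl,
                          cachedWebfingers, personCache,
                          False, __version__, signingPrivateKeyPem)

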
def sendUndoMuteViaServer(baseDir: str, session,
                          fromNickname: str, password: str,
                          fromDomain: str, fromPort: int,
                          httpPrefix: str, mutedUrl: str,
                          cachedWebfingers: {}, personCache: {},
                          debug: bool, projectVersion: str,
                          signingPrivateKeyPem: str) -> {}:
    """Undoes a mute via c2s
    """
    if not session:
        print('WARN: No session for sendUndoMuteViaServer')
        return 6

    fromDomainFull = getFullDomain(fromDomain, fromPort)

    actor = localActorUrl(httpPrefix, fromNickname, fromDomainFull)
    handle = replaceUsersWithAt(actor)

    undoMuteJson = {
        "@context": "https://www.w3.org/ns/activitystreams",
        'type': 'Undo',
        'actor': actor,
        'to': [actor],
        'object': {
            'type': 'Ignore',
            'actor': actor,
            'to': [actor],
            'object': mutedUrl
        }
    }

    # lookup the inbox for the To handle
    wfRequest = webfingerHandle(session, handle, httpPrefix,
                                cachedWebfingers,
                                fromDomain, projectVersion, debug, False,
                                signingPrivateKeyPem)
    if not wfRequest:
        if debug:
            print('DEBUG: undo mute webfinger failed for ' + handle)
        return 1
    if not isinstance(wfRequest, dict):
        print('WARN: undo mute Webfinger for ' + handle +
              ' did not return a dict. ' + str(wfRequest))
        return 1

    postToBox = 'outbox'

    # get the actor inbox for the To handle
    (inboxUrl, pubKeyId, pubKey,
     fromPersonId, sharedInbox, avatarUrl,
     displayName) = getPersonBox(signingPrivateKeyPem,
                                 baseDir, session, wfRequest,
                                 personCache,
                                 projectVersion, httpPrefix, fromNickname,
                                 fromDomain, postToBox, 72652)

    if not inboxUrl:
        if debug:
            print('DEBUG: undo mute no ' + postToBox +
                  ' was found for ' + handle)
        return 3
    if not fromPersonId:
        if debug:
            print('DEBUG: undo mute no actor was found for ' + handle)
        return 4

    authHeader = createBasicAuthHeader(fromNickname, password)

    headers = {
        'host': fromDomain,
        'Content-type': 'application/json',
        'Authorization': authHeader
    }
    postResult = postJson(httpPrefix, fromDomainFull,
                          session, undoMuteJson, [], inboxUrl,
                          headers, 3, True)
    if postResult is None:
        print('WARN: undo mute unable to post')

    if debug:
        print('DEBUG: c2s POST undo mute success')

    return undoMuteJson

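
# Illustrative undo-mute call, with the same assumed argument values as the
# mute example above. The posted activity is an 'Undo' wrapping the original
# 'Ignore'.
#
# undoMuteActivity = sendUndoMuteViaServer(baseDir, session,
#                                          'alice', 'examplepassword',
#                                          'example.com', 443, 'https',
#                                          'https://remote.example/users/bob/statuses/1',
#                                          cachedWebfingers, personCache,
#                                          False, __version__,
#                                          signingPrivateKeyPem)
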
def sendUndoBlockViaServer(baseDir: str, session,
                           fromNickname: str, password: str,
                           fromDomain: str, fromPort: int,
                           httpPrefix: str, blockedUrl: str,
                           cachedWebfingers: {}, personCache: {},
                           debug: bool, projectVersion: str,
                           signingPrivateKeyPem: str) -> {}:
    """Undoes a block via c2s
    """
    if not session:
        print('WARN: No session for sendUndoBlockViaServer')
        return 6

    fromDomainFull = getFullDomain(fromDomain, fromPort)

    blockActor = localActorUrl(httpPrefix, fromNickname, fromDomainFull)
    toUrl = 'https://www.w3.org/ns/activitystreams#Public'
    ccUrl = blockActor + '/followers'

    newBlockJson = {
        "@context": "https://www.w3.org/ns/activitystreams",
        'type': 'Undo',
        'actor': blockActor,
        'object': {
            'type': 'Block',
            'actor': blockActor,
            'object': blockedUrl,
            'to': [toUrl],
            'cc': [ccUrl]
        }
    }

    handle = httpPrefix + '://' + fromDomainFull + '/@' + fromNickname

    # lookup the inbox for the To handle
    wfRequest = webfingerHandle(session, handle, httpPrefix,
                                cachedWebfingers,
                                fromDomain, projectVersion, debug, False,
                                signingPrivateKeyPem)
    if not wfRequest:
        if debug:
            print('DEBUG: unblock webfinger failed for ' + handle)
        return 1
    if not isinstance(wfRequest, dict):
        print('WARN: unblock webfinger for ' + handle +
              ' did not return a dict. ' + str(wfRequest))
        return 1

    postToBox = 'outbox'

    # get the actor inbox for the To handle
    (inboxUrl, pubKeyId, pubKey,
     fromPersonId, sharedInbox, avatarUrl,
     displayName) = getPersonBox(signingPrivateKeyPem,
                                 baseDir, session, wfRequest, personCache,
                                 projectVersion, httpPrefix, fromNickname,
                                 fromDomain, postToBox, 53892)

    if not inboxUrl:
        if debug:
            print('DEBUG: unblock no ' + postToBox +
                  ' was found for ' + handle)
        return 3
    if not fromPersonId:
        if debug:
            print('DEBUG: unblock no actor was found for ' + handle)
        return 4

    authHeader = createBasicAuthHeader(fromNickname, password)

    headers = {
        'host': fromDomain,
        'Content-type': 'application/json',
        'Authorization': authHeader
    }
    postResult = postJson(httpPrefix, fromDomainFull,
                          session, newBlockJson, [], inboxUrl,
                          headers, 30, True)
    if not postResult:
        print('WARN: unblock unable to post')

    if debug:
        print('DEBUG: c2s POST unblock success')

    return newBlockJson

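
# Illustrative unblock call (argument values are assumptions, not taken from
# this module). The posted activity is an 'Undo' wrapping a 'Block' of the
# blocked actor or post url.
#
# undoBlockActivity = sendUndoBlockViaServer(baseDir, session,
#                                            'alice', 'examplepassword',
#                                            'example.com', 443, 'https',
#                                            'https://remote.example/users/spammer',
#                                            cachedWebfingers, personCache,
#                                            False, __version__,
#                                            signingPrivateKeyPem)
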
def postIsMuted(baseDir: str, nickname: str, domain: str,
                postJsonObject: {}, messageId: str) -> bool:
    """ Returns true if the given post is muted
    """
    isMuted = postJsonObject.get('muted')
    if isMuted is True or isMuted is False:
        return isMuted

    postDir = acctDir(baseDir, nickname, domain)
    muteFilename = \
        postDir + '/inbox/' + messageId.replace('/', '#') + '.json.muted'
    if os.path.isfile(muteFilename):
        return True
    muteFilename = \
        postDir + '/outbox/' + messageId.replace('/', '#') + '.json.muted'
    if os.path.isfile(muteFilename):
        return True
    muteFilename = \
        baseDir + '/accounts/cache/announce/' + nickname + \
        '/' + messageId.replace('/', '#') + '.json.muted'
    if os.path.isfile(muteFilename):
        return True
    return False

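
# Illustrative mute check (argument values are assumptions). The 'muted'
# field on the post itself takes precedence; otherwise a matching
# '.json.muted' marker file in the inbox, outbox or announce cache means
# the post is muted.
#
# if postIsMuted(baseDir, 'alice', 'example.com', postJsonObject,
#                'https://example.com/users/alice/statuses/1234'):
#     print('post is muted')
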
def c2sBoxJson(baseDir: str, session,
               nickname: str, password: str,
               domain: str, port: int,
               httpPrefix: str,
               boxName: str, pageNumber: int,
               debug: bool, signingPrivateKeyPem: str) -> {}:
    """C2S Authenticated GET of posts for a timeline
    """
    if not session:
        print('WARN: No session for c2sBoxJson')
        return None

    domainFull = getFullDomain(domain, port)
    actor = localActorUrl(httpPrefix, nickname, domainFull)

    authHeader = createBasicAuthHeader(nickname, password)

    profileStr = 'https://www.w3.org/ns/activitystreams'
    headers = {
        'host': domain,
        'Content-type': 'application/json',
        'Authorization': authHeader,
        'Accept': 'application/ld+json; profile="' + profileStr + '"'
    }

    # GET json
    url = actor + '/' + boxName + '?page=' + str(pageNumber)
    boxJson = getJson(signingPrivateKeyPem, session, url, headers, None,
                      debug, __version__, httpPrefix, None)

    if boxJson is not None and debug:
        print('DEBUG: GET c2sBoxJson success')

    return boxJson

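
# Illustrative authenticated timeline fetch (argument values are
# assumptions). Returns the requested page of the named box as a dict,
# or None on failure.
#
# inboxPage = c2sBoxJson(baseDir, session,
#                        'alice', 'examplepassword',
#                        'example.com', 443, 'https',
#                        'inbox', 1,
#                        False, signingPrivateKeyPem)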