2020-04-03 16:27:34 +00:00
|
|
|
__filename__ = "inbox.py"
|
|
|
|
__author__ = "Bob Mottram"
|
|
|
|
__license__ = "AGPL3+"
|
2021-01-26 10:07:42 +00:00
|
|
|
__version__ = "1.2.0"
|
2020-04-03 16:27:34 +00:00
|
|
|
__maintainer__ = "Bob Mottram"
|
|
|
|
__email__ = "bob@freedombone.net"
|
|
|
|
__status__ = "Production"
|
2019-06-28 21:59:54 +00:00
|
|
|
|
|
|
|
import json
|
|
|
|
import os
|
2019-06-29 10:08:59 +00:00
|
|
|
import datetime
|
2019-07-04 12:23:53 +00:00
|
|
|
import time
|
2021-01-04 19:02:24 +00:00
|
|
|
from linked_data_sig import verifyJsonSignature
|
2021-01-05 10:48:22 +00:00
|
|
|
from utils import getConfigParam
|
2020-12-23 10:57:44 +00:00
|
|
|
from utils import hasUsersPath
|
2020-12-21 12:11:45 +00:00
|
|
|
from utils import validPostDate
|
2020-12-16 10:48:40 +00:00
|
|
|
from utils import getFullDomain
|
2020-08-26 17:41:38 +00:00
|
|
|
from utils import isEventPost
|
2020-08-23 11:13:35 +00:00
|
|
|
from utils import removeIdEnding
|
2020-06-11 12:26:15 +00:00
|
|
|
from utils import getProtocolPrefixes
|
2020-02-25 15:24:29 +00:00
|
|
|
from utils import isBlogPost
|
2020-02-04 19:34:52 +00:00
|
|
|
from utils import removeAvatarFromCache
|
2019-12-12 17:34:31 +00:00
|
|
|
from utils import isPublicPost
|
2019-11-25 14:05:59 +00:00
|
|
|
from utils import getCachedPostFilename
|
|
|
|
from utils import removePostFromCache
|
2019-07-02 10:39:55 +00:00
|
|
|
from utils import urlPermitted
|
2019-07-04 10:02:56 +00:00
|
|
|
from utils import createInboxQueueDir
|
2019-07-06 13:49:25 +00:00
|
|
|
from utils import getStatusNumber
|
2019-07-09 14:20:23 +00:00
|
|
|
from utils import getDomainFromActor
|
|
|
|
from utils import getNicknameFromActor
|
2019-07-11 12:29:31 +00:00
|
|
|
from utils import locatePost
|
2019-07-14 16:37:01 +00:00
|
|
|
from utils import deletePost
|
2019-08-12 18:02:29 +00:00
|
|
|
from utils import removeModerationPostFromIndex
|
2019-10-22 11:55:06 +00:00
|
|
|
from utils import loadJson
|
|
|
|
from utils import saveJson
|
2020-09-05 16:13:25 +00:00
|
|
|
from utils import updateLikesCollection
|
|
|
|
from utils import undoLikesCollectionEntry
|
2020-12-22 10:30:52 +00:00
|
|
|
from categories import getHashtagCategories
|
|
|
|
from categories import setHashtagCategory
|
2019-07-04 12:23:53 +00:00
|
|
|
from httpsig import verifyPostHeaders
|
|
|
|
from session import createSession
|
2019-07-04 19:34:28 +00:00
|
|
|
from session import getJson
|
2019-07-04 12:23:53 +00:00
|
|
|
from follow import receiveFollowRequest
|
2019-07-08 18:55:39 +00:00
|
|
|
from follow import getFollowersOfActor
|
2020-12-22 13:57:24 +00:00
|
|
|
from follow import unfollowerOfAccount
|
2019-07-04 14:36:29 +00:00
|
|
|
from pprint import pprint
|
2019-07-04 19:34:28 +00:00
|
|
|
from cache import getPersonFromCache
|
2019-07-04 20:25:19 +00:00
|
|
|
from cache import storePersonInCache
|
2019-07-06 15:17:21 +00:00
|
|
|
from acceptreject import receiveAcceptReject
|
2019-11-17 14:02:59 +00:00
|
|
|
from bookmarks import updateBookmarksCollection
|
2019-11-17 14:01:49 +00:00
|
|
|
from bookmarks import undoBookmarksCollectionEntry
|
2019-07-14 20:12:05 +00:00
|
|
|
from blocking import isBlocked
|
2019-10-17 13:18:21 +00:00
|
|
|
from blocking import isBlockedDomain
|
2021-02-15 22:26:25 +00:00
|
|
|
from blocking import brochModeLapses
|
2019-07-14 20:50:27 +00:00
|
|
|
from filters import isFiltered
|
2020-12-22 13:57:24 +00:00
|
|
|
from utils import updateAnnounceCollection
|
|
|
|
from utils import undoAnnounceCollectionEntry
|
2021-01-31 11:05:17 +00:00
|
|
|
from utils import dangerousMarkup
|
2019-08-16 17:19:23 +00:00
|
|
|
from httpsig import messageContentDigest
|
2020-08-25 19:45:15 +00:00
|
|
|
from posts import validContentWarning
|
2019-09-29 09:15:10 +00:00
|
|
|
from posts import downloadAnnounce
|
2019-10-03 16:22:34 +00:00
|
|
|
from posts import isDM
|
2019-10-03 16:37:25 +00:00
|
|
|
from posts import isReply
|
2020-08-27 17:40:09 +00:00
|
|
|
from posts import isMuted
|
2019-10-22 20:30:43 +00:00
|
|
|
from posts import isImageMedia
|
2019-10-04 12:39:46 +00:00
|
|
|
from posts import sendSignedJson
|
2019-11-29 22:02:16 +00:00
|
|
|
from posts import sendToFollowersThread
|
2020-11-28 10:54:48 +00:00
|
|
|
from webapp_post import individualPostAsHtml
|
2019-11-29 18:46:21 +00:00
|
|
|
from question import questionUpdateVotes
|
2020-01-15 10:56:39 +00:00
|
|
|
from media import replaceYouTube
|
2020-05-02 11:08:38 +00:00
|
|
|
from git import isGitPatch
|
|
|
|
from git import receiveGitPatch
|
2020-07-03 19:20:31 +00:00
|
|
|
from followingCalendar import receivingCalendarEvents
|
2020-08-20 16:51:48 +00:00
|
|
|
from happening import saveEventPost
|
2020-11-09 19:41:01 +00:00
|
|
|
from delete import removeOldHashtags
|
2020-12-13 11:27:12 +00:00
|
|
|
from follow import isFollowingActor
|
2020-12-22 10:30:52 +00:00
|
|
|
from categories import guessHashtagCategory
|
2021-01-05 20:11:16 +00:00
|
|
|
from context import hasValidContext
|
2020-12-05 11:11:32 +00:00
|
|
|
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
def storeHashTags(baseDir: str, nickname: str, postJsonObject: {}) -> None:
    """Extracts hashtags from an incoming post and updates the
    relevant tags files.

    Each line appended to a tag file has the form:
    <days since epoch> <nickname> <post url with / replaced by #>
    Only public posts carrying a list of tags are indexed.
    """
    if not isPublicPost(postJsonObject):
        return
    if not postJsonObject.get('object'):
        return
    if not isinstance(postJsonObject['object'], dict):
        return
    if not postJsonObject['object'].get('tag'):
        return
    if not postJsonObject.get('id'):
        return
    if not isinstance(postJsonObject['object']['tag'], list):
        return
    tagsDir = baseDir + '/tags'

    # add tags directory if it doesn't exist
    if not os.path.isdir(tagsDir):
        print('Creating tags directory')
        os.mkdir(tagsDir)

    hashtagCategories = getHashtagCategories(baseDir)

    for tag in postJsonObject['object']['tag']:
        if not tag.get('type'):
            continue
        if not isinstance(tag['type'], str):
            continue
        if tag['type'] != 'Hashtag':
            continue
        if not tag.get('name'):
            continue
        tagName = tag['name'].replace('#', '').strip()
        tagsFilename = tagsDir + '/' + tagName + '.txt'
        postUrl = removeIdEnding(postJsonObject['id'])
        # '/' is not filename-safe, so store the url with '#' instead
        postUrl = postUrl.replace('/', '#')
        daysDiff = datetime.datetime.utcnow() - datetime.datetime(1970, 1, 1)
        daysSinceEpoch = daysDiff.days
        tagline = str(daysSinceEpoch) + ' ' + nickname + ' ' + postUrl + '\n'
        if not os.path.isfile(tagsFilename):
            # use a context manager so the handle is always closed
            # (previously the file object could leak)
            with open(tagsFilename, 'w+') as tagsFile:
                tagsFile.write(tagline)
        else:
            # read the existing file once, closing the handle afterwards
            # (previously open(...).read() leaked the file handle)
            with open(tagsFilename) as tagsFile:
                content = tagsFile.read()
            if postUrl not in content:
                try:
                    if tagline not in content:
                        # newest entries are prepended to the file
                        with open(tagsFilename, 'r+') as tagsFile:
                            tagsFile.seek(0, 0)
                            tagsFile.write(tagline + content)
                except Exception as e:
                    print('WARN: Failed to write entry to tags file ' +
                          tagsFilename + ' ' + str(e))
            removeOldHashtags(baseDir, 3)

        # automatically assign a category to the tag if possible
        categoryFilename = tagsDir + '/' + tagName + '.category'
        if not os.path.isfile(categoryFilename):
            categoryStr = \
                guessHashtagCategory(tagName, hashtagCategories)
            if categoryStr:
                setHashtagCategory(baseDir, tagName, categoryStr)
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _inboxStorePostToHtmlCache(recentPostsCache: {}, maxRecentPosts: int,
                               translate: {},
                               baseDir: str, httpPrefix: str,
                               session, cachedWebfingers: {}, personCache: {},
                               nickname: str, domain: str, port: int,
                               postJsonObject: {},
                               allowDeletion: bool, boxname: str,
                               showPublishedDateOnly: bool,
                               peertubeInstances: [],
                               allowLocalNetworkAccess: bool) -> None:
    """Converts the json post into html and stores it in a cache
    This enables the post to be quickly displayed later
    """
    # page number is not meaningful when pre-rendering a single post
    pageNumber = -999
    # avatar is resolved later by the renderer
    avatarUrl = None
    # anything that isn't an events or outbox timeline is rendered
    # as an inbox post
    if boxname != 'tlevents' and boxname != 'outbox':
        boxname = 'inbox'

    # delegate the actual rendering; the result is cached as a side
    # effect of individualPostAsHtml (first positional True presumably
    # enables caching - confirm against webapp_post)
    individualPostAsHtml(True, recentPostsCache, maxRecentPosts,
                         translate, pageNumber,
                         baseDir, session, cachedWebfingers,
                         personCache,
                         nickname, domain, port, postJsonObject,
                         avatarUrl, True, allowDeletion,
                         httpPrefix, __version__, boxname, None,
                         showPublishedDateOnly,
                         peertubeInstances, allowLocalNetworkAccess,
                         not isDM(postJsonObject),
                         True, True, False, True)
|
|
|
|
|
|
|
|
|
|
|
|
def validInbox(baseDir: str, nickname: str, domain: str) -> bool:
    """Checks whether files were correctly saved to the inbox

    Returns False if any file in the account's inbox directory looks
    like an unprocessed queue item (contains a postNickname field).
    """
    # strip any port number from the domain
    if ':' in domain:
        domain = domain.split(':')[0]
    inboxDir = baseDir+'/accounts/' + nickname + '@' + domain + '/inbox'
    # a missing inbox directory means nothing was saved incorrectly
    if not os.path.isdir(inboxDir):
        return True
    for subdir, dirs, files in os.walk(inboxDir):
        for f in files:
            filename = os.path.join(subdir, f)
            if not os.path.isfile(filename):
                print('filename: ' + filename)
                return False
            # queue items contain a postNickname field which should
            # never appear in a correctly saved inbox post.
            # Use a context manager so the file handle is closed
            # (previously open(...).read() leaked the handle).
            with open(filename) as inboxFile:
                if 'postNickname' in inboxFile.read():
                    print('queue file incorrectly saved to ' + filename)
                    return False
        # only inspect the top level of the inbox directory
        break
    return True
|
2019-07-18 11:35:48 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
|
|
|
def validInboxFilenames(baseDir: str, nickname: str, domain: str,
                        expectedDomain: str, expectedPort: int) -> bool:
    """Used by unit tests to check that the port number gets appended to
    domain names within saved post filenames
    """
    # strip any port from the account's domain
    bareDomain = domain.split(':')[0] if ':' in domain else domain
    accountInbox = (baseDir + '/accounts/' + nickname + '@' +
                    bareDomain + '/inbox')
    # no inbox directory means there is nothing to object to
    if not os.path.isdir(accountInbox):
        return True
    expectedStr = expectedDomain + ':' + str(expectedPort)
    for currDir, _, entries in os.walk(accountInbox):
        for entry in entries:
            entryPath = os.path.join(currDir, entry)
            if not os.path.isfile(entryPath):
                print('filename: ' + entryPath)
                return False
            # every saved filename must embed domain:port
            if expectedStr not in entryPath:
                print('Expected: ' + expectedStr)
                print('Invalid filename: ' + entryPath)
                return False
        # only the top level of the inbox is inspected
        break
    return True
|
2019-07-18 09:26:47 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
|
|
|
def getPersonPubKey(baseDir: str, session, personUrl: str,
                    personCache: {}, debug: bool,
                    projectVersion: str, httpPrefix: str,
                    domain: str, onionDomain: str) -> str:
    """Returns the public key PEM for the actor at personUrl,
    consulting the person cache before fetching over the network.
    Returns None when no key could be obtained.
    """
    if not personUrl:
        return None
    # normalise a key id down to the plain actor url
    personUrl = personUrl.replace('#main-key', '')
    if personUrl.endswith('/users/inbox'):
        if debug:
            print('DEBUG: Obtaining public key for shared inbox')
        personUrl = personUrl.replace('/users/inbox', '/inbox')

    # try the cache first
    personJson = \
        getPersonFromCache(baseDir, personUrl, personCache, True)
    if not personJson:
        if debug:
            print('DEBUG: Obtaining public key for ' + personUrl)
        # when the target is an onion address route via the onion domain
        personDomain = domain
        if onionDomain and '.onion/' in personUrl:
            personDomain = onionDomain
        profileStr = 'https://www.w3.org/ns/activitystreams'
        asHeader = {
            'Accept': 'application/activity+json; profile="' + profileStr + '"'
        }
        personJson = \
            getJson(session, personUrl, asHeader, None, projectVersion,
                    httpPrefix, personDomain)
        if not personJson:
            return None

    # the key may be nested under publicKey or sit at the top level
    pubKey = None
    keySection = personJson.get('publicKey')
    if keySection:
        if keySection.get('publicKeyPem'):
            pubKey = keySection['publicKeyPem']
    elif personJson.get('publicKeyPem'):
        pubKey = personJson['publicKeyPem']

    if not pubKey and debug:
        print('DEBUG: Public key not found for ' + personUrl)

    # cache the actor document even if no key was found
    storePersonInCache(baseDir, personUrl, personJson, personCache, True)
    return pubKey
|
2019-06-28 21:59:54 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2019-07-02 15:07:27 +00:00
|
|
|
def inboxMessageHasParams(messageJson: {}) -> bool:
    """Checks whether an incoming message contains expected parameters
    """
    # every message must carry a truthy actor, type and object
    if not all(messageJson.get(field)
               for field in ('actor', 'type', 'object')):
        return False

    actorValue = messageJson['actor']
    if not isinstance(actorValue, str):
        print('WARN: actor should be a string, but is actually: ' +
              str(actorValue))
        return False

    typeValue = messageJson['type']
    if not isinstance(typeValue, str):
        print('WARN: type from ' + str(actorValue) +
              ' should be a string, but is actually: ' +
              str(typeValue))
        return False

    objectValue = messageJson['object']
    if not isinstance(objectValue, (dict, str)):
        print('WARN: object from ' + str(actorValue) +
              ' should be a dict or string, but is actually: ' +
              str(objectValue))
        return False

    # certain activity types may legitimately omit the 'to' list
    if not messageJson.get('to'):
        if typeValue not in ('Like', 'Follow', 'Join', 'Request',
                             'Accept', 'Capability', 'Undo'):
            return False
    return True
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
|
|
|
def inboxPermittedMessage(domain: str, messageJson: {},
                          federationList: []) -> bool:
    """ check that we are receiving from a permitted domain
    """
    sendingActor = messageJson.get('actor')
    if not sendingActor:
        return False

    # always allow the local domain
    if domain in sendingActor:
        return True

    if not urlPermitted(sendingActor, federationList):
        return False

    # these activity types are accepted from any permitted sender
    if messageJson['type'] not in ('Follow', 'Join', 'Like',
                                   'Delete', 'Announce'):
        postObject = messageJson.get('object')
        if not postObject:
            return True
        if not isinstance(postObject, dict):
            return False
        replyId = postObject.get('inReplyTo')
        if replyId:
            # a reply must point at a permitted url too
            if not isinstance(replyId, str):
                return False
            if not urlPermitted(replyId, federationList):
                return False

    return True
|
2019-06-29 10:08:59 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
|
|
|
def savePostToInboxQueue(baseDir: str, httpPrefix: str,
                         nickname: str, domain: str,
                         postJsonObject: {},
                         originalPostJsonObject: {},
                         messageBytes: str,
                         httpHeaders: {},
                         postPath: str, debug: bool) -> str:
    """Saves the given json to the inbox queue for the person
    keyId specifies the actor sending the post

    Returns the queue item filename on success, or None when the
    message is rejected (too long, blocked, filtered or malformed).
    """
    # reject oversized messages before doing any other work
    if len(messageBytes) > 10240:
        print('WARN: inbox message too long ' +
              str(len(messageBytes)) + ' bytes')
        return None
    # keep the domain including any port for constructing post ids
    originalDomain = domain
    if ':' in domain:
        domain = domain.split(':')[0]

    # block at the earliest stage possible, which means the data
    # isn't written to file
    postNickname = None
    postDomain = None
    actor = None
    if postJsonObject.get('actor'):
        if not isinstance(postJsonObject['actor'], str):
            return None
        actor = postJsonObject['actor']
        postNickname = getNicknameFromActor(postJsonObject['actor'])
        if not postNickname:
            print('No post Nickname in actor ' + postJsonObject['actor'])
            return None
        postDomain, postPort = getDomainFromActor(postJsonObject['actor'])
        if not postDomain:
            if debug:
                pprint(postJsonObject)
            print('No post Domain in actor')
            return None
        # reject posts from accounts blocked by this recipient
        if isBlocked(baseDir, nickname, domain, postNickname, postDomain):
            if debug:
                print('DEBUG: post from ' + postNickname + ' blocked')
            return None
        postDomain = getFullDomain(postDomain, postPort)

    if postJsonObject.get('object'):
        if isinstance(postJsonObject['object'], dict):
            if postJsonObject['object'].get('inReplyTo'):
                if isinstance(postJsonObject['object']['inReplyTo'], str):
                    inReplyTo = \
                        postJsonObject['object']['inReplyTo']
                    replyDomain, replyPort = \
                        getDomainFromActor(inReplyTo)
                    # reject replies to blocked domains or accounts
                    if isBlockedDomain(baseDir, replyDomain):
                        print('WARN: post contains reply from ' +
                              str(actor) +
                              ' to a blocked domain: ' + replyDomain)
                        return None
                    else:
                        replyNickname = \
                            getNicknameFromActor(inReplyTo)
                        if replyNickname and replyDomain:
                            if isBlocked(baseDir, nickname, domain,
                                         replyNickname, replyDomain):
                                print('WARN: post contains reply from ' +
                                      str(actor) +
                                      ' to a blocked account: ' +
                                      replyNickname + '@' + replyDomain)
                                return None
            if postJsonObject['object'].get('content'):
                if isinstance(postJsonObject['object']['content'], str):
                    # apply this account's word/phrase filters
                    if isFiltered(baseDir, nickname, domain,
                                  postJsonObject['object']['content']):
                        print('WARN: post was filtered out due to content')
                        return None
    originalPostId = None
    if postJsonObject.get('id'):
        if not isinstance(postJsonObject['id'], str):
            return None
        originalPostId = removeIdEnding(postJsonObject['id'])

    currTime = datetime.datetime.utcnow()

    postId = None
    if postJsonObject.get('id'):
        postId = removeIdEnding(postJsonObject['id'])
        published = currTime.strftime("%Y-%m-%dT%H:%M:%SZ")
    if not postId:
        # no id was supplied, so fabricate one from the actor or
        # from this instance's domain
        statusNumber, published = getStatusNumber()
        if actor:
            postId = actor + '/statuses/' + statusNumber
        else:
            postId = httpPrefix + '://' + originalDomain + \
                '/users/' + nickname + '/statuses/' + statusNumber

    # NOTE: don't change postJsonObject['id'] before signature check

    inboxQueueDir = createInboxQueueDir(nickname, domain, baseDir)

    # filenames use '#' in place of '/' within the post id
    handle = nickname + '@' + domain
    destination = baseDir + '/accounts/' + \
        handle + '/inbox/' + postId.replace('/', '#') + '.json'
    filename = inboxQueueDir + '/' + postId.replace('/', '#') + '.json'

    # posts arriving at the shared inbox are marked and attributed
    # to the instance domain rather than an account nickname
    sharedInboxItem = False
    if nickname == 'inbox':
        nickname = originalDomain
        sharedInboxItem = True

    # digest of the raw message bytes, used later for signature checks
    digestStartTime = time.time()
    digest = messageContentDigest(messageBytes)
    timeDiffStr = str(int((time.time() - digestStartTime) * 1000))
    if debug:
        # zero-pad the millisecond timing for aligned log output
        while len(timeDiffStr) < 6:
            timeDiffStr = '0' + timeDiffStr
        print('DIGEST|' + timeDiffStr + '|' + filename)

    newQueueItem = {
        'originalId': originalPostId,
        'id': postId,
        'actor': actor,
        'nickname': nickname,
        'domain': domain,
        'postNickname': postNickname,
        'postDomain': postDomain,
        'sharedInbox': sharedInboxItem,
        'published': published,
        'httpHeaders': httpHeaders,
        'path': postPath,
        'post': postJsonObject,
        'original': originalPostJsonObject,
        'digest': digest,
        'filename': filename,
        'destination': destination
    }

    if debug:
        print('Inbox queue item created')
    saveJson(newQueueItem, filename)
    return filename
|
2019-07-04 12:23:53 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _inboxPostRecipientsAdd(baseDir: str, httpPrefix: str, toList: [],
                            recipientsDict: {},
                            domainMatch: str, domain: str,
                            actor: str, debug: bool) -> (bool, {}):
    """Given a list of post recipients (toList) from 'to' or 'cc' parameters
    populate a recipientsDict with the handle for each local account.

    Returns a tuple of (whether followers are among the recipients,
    the updated recipientsDict).
    Note: the previous return annotation (-> bool) was wrong - this
    function has always returned a tuple.
    """
    followerRecipients = False
    for recipient in toList:
        if not recipient:
            continue
        # is this a to a local account?
        if domainMatch in recipient:
            # get the handle for the local account
            nickname = recipient.split(domainMatch)[1]
            handle = nickname + '@' + domain
            if os.path.isdir(baseDir + '/accounts/' + handle):
                recipientsDict[handle] = None
            else:
                if debug:
                    print('DEBUG: ' + baseDir + '/accounts/' +
                          handle + ' does not exist')
        else:
            if debug:
                print('DEBUG: ' + recipient + ' is not local to ' +
                      domainMatch)
                print(str(toList))
        # addresses ending in 'followers' indicate that the post should
        # also be delivered to the followers of the sending actor
        if recipient.endswith('followers'):
            if debug:
                print('DEBUG: followers detected as post recipients')
            followerRecipients = True
    return followerRecipients, recipientsDict
|
2019-07-08 22:12:24 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _inboxPostRecipients(baseDir: str, postJsonObject: {},
                         httpPrefix: str, domain: str, port: int,
                         debug: bool) -> ([], []):
    """Returns dictionaries containing the recipients of the given post
    The shared dictionary contains followers

    Recipients are gathered from the 'to' and 'cc' fields of both the
    wrapper activity and its inner object.
    """
    recipientsDict = {}
    recipientsDictFollowers = {}

    if not postJsonObject.get('actor'):
        if debug:
            pprint(postJsonObject)
            print('WARNING: inbox post has no actor')
        return recipientsDict, recipientsDictFollowers

    # split off any port, then build a domain:port form for matching
    # local actor urls
    if ':' in domain:
        domain = domain.split(':')[0]
    domainBase = domain
    domain = getFullDomain(domain, port)
    domainMatch = '/' + domain + '/users/'

    actor = postJsonObject['actor']
    # first get any specific people which the post is addressed to

    followerRecipients = False
    if postJsonObject.get('object'):
        if isinstance(postJsonObject['object'], dict):
            # 'to' on the inner object; may be a list or a single string
            if postJsonObject['object'].get('to'):
                if isinstance(postJsonObject['object']['to'], list):
                    recipientsList = postJsonObject['object']['to']
                else:
                    recipientsList = [postJsonObject['object']['to']]
                if debug:
                    print('DEBUG: resolving "to"')
                includesFollowers, recipientsDict = \
                    _inboxPostRecipientsAdd(baseDir, httpPrefix,
                                            recipientsList,
                                            recipientsDict,
                                            domainMatch, domainBase,
                                            actor, debug)
                if includesFollowers:
                    followerRecipients = True
            else:
                if debug:
                    print('DEBUG: inbox post has no "to"')

            # 'cc' on the inner object
            if postJsonObject['object'].get('cc'):
                if isinstance(postJsonObject['object']['cc'], list):
                    recipientsList = postJsonObject['object']['cc']
                else:
                    recipientsList = [postJsonObject['object']['cc']]
                includesFollowers, recipientsDict = \
                    _inboxPostRecipientsAdd(baseDir, httpPrefix,
                                            recipientsList,
                                            recipientsDict,
                                            domainMatch, domainBase,
                                            actor, debug)
                if includesFollowers:
                    followerRecipients = True
            else:
                if debug:
                    print('DEBUG: inbox post has no cc')
        else:
            # object is a url reference rather than an embedded post
            if debug:
                if isinstance(postJsonObject['object'], str):
                    if '/statuses/' in postJsonObject['object']:
                        print('DEBUG: inbox item is a link to a post')
                    else:
                        if '/users/' in postJsonObject['object']:
                            print('DEBUG: inbox item is a link to an actor')

    # 'to' on the wrapper activity
    if postJsonObject.get('to'):
        if isinstance(postJsonObject['to'], list):
            recipientsList = postJsonObject['to']
        else:
            recipientsList = [postJsonObject['to']]
        includesFollowers, recipientsDict = \
            _inboxPostRecipientsAdd(baseDir, httpPrefix,
                                    recipientsList,
                                    recipientsDict,
                                    domainMatch, domainBase,
                                    actor, debug)
        if includesFollowers:
            followerRecipients = True

    # 'cc' on the wrapper activity
    if postJsonObject.get('cc'):
        if isinstance(postJsonObject['cc'], list):
            recipientsList = postJsonObject['cc']
        else:
            recipientsList = [postJsonObject['cc']]
        includesFollowers, recipientsDict = \
            _inboxPostRecipientsAdd(baseDir, httpPrefix,
                                    recipientsList,
                                    recipientsDict,
                                    domainMatch, domainBase,
                                    actor, debug)
        if includesFollowers:
            followerRecipients = True

    if not followerRecipients:
        if debug:
            print('DEBUG: no followers were resolved')
        return recipientsDict, recipientsDictFollowers

    # now resolve the followers
    recipientsDictFollowers = \
        getFollowersOfActor(baseDir, actor, debug)

    return recipientsDict, recipientsDictFollowers
|
2019-07-08 22:12:24 +00:00
|
|
|
|
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _receiveUndoFollow(session, baseDir: str, httpPrefix: str,
|
|
|
|
port: int, messageJson: {},
|
|
|
|
federationList: [],
|
|
|
|
debug: bool) -> bool:
|
2019-07-17 10:34:00 +00:00
|
|
|
if not messageJson['object'].get('actor'):
|
|
|
|
if debug:
|
|
|
|
print('DEBUG: follow request has no actor within object')
|
|
|
|
return False
|
2020-12-23 10:57:44 +00:00
|
|
|
if not hasUsersPath(messageJson['object']['actor']):
|
2019-07-17 10:34:00 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: "users" or "profile" missing ' +
|
|
|
|
'from actor within object')
|
2019-07-17 10:34:00 +00:00
|
|
|
return False
|
|
|
|
if messageJson['object']['actor'] != messageJson['actor']:
|
|
|
|
if debug:
|
|
|
|
print('DEBUG: actors do not match')
|
|
|
|
return False
|
2019-08-15 17:05:22 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
nicknameFollower = \
|
|
|
|
getNicknameFromActor(messageJson['object']['actor'])
|
2019-09-02 09:43:43 +00:00
|
|
|
if not nicknameFollower:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('WARN: unable to find nickname in ' +
|
|
|
|
messageJson['object']['actor'])
|
2019-09-02 09:43:43 +00:00
|
|
|
return False
|
2020-04-03 16:27:34 +00:00
|
|
|
domainFollower, portFollower = \
|
|
|
|
getDomainFromActor(messageJson['object']['actor'])
|
2020-12-16 10:48:40 +00:00
|
|
|
domainFollowerFull = getFullDomain(domainFollower, portFollower)
|
2020-03-22 21:16:02 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
nicknameFollowing = \
|
|
|
|
getNicknameFromActor(messageJson['object']['object'])
|
2019-09-02 09:43:43 +00:00
|
|
|
if not nicknameFollowing:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('WARN: unable to find nickname in ' +
|
|
|
|
messageJson['object']['object'])
|
2019-09-02 09:43:43 +00:00
|
|
|
return False
|
2020-04-03 16:27:34 +00:00
|
|
|
domainFollowing, portFollowing = \
|
|
|
|
getDomainFromActor(messageJson['object']['object'])
|
2020-12-16 10:48:40 +00:00
|
|
|
domainFollowingFull = getFullDomain(domainFollowing, portFollowing)
|
2019-07-17 10:34:00 +00:00
|
|
|
|
2020-12-22 13:57:24 +00:00
|
|
|
if unfollowerOfAccount(baseDir,
|
|
|
|
nicknameFollowing, domainFollowingFull,
|
|
|
|
nicknameFollower, domainFollowerFull,
|
|
|
|
debug):
|
2020-08-20 12:11:07 +00:00
|
|
|
print(nicknameFollowing + '@' + domainFollowingFull + ': '
|
|
|
|
'Follower ' + nicknameFollower + '@' + domainFollowerFull +
|
|
|
|
' was removed')
|
2019-07-17 11:54:13 +00:00
|
|
|
return True
|
2020-03-22 21:16:02 +00:00
|
|
|
|
2019-07-17 11:54:13 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: Follower ' +
|
|
|
|
nicknameFollower + '@' + domainFollowerFull +
|
|
|
|
' was not removed')
|
2019-07-17 11:54:13 +00:00
|
|
|
return False
|
2019-07-17 10:34:00 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _receiveUndo(session, baseDir: str, httpPrefix: str,
|
|
|
|
port: int, sendThreads: [], postLog: [],
|
|
|
|
cachedWebfingers: {}, personCache: {},
|
|
|
|
messageJson: {}, federationList: [],
|
|
|
|
debug: bool) -> bool:
|
2019-07-17 10:34:00 +00:00
|
|
|
"""Receives an undo request within the POST section of HTTPServer
|
|
|
|
"""
|
|
|
|
if not messageJson['type'].startswith('Undo'):
|
|
|
|
return False
|
2019-07-17 11:24:11 +00:00
|
|
|
if debug:
|
|
|
|
print('DEBUG: Undo activity received')
|
2019-07-17 10:34:00 +00:00
|
|
|
if not messageJson.get('actor'):
|
|
|
|
if debug:
|
|
|
|
print('DEBUG: follow request has no actor')
|
|
|
|
return False
|
2020-12-23 10:57:44 +00:00
|
|
|
if not hasUsersPath(messageJson['actor']):
|
2019-07-17 10:34:00 +00:00
|
|
|
if debug:
|
2020-03-22 21:16:02 +00:00
|
|
|
print('DEBUG: "users" or "profile" missing from actor')
|
2019-07-17 10:34:00 +00:00
|
|
|
return False
|
|
|
|
if not messageJson.get('object'):
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: ' + messageJson['type'] + ' has no object')
|
2019-07-17 10:34:00 +00:00
|
|
|
return False
|
|
|
|
if not isinstance(messageJson['object'], dict):
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: ' + messageJson['type'] + ' object is not a dict')
|
2019-07-17 10:34:00 +00:00
|
|
|
return False
|
|
|
|
if not messageJson['object'].get('type'):
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: ' + messageJson['type'] + ' has no object type')
|
2019-07-17 10:34:00 +00:00
|
|
|
return False
|
|
|
|
if not messageJson['object'].get('object'):
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: ' + messageJson['type'] +
|
|
|
|
' has no object within object')
|
2019-07-17 10:34:00 +00:00
|
|
|
return False
|
|
|
|
if not isinstance(messageJson['object']['object'], str):
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: ' + messageJson['type'] +
|
|
|
|
' object within object is not a string')
|
2019-07-17 10:34:00 +00:00
|
|
|
return False
|
2021-02-08 14:48:37 +00:00
|
|
|
if messageJson['object']['type'] == 'Follow' or \
|
|
|
|
messageJson['object']['type'] == 'Join':
|
2020-12-22 18:06:23 +00:00
|
|
|
return _receiveUndoFollow(session, baseDir, httpPrefix,
|
|
|
|
port, messageJson,
|
|
|
|
federationList, debug)
|
2019-07-17 10:34:00 +00:00
|
|
|
return False
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _receiveEventPost(recentPostsCache: {}, session, baseDir: str,
                      httpPrefix: str, domain: str, port: int,
                      sendThreads: [], postLog: [], cachedWebfingers: {},
                      personCache: {}, messageJson: {}, federationList: [],
                      nickname: str, debug: bool) -> bool:
    """Receive a mobilizon-type event activity
    See https://framagit.org/framasoft/mobilizon/-/blob/
    master/lib/federation/activity_stream/converter/event.ex

    Returns True if the event was stored for the given account.
    """
    if not isEventPost(messageJson):
        # fix: function is annotated -> bool but previously fell through
        # with a bare "return" (None) here
        return False
    print('Receiving event: ' + str(messageJson['object']))
    handle = getFullDomain(nickname + '@' + domain, port)

    # use a filesystem-safe id: strip any trailing /activity and
    # replace path separators
    postId = removeIdEnding(messageJson['id']).replace('/', '#')

    saveEventPost(baseDir, handle, postId, messageJson['object'])
    # fix: previously returned None implicitly despite the bool annotation
    return True
|
|
|
|
|
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _personReceiveUpdate(baseDir: str,
                         domain: str, port: int,
                         updateNickname: str, updateDomain: str,
                         updatePort: int,
                         personJson: {}, personCache: {},
                         debug: bool) -> bool:
    """Changes an actor. eg: avatar or display name change

    Validates that the updated actor id matches the expected
    nickname/domain, that the update comes from a remote domain, and
    that the public key matches the cached one (guarding against
    account-takeover attempts), then refreshes the in-memory and
    on-disk actor caches.
    Returns True if the actor update was accepted and stored.
    """
    # NOTE(review): this print is unconditional (not gated on debug) and
    # dumps the whole actor json; assumes personJson always has 'url'
    print('Receiving actor update for ' + personJson['url'] +
          ' ' + str(personJson))
    domainFull = getFullDomain(domain, port)
    updateDomainFull = getFullDomain(updateDomain, updatePort)
    # accepted users-path variants for constructing the expected actor id
    usersPaths = ('users', 'profile', 'channel', 'accounts', 'u')
    usersStrFound = False
    for usersStr in usersPaths:
        actor = updateDomainFull + '/' + usersStr + '/' + updateNickname
        if actor in personJson['id']:
            usersStrFound = True
            break
    if not usersStrFound:
        if debug:
            # 'actor' holds the last candidate tried in the loop above
            print('actor: ' + actor)
            print('id: ' + personJson['id'])
            print('DEBUG: Actor does not match id')
        return False
    if updateDomainFull == domainFull:
        if debug:
            print('DEBUG: You can only receive actor updates ' +
                  'for domains other than your own')
        return False
    if not personJson.get('publicKey'):
        if debug:
            print('DEBUG: actor update does not contain a public key')
        return False
    if not personJson['publicKey'].get('publicKeyPem'):
        if debug:
            print('DEBUG: actor update does not contain a public key Pem')
        return False
    # on-disk cache path for this actor ('/' flattened to '#')
    actorFilename = baseDir + '/cache/actors/' + \
        personJson['id'].replace('/', '#') + '.json'
    # check that the public keys match.
    # If they don't then this may be a nefarious attempt to hack an account
    idx = personJson['id']
    if personCache.get(idx):
        # compare against the in-memory cached actor first
        if personCache[idx]['actor']['publicKey']['publicKeyPem'] != \
           personJson['publicKey']['publicKeyPem']:
            if debug:
                print('WARN: Public key does not match when updating actor')
            return False
    else:
        # not in memory; fall back to the on-disk cached actor, if any
        if os.path.isfile(actorFilename):
            existingPersonJson = loadJson(actorFilename)
            if existingPersonJson:
                if existingPersonJson['publicKey']['publicKeyPem'] != \
                   personJson['publicKey']['publicKeyPem']:
                    if debug:
                        print('WARN: Public key does not match ' +
                              'cached actor when updating')
                    return False
    # save to cache in memory
    storePersonInCache(baseDir, personJson['id'], personJson,
                       personCache, True)
    # save to cache on file
    if saveJson(personJson, actorFilename):
        print('actor updated for ' + personJson['id'])

    # remove avatar if it exists so that it will be refreshed later
    # when a timeline is constructed
    actorStr = personJson['id'].replace('/', '-')
    removeAvatarFromCache(baseDir, actorStr)
    return True
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _receiveUpdateToQuestion(recentPostsCache: {}, messageJson: {},
|
|
|
|
baseDir: str,
|
|
|
|
nickname: str, domain: str) -> None:
|
2019-11-26 10:43:37 +00:00
|
|
|
"""Updating a question as new votes arrive
|
|
|
|
"""
|
|
|
|
# message url of the question
|
|
|
|
if not messageJson.get('id'):
|
|
|
|
return
|
|
|
|
if not messageJson.get('actor'):
|
|
|
|
return
|
2020-08-23 11:13:35 +00:00
|
|
|
messageId = removeIdEnding(messageJson['id'])
|
2019-11-26 10:43:37 +00:00
|
|
|
if '#' in messageId:
|
2020-04-03 16:27:34 +00:00
|
|
|
messageId = messageId.split('#', 1)[0]
|
2019-11-26 10:43:37 +00:00
|
|
|
# find the question post
|
2020-04-03 16:27:34 +00:00
|
|
|
postFilename = locatePost(baseDir, nickname, domain, messageId)
|
2019-11-26 10:43:37 +00:00
|
|
|
if not postFilename:
|
|
|
|
return
|
|
|
|
# load the json for the question
|
2020-04-03 16:27:34 +00:00
|
|
|
postJsonObject = loadJson(postFilename, 1)
|
2019-11-26 10:43:37 +00:00
|
|
|
if not postJsonObject:
|
|
|
|
return
|
|
|
|
if not postJsonObject.get('actor'):
|
|
|
|
return
|
|
|
|
# does the actor match?
|
2020-04-03 16:27:34 +00:00
|
|
|
if postJsonObject['actor'] != messageJson['actor']:
|
2019-11-26 10:43:37 +00:00
|
|
|
return
|
2020-04-03 16:27:34 +00:00
|
|
|
saveJson(messageJson, postFilename)
|
2019-11-26 10:43:37 +00:00
|
|
|
# ensure that the cached post is removed if it exists, so
|
|
|
|
# that it then will be recreated
|
2020-04-03 16:27:34 +00:00
|
|
|
cachedPostFilename = \
|
|
|
|
getCachedPostFilename(baseDir, nickname, domain, messageJson)
|
2019-11-26 10:43:37 +00:00
|
|
|
if cachedPostFilename:
|
|
|
|
if os.path.isfile(cachedPostFilename):
|
|
|
|
os.remove(cachedPostFilename)
|
|
|
|
# remove from memory cache
|
2020-04-03 16:27:34 +00:00
|
|
|
removePostFromCache(messageJson, recentPostsCache)
|
2020-03-22 21:16:02 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _receiveUpdate(recentPostsCache: {}, session, baseDir: str,
                   httpPrefix: str, domain: str, port: int,
                   sendThreads: [], postLog: [], cachedWebfingers: {},
                   personCache: {}, messageJson: {}, federationList: [],
                   nickname: str, debug: bool) -> bool:
    """Receives an Update activity within the POST section of HTTPServer

    Validates the activity then dispatches either to
    _receiveUpdateToQuestion (poll vote totals) or to
    _personReceiveUpdate (actor profile changes).
    Returns True if the update was handled.
    """
    if messageJson['type'] != 'Update':
        return False
    if not messageJson.get('actor'):
        if debug:
            print('DEBUG: ' + messageJson['type'] + ' has no actor')
        return False
    if not messageJson.get('object'):
        if debug:
            print('DEBUG: ' + messageJson['type'] + ' has no object')
        return False
    if not isinstance(messageJson['object'], dict):
        if debug:
            print('DEBUG: ' + messageJson['type'] + ' object is not a dict')
        return False
    if not messageJson['object'].get('type'):
        if debug:
            print('DEBUG: ' + messageJson['type'] + ' object has no type')
        return False
    if not hasUsersPath(messageJson['actor']):
        if debug:
            print('DEBUG: "users" or "profile" missing from actor in ' +
                  messageJson['type'])
        return False

    # a Question update carries refreshed vote counts
    if messageJson['object']['type'] == 'Question':
        _receiveUpdateToQuestion(recentPostsCache, messageJson,
                                 baseDir, nickname, domain)
        if debug:
            print('DEBUG: Question update was received')
        return True

    # NOTE(review): this branch is unreachable — the first guard above
    # guarantees messageJson['type'] == 'Update' here; presumably this
    # was meant to test messageJson['object']['type'] — confirm before
    # changing, since the wrapped-actor branch below already handles
    # Person objects
    if messageJson['type'] == 'Person':
        if messageJson.get('url') and messageJson.get('id'):
            print('Request to update actor unwrapped: ' + str(messageJson))
            updateNickname = getNicknameFromActor(messageJson['id'])
            if updateNickname:
                updateDomain, updatePort = \
                    getDomainFromActor(messageJson['id'])
                if _personReceiveUpdate(baseDir, domain, port,
                                        updateNickname, updateDomain,
                                        updatePort, messageJson,
                                        personCache, debug):
                    if debug:
                        print('DEBUG: ' +
                              'Unwrapped profile update was received for ' +
                              messageJson['url'])
                    return True

    # actor profile update wrapped in the Update activity's object
    if messageJson['object']['type'] == 'Person' or \
       messageJson['object']['type'] == 'Application' or \
       messageJson['object']['type'] == 'Group' or \
       messageJson['object']['type'] == 'Service':
        if messageJson['object'].get('url') and \
           messageJson['object'].get('id'):
            print('Request to update actor: ' + str(messageJson))
            updateNickname = getNicknameFromActor(messageJson['actor'])
            if updateNickname:
                updateDomain, updatePort = \
                    getDomainFromActor(messageJson['actor'])
                if _personReceiveUpdate(baseDir,
                                        domain, port,
                                        updateNickname, updateDomain,
                                        updatePort,
                                        messageJson['object'],
                                        personCache, debug):
                    if debug:
                        print('DEBUG: Profile update was received for ' +
                              messageJson['object']['url'])
                    return True
    return False
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _receiveLike(recentPostsCache: {},
|
|
|
|
session, handle: str, isGroup: bool, baseDir: str,
|
|
|
|
httpPrefix: str, domain: str, port: int,
|
|
|
|
onionDomain: str,
|
|
|
|
sendThreads: [], postLog: [], cachedWebfingers: {},
|
|
|
|
personCache: {}, messageJson: {}, federationList: [],
|
|
|
|
debug: bool) -> bool:
|
2019-07-10 12:40:31 +00:00
|
|
|
"""Receives a Like activity within the POST section of HTTPServer
|
|
|
|
"""
|
2020-04-03 16:27:34 +00:00
|
|
|
if messageJson['type'] != 'Like':
|
2019-07-10 12:40:31 +00:00
|
|
|
return False
|
|
|
|
if not messageJson.get('actor'):
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: ' + messageJson['type'] + ' has no actor')
|
2019-07-10 12:40:31 +00:00
|
|
|
return False
|
|
|
|
if not messageJson.get('object'):
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: ' + messageJson['type'] + ' has no object')
|
2019-07-10 12:40:31 +00:00
|
|
|
return False
|
|
|
|
if not isinstance(messageJson['object'], str):
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: ' + messageJson['type'] + ' object is not a string')
|
2019-07-10 12:40:31 +00:00
|
|
|
return False
|
|
|
|
if not messageJson.get('to'):
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: ' + messageJson['type'] + ' has no "to" list')
|
2019-07-10 12:40:31 +00:00
|
|
|
return False
|
2020-12-23 10:57:44 +00:00
|
|
|
if not hasUsersPath(messageJson['actor']):
|
2019-07-10 12:40:31 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: "users" or "profile" missing from actor in ' +
|
|
|
|
messageJson['type'])
|
2019-07-10 12:40:31 +00:00
|
|
|
return False
|
|
|
|
if '/statuses/' not in messageJson['object']:
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: "statuses" missing from object in ' +
|
|
|
|
messageJson['type'])
|
2019-07-10 12:40:31 +00:00
|
|
|
return False
|
2020-04-03 16:27:34 +00:00
|
|
|
if not os.path.isdir(baseDir + '/accounts/' + handle):
|
|
|
|
print('DEBUG: unknown recipient of like - ' + handle)
|
2019-07-10 18:00:14 +00:00
|
|
|
# if this post in the outbox of the person?
|
2020-12-22 21:24:46 +00:00
|
|
|
handleName = handle.split('@')[0]
|
|
|
|
handleDom = handle.split('@')[1]
|
|
|
|
postFilename = locatePost(baseDir, handleName, handleDom,
|
2020-04-03 16:27:34 +00:00
|
|
|
messageJson['object'])
|
2019-07-10 12:40:31 +00:00
|
|
|
if not postFilename:
|
|
|
|
if debug:
|
|
|
|
print('DEBUG: post not found in inbox or outbox')
|
|
|
|
print(messageJson['object'])
|
|
|
|
return True
|
|
|
|
if debug:
|
2019-07-11 12:59:00 +00:00
|
|
|
print('DEBUG: liked post found in inbox')
|
2019-10-19 17:50:05 +00:00
|
|
|
|
2020-12-22 21:24:46 +00:00
|
|
|
handleName = handle.split('@')[0]
|
|
|
|
handleDom = handle.split('@')[1]
|
2020-12-30 21:21:57 +00:00
|
|
|
updateLikesCollection(recentPostsCache, baseDir, postFilename,
|
|
|
|
messageJson['object'],
|
|
|
|
messageJson['actor'], domain, debug)
|
2020-12-22 18:06:23 +00:00
|
|
|
if not _alreadyLiked(baseDir,
|
2020-12-22 21:24:46 +00:00
|
|
|
handleName, handleDom,
|
2020-12-22 18:06:23 +00:00
|
|
|
messageJson['object'],
|
|
|
|
messageJson['actor']):
|
|
|
|
_likeNotify(baseDir, domain, onionDomain, handle,
|
|
|
|
messageJson['actor'], messageJson['object'])
|
2019-07-10 12:40:31 +00:00
|
|
|
return True
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _receiveUndoLike(recentPostsCache: {},
|
|
|
|
session, handle: str, isGroup: bool, baseDir: str,
|
|
|
|
httpPrefix: str, domain: str, port: int,
|
|
|
|
sendThreads: [], postLog: [], cachedWebfingers: {},
|
|
|
|
personCache: {}, messageJson: {}, federationList: [],
|
|
|
|
debug: bool) -> bool:
|
2019-07-12 09:10:09 +00:00
|
|
|
"""Receives an undo like activity within the POST section of HTTPServer
|
|
|
|
"""
|
2020-04-03 16:27:34 +00:00
|
|
|
if messageJson['type'] != 'Undo':
|
2019-07-12 09:10:09 +00:00
|
|
|
return False
|
|
|
|
if not messageJson.get('actor'):
|
|
|
|
return False
|
|
|
|
if not messageJson.get('object'):
|
|
|
|
return False
|
|
|
|
if not isinstance(messageJson['object'], dict):
|
|
|
|
return False
|
|
|
|
if not messageJson['object'].get('type'):
|
|
|
|
return False
|
2020-04-03 16:27:34 +00:00
|
|
|
if messageJson['object']['type'] != 'Like':
|
2019-07-12 09:10:09 +00:00
|
|
|
return False
|
|
|
|
if not messageJson['object'].get('object'):
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: ' + messageJson['type'] + ' like has no object')
|
2019-07-12 09:10:09 +00:00
|
|
|
return False
|
|
|
|
if not isinstance(messageJson['object']['object'], str):
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: ' + messageJson['type'] +
|
|
|
|
' like object is not a string')
|
2019-07-12 09:10:09 +00:00
|
|
|
return False
|
2020-12-23 10:57:44 +00:00
|
|
|
if not hasUsersPath(messageJson['actor']):
|
2019-07-12 09:10:09 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: "users" or "profile" missing from actor in ' +
|
|
|
|
messageJson['type'] + ' like')
|
2019-07-12 09:10:09 +00:00
|
|
|
return False
|
|
|
|
if '/statuses/' not in messageJson['object']['object']:
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: "statuses" missing from like object in ' +
|
|
|
|
messageJson['type'])
|
2019-07-12 09:10:09 +00:00
|
|
|
return False
|
2020-04-03 16:27:34 +00:00
|
|
|
if not os.path.isdir(baseDir + '/accounts/' + handle):
|
|
|
|
print('DEBUG: unknown recipient of undo like - ' + handle)
|
2019-07-12 09:10:09 +00:00
|
|
|
# if this post in the outbox of the person?
|
2020-12-22 21:24:46 +00:00
|
|
|
handleName = handle.split('@')[0]
|
|
|
|
handleDom = handle.split('@')[1]
|
2020-04-03 16:27:34 +00:00
|
|
|
postFilename = \
|
2020-12-22 21:24:46 +00:00
|
|
|
locatePost(baseDir, handleName, handleDom,
|
2020-04-03 16:27:34 +00:00
|
|
|
messageJson['object']['object'])
|
2019-07-12 09:10:09 +00:00
|
|
|
if not postFilename:
|
|
|
|
if debug:
|
2019-07-12 09:41:57 +00:00
|
|
|
print('DEBUG: unliked post not found in inbox or outbox')
|
2019-07-12 09:10:09 +00:00
|
|
|
print(messageJson['object']['object'])
|
|
|
|
return True
|
|
|
|
if debug:
|
|
|
|
print('DEBUG: liked post found in inbox. Now undoing.')
|
2020-04-03 16:27:34 +00:00
|
|
|
undoLikesCollectionEntry(recentPostsCache, baseDir, postFilename,
|
|
|
|
messageJson['object'],
|
|
|
|
messageJson['actor'], domain, debug)
|
2019-07-12 09:10:09 +00:00
|
|
|
return True
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _receiveBookmark(recentPostsCache: {},
|
|
|
|
session, handle: str, isGroup: bool, baseDir: str,
|
|
|
|
httpPrefix: str, domain: str, port: int,
|
|
|
|
sendThreads: [], postLog: [], cachedWebfingers: {},
|
|
|
|
personCache: {}, messageJson: {}, federationList: [],
|
|
|
|
debug: bool) -> bool:
|
2019-11-17 14:01:49 +00:00
|
|
|
"""Receives a bookmark activity within the POST section of HTTPServer
|
|
|
|
"""
|
2020-04-03 16:27:34 +00:00
|
|
|
if messageJson['type'] != 'Bookmark':
|
2019-11-17 14:01:49 +00:00
|
|
|
return False
|
|
|
|
if not messageJson.get('actor'):
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: ' + messageJson['type'] + ' has no actor')
|
2019-11-17 14:01:49 +00:00
|
|
|
return False
|
|
|
|
if not messageJson.get('object'):
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: ' + messageJson['type'] + ' has no object')
|
2019-11-17 14:01:49 +00:00
|
|
|
return False
|
|
|
|
if not isinstance(messageJson['object'], str):
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: ' + messageJson['type'] + ' object is not a string')
|
2019-11-17 14:01:49 +00:00
|
|
|
return False
|
|
|
|
if not messageJson.get('to'):
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: ' + messageJson['type'] + ' has no "to" list')
|
2019-11-17 14:01:49 +00:00
|
|
|
return False
|
|
|
|
if '/users/' not in messageJson['actor']:
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: "users" missing from actor in ' +
|
|
|
|
messageJson['type'])
|
2019-11-17 14:01:49 +00:00
|
|
|
return False
|
|
|
|
if '/statuses/' not in messageJson['object']:
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: "statuses" missing from object in ' +
|
|
|
|
messageJson['type'])
|
2019-11-17 14:01:49 +00:00
|
|
|
return False
|
|
|
|
if domain not in handle.split('@')[1]:
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: unrecognized domain ' + handle)
|
2020-03-22 21:16:02 +00:00
|
|
|
return False
|
2020-12-16 10:48:40 +00:00
|
|
|
domainFull = getFullDomain(domain, port)
|
2020-04-03 16:27:34 +00:00
|
|
|
nickname = handle.split('@')[0]
|
|
|
|
if not messageJson['actor'].endswith(domainFull + '/users/' + nickname):
|
2019-11-17 14:01:49 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: ' +
|
|
|
|
'bookmark actor should be the same as the handle sent to ' +
|
|
|
|
handle + ' != ' + messageJson['actor'])
|
2019-11-17 14:01:49 +00:00
|
|
|
return False
|
2020-04-03 16:27:34 +00:00
|
|
|
if not os.path.isdir(baseDir + '/accounts/' + handle):
|
|
|
|
print('DEBUG: unknown recipient of bookmark - ' + handle)
|
2019-11-17 14:01:49 +00:00
|
|
|
# if this post in the outbox of the person?
|
2020-04-03 16:27:34 +00:00
|
|
|
postFilename = locatePost(baseDir, nickname, domain, messageJson['object'])
|
2019-11-17 14:01:49 +00:00
|
|
|
if not postFilename:
|
|
|
|
if debug:
|
|
|
|
print('DEBUG: post not found in inbox or outbox')
|
|
|
|
print(messageJson['object'])
|
|
|
|
return True
|
|
|
|
if debug:
|
|
|
|
print('DEBUG: bookmarked post was found')
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
updateBookmarksCollection(recentPostsCache, baseDir, postFilename,
|
|
|
|
messageJson['object'],
|
|
|
|
messageJson['actor'], domain, debug)
|
2019-11-17 14:01:49 +00:00
|
|
|
return True
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _receiveUndoBookmark(recentPostsCache: {},
|
|
|
|
session, handle: str, isGroup: bool, baseDir: str,
|
|
|
|
httpPrefix: str, domain: str, port: int,
|
|
|
|
sendThreads: [], postLog: [], cachedWebfingers: {},
|
|
|
|
personCache: {}, messageJson: {}, federationList: [],
|
|
|
|
debug: bool) -> bool:
|
2019-11-17 14:01:49 +00:00
|
|
|
"""Receives an undo bookmark activity within the POST section of HTTPServer
|
|
|
|
"""
|
2020-04-03 16:27:34 +00:00
|
|
|
if messageJson['type'] != 'Undo':
|
2019-11-17 14:01:49 +00:00
|
|
|
return False
|
|
|
|
if not messageJson.get('actor'):
|
|
|
|
return False
|
|
|
|
if not messageJson.get('object'):
|
|
|
|
return False
|
|
|
|
if not isinstance(messageJson['object'], dict):
|
|
|
|
return False
|
|
|
|
if not messageJson['object'].get('type'):
|
|
|
|
return False
|
2020-04-03 16:27:34 +00:00
|
|
|
if messageJson['object']['type'] != 'Bookmark':
|
2019-11-17 14:01:49 +00:00
|
|
|
return False
|
|
|
|
if not messageJson['object'].get('object'):
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: ' + messageJson['type'] + ' like has no object')
|
2019-11-17 14:01:49 +00:00
|
|
|
return False
|
|
|
|
if not isinstance(messageJson['object']['object'], str):
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: ' + messageJson['type'] +
|
|
|
|
' like object is not a string')
|
2019-11-17 14:01:49 +00:00
|
|
|
return False
|
|
|
|
if '/users/' not in messageJson['actor']:
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: "users" missing from actor in ' +
|
|
|
|
messageJson['type'] + ' like')
|
2019-11-17 14:01:49 +00:00
|
|
|
return False
|
|
|
|
if '/statuses/' not in messageJson['object']['object']:
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: "statuses" missing from like object in ' +
|
|
|
|
messageJson['type'])
|
2019-11-17 14:01:49 +00:00
|
|
|
return False
|
2020-12-16 10:48:40 +00:00
|
|
|
domainFull = getFullDomain(domain, port)
|
2020-04-03 16:27:34 +00:00
|
|
|
nickname = handle.split('@')[0]
|
2019-11-17 14:01:49 +00:00
|
|
|
if domain not in handle.split('@')[1]:
|
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: unrecognized bookmark domain ' + handle)
|
2020-03-22 21:16:02 +00:00
|
|
|
return False
|
2020-04-03 16:27:34 +00:00
|
|
|
if not messageJson['actor'].endswith(domainFull + '/users/' + nickname):
|
2019-11-17 14:01:49 +00:00
|
|
|
if debug:
|
2020-04-03 16:27:34 +00:00
|
|
|
print('DEBUG: ' +
|
|
|
|
'bookmark actor should be the same as the handle sent to ' +
|
|
|
|
handle + ' != ' + messageJson['actor'])
|
2019-11-17 14:01:49 +00:00
|
|
|
return False
|
2020-04-03 16:27:34 +00:00
|
|
|
if not os.path.isdir(baseDir + '/accounts/' + handle):
|
|
|
|
print('DEBUG: unknown recipient of bookmark undo - ' + handle)
|
2019-11-17 14:01:49 +00:00
|
|
|
# if this post in the outbox of the person?
|
2020-04-03 16:27:34 +00:00
|
|
|
postFilename = locatePost(baseDir, nickname, domain,
|
|
|
|
messageJson['object']['object'])
|
2019-11-17 14:01:49 +00:00
|
|
|
if not postFilename:
|
|
|
|
if debug:
|
|
|
|
print('DEBUG: unbookmarked post not found in inbox or outbox')
|
|
|
|
print(messageJson['object']['object'])
|
|
|
|
return True
|
|
|
|
if debug:
|
|
|
|
print('DEBUG: bookmarked post found. Now undoing.')
|
2020-04-03 16:27:34 +00:00
|
|
|
undoBookmarksCollectionEntry(recentPostsCache, baseDir, postFilename,
|
|
|
|
messageJson['object'],
|
|
|
|
messageJson['actor'], domain, debug)
|
2019-11-17 14:01:49 +00:00
|
|
|
return True
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _receiveDelete(session, handle: str, isGroup: bool, baseDir: str,
                   httpPrefix: str, domain: str, port: int,
                   sendThreads: [], postLog: [], cachedWebfingers: {},
                   personCache: {}, messageJson: {}, federationList: [],
                   debug: bool, allowDeletion: bool,
                   recentPostsCache: {}) -> bool:
    """Receives a Delete activity within the POST section of HTTPServer

    Returns False if the message is not a valid Delete for this
    instance, True once the Delete has been handled (even if the
    referenced post could not be found).
    """
    if messageJson['type'] != 'Delete':
        return False
    if not messageJson.get('actor'):
        if debug:
            print('DEBUG: ' + messageJson['type'] + ' has no actor')
        return False
    if debug:
        print('DEBUG: Delete activity arrived')
    if not messageJson.get('object'):
        if debug:
            print('DEBUG: ' + messageJson['type'] + ' has no object')
        return False
    if not isinstance(messageJson['object'], str):
        if debug:
            print('DEBUG: ' + messageJson['type'] + ' object is not a string')
        return False
    domainFull = getFullDomain(domain, port)
    deletePrefix = httpPrefix + '://' + domainFull + '/'
    # unless deletion is explicitly allowed, only permit deletes
    # which originate from this instance
    if (not allowDeletion and
        (not messageJson['object'].startswith(deletePrefix) or
         not messageJson['actor'].startswith(deletePrefix))):
        if debug:
            print('DEBUG: delete not permitted from other instances')
        return False
    if not messageJson.get('to'):
        if debug:
            print('DEBUG: ' + messageJson['type'] + ' has no "to" list')
        return False
    if not hasUsersPath(messageJson['actor']):
        if debug:
            print('DEBUG: ' +
                  '"users" or "profile" missing from actor in ' +
                  messageJson['type'])
        return False
    if '/statuses/' not in messageJson['object']:
        if debug:
            print('DEBUG: "statuses" missing from object in ' +
                  messageJson['type'])
        return False
    if messageJson['actor'] not in messageJson['object']:
        if debug:
            # NOTE(review): this only warns and does not reject the
            # activity - confirm whether a return False is intended here
            print('DEBUG: actor is not the owner of the post to be deleted')
    if not os.path.isdir(baseDir + '/accounts/' + handle):
        # fixed copy-paste message which previously said "like"
        print('DEBUG: unknown recipient of delete - ' + handle)
    # if this post in the outbox of the person?
    messageId = removeIdEnding(messageJson['object'])
    removeModerationPostFromIndex(baseDir, messageId, debug)
    handleNickname = handle.split('@')[0]
    handleDomain = handle.split('@')[1]
    postFilename = locatePost(baseDir, handleNickname,
                              handleDomain, messageId)
    if not postFilename:
        if debug:
            print('DEBUG: delete post not found in inbox or outbox')
            print(messageId)
        return True
    deletePost(baseDir, httpPrefix, handleNickname,
               handleDomain, postFilename, debug,
               recentPostsCache)
    if debug:
        print('DEBUG: post deleted - ' + postFilename)

    # also delete any local blogs saved to the news actor
    if handleNickname != 'news' and handleDomain == domainFull:
        postFilename = locatePost(baseDir, 'news',
                                  handleDomain, messageId)
        if postFilename:
            deletePost(baseDir, httpPrefix, 'news',
                       handleDomain, postFilename, debug,
                       recentPostsCache)
            if debug:
                print('DEBUG: blog post deleted - ' + postFilename)
    return True
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _receiveAnnounce(recentPostsCache: {},
                     session, handle: str, isGroup: bool, baseDir: str,
                     httpPrefix: str,
                     domain: str, onionDomain: str, port: int,
                     sendThreads: [], postLog: [], cachedWebfingers: {},
                     personCache: {}, messageJson: {}, federationList: [],
                     debug: bool, translate: {},
                     YTReplacementDomain: str,
                     allowLocalNetworkAccess: bool) -> bool:
    """Receives an announce activity within the POST section of HTTPServer

    Validates the Announce, checks blocking, updates the announce
    (shares) collection on the local copy of the post, then downloads
    the announced post and tries to obtain its author's public key so
    that an avatar can be shown.
    Returns False when the activity is rejected, True once handled.
    """
    if messageJson['type'] != 'Announce':
        return False
    # the recipient handle must be of the form nickname@domain
    if '@' not in handle:
        if debug:
            print('DEBUG: bad handle ' + handle)
        return False
    if not messageJson.get('actor'):
        if debug:
            print('DEBUG: ' + messageJson['type'] + ' has no actor')
        return False
    if debug:
        print('DEBUG: receiving announce on ' + handle)
    if not messageJson.get('object'):
        if debug:
            print('DEBUG: ' + messageJson['type'] + ' has no object')
        return False
    # the announced object is expected to be a post url, not an
    # embedded post
    if not isinstance(messageJson['object'], str):
        if debug:
            print('DEBUG: ' + messageJson['type'] + ' object is not a string')
        return False
    if not messageJson.get('to'):
        if debug:
            print('DEBUG: ' + messageJson['type'] + ' has no "to" list')
        return False
    if not hasUsersPath(messageJson['actor']):
        if debug:
            print('DEBUG: ' +
                  '"users" or "profile" missing from actor in ' +
                  messageJson['type'])
        return False
    if not hasUsersPath(messageJson['object']):
        if debug:
            print('DEBUG: ' +
                  '"users", "channel" or "profile" missing in ' +
                  messageJson['type'])
        return False

    prefixes = getProtocolPrefixes()
    # is the domain of the announce actor blocked?
    # strip any protocol prefix then take the first path segment
    # to obtain the bare domain of the announced object
    objectDomain = messageJson['object']
    for prefix in prefixes:
        objectDomain = objectDomain.replace(prefix, '')
    if '/' in objectDomain:
        objectDomain = objectDomain.split('/')[0]
    if isBlockedDomain(baseDir, objectDomain):
        if debug:
            print('DEBUG: announced domain is blocked')
        return False
    if not os.path.isdir(baseDir + '/accounts/' + handle):
        # NOTE(review): this only warns; processing continues below
        print('DEBUG: unknown recipient of announce - ' + handle)

    # is the announce actor blocked?
    nickname = handle.split('@')[0]
    actorNickname = getNicknameFromActor(messageJson['actor'])
    actorDomain, actorPort = getDomainFromActor(messageJson['actor'])
    if isBlocked(baseDir, nickname, domain, actorNickname, actorDomain):
        print('Receive announce blocked for actor: ' +
              actorNickname + '@' + actorDomain)
        return False

    # is this post in the outbox of the person?
    postFilename = locatePost(baseDir, nickname, domain,
                              messageJson['object'])
    if not postFilename:
        if debug:
            print('DEBUG: announce post not found in inbox or outbox')
            print(messageJson['object'])
        # True because the activity itself was valid
        return True
    updateAnnounceCollection(recentPostsCache, baseDir, postFilename,
                             messageJson['actor'], domain, debug)
    if debug:
        print('DEBUG: Downloading announce post ' + messageJson['actor'] +
              ' -> ' + messageJson['object'])
    postJsonObject = downloadAnnounce(session, baseDir, httpPrefix,
                                      nickname, domain, messageJson,
                                      __version__, translate,
                                      YTReplacementDomain,
                                      allowLocalNetworkAccess)
    if not postJsonObject:
        # only remove the local copy if the announced post is remote
        if domain not in messageJson['object'] and \
           onionDomain not in messageJson['object']:
            if os.path.isfile(postFilename):
                # if the announce can't be downloaded then remove it
                os.remove(postFilename)
    else:
        if debug:
            print('DEBUG: Announce post downloaded for ' +
                  messageJson['actor'] + ' -> ' + messageJson['object'])
        storeHashTags(baseDir, nickname, postJsonObject)
        # Try to obtain the actor for this person
        # so that their avatar can be shown
        lookupActor = None
        if postJsonObject.get('attributedTo'):
            if isinstance(postJsonObject['attributedTo'], str):
                lookupActor = postJsonObject['attributedTo']
        else:
            # fall back to the attribution on the wrapped object
            if postJsonObject.get('object'):
                if isinstance(postJsonObject['object'], dict):
                    if postJsonObject['object'].get('attributedTo'):
                        attrib = postJsonObject['object']['attributedTo']
                        if isinstance(attrib, str):
                            lookupActor = attrib
        if lookupActor:
            if hasUsersPath(lookupActor):
                # reduce a status url down to the actor url
                if '/statuses/' in lookupActor:
                    lookupActor = lookupActor.split('/statuses/')[0]

                if debug:
                    print('DEBUG: Obtaining actor for announce post ' +
                          lookupActor)
                # retry a few times, since the remote instance may be
                # temporarily unreachable; each retry waits 5 seconds
                for tries in range(6):
                    pubKey = \
                        getPersonPubKey(baseDir, session, lookupActor,
                                        personCache, debug,
                                        __version__, httpPrefix,
                                        domain, onionDomain)
                    if pubKey:
                        print('DEBUG: public key obtained for announce: ' +
                              lookupActor)
                        break

                    if debug:
                        print('DEBUG: Retry ' + str(tries + 1) +
                              ' obtaining actor for ' + lookupActor)
                    time.sleep(5)
    if debug:
        print('DEBUG: announced/repeated post arrived in inbox')
    return True
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _receiveUndoAnnounce(recentPostsCache: {},
                         session, handle: str, isGroup: bool, baseDir: str,
                         httpPrefix: str, domain: str, port: int,
                         sendThreads: [], postLog: [], cachedWebfingers: {},
                         personCache: {}, messageJson: {}, federationList: [],
                         debug: bool) -> bool:
    """Receives an undo announce activity within the POST section of HTTPServer

    Removes the announce from the local copy of the announced post and
    deletes the locally stored repeat. Returns False when the activity
    is rejected, True once handled.
    """
    if messageJson['type'] != 'Undo':
        return False
    if not messageJson.get('actor'):
        return False
    if not messageJson.get('object'):
        return False
    if not isinstance(messageJson['object'], dict):
        return False
    if not messageJson['object'].get('object'):
        return False
    if not isinstance(messageJson['object']['object'], str):
        return False
    if messageJson['object']['type'] != 'Announce':
        return False
    if not hasUsersPath(messageJson['actor']):
        if debug:
            print('DEBUG: "users" or "profile" missing from actor in ' +
                  messageJson['type'] + ' announce')
        return False
    if not os.path.isdir(baseDir + '/accounts/' + handle):
        print('DEBUG: unknown recipient of undo announce - ' + handle)
    # if this post in the outbox of the person?
    handleName = handle.split('@')[0]
    handleDom = handle.split('@')[1]
    postFilename = locatePost(baseDir, handleName, handleDom,
                              messageJson['object']['object'])
    if not postFilename:
        if debug:
            print('DEBUG: undo announce post not found in inbox or outbox')
            print(messageJson['object']['object'])
        return True
    if debug:
        print('DEBUG: announced/repeated post to be undone found in inbox')

    postJsonObject = loadJson(postFilename)
    if postJsonObject:
        # BUGFIX: the condition was previously inverted
        # ("if not postJsonObject.get('type')"), which raised KeyError
        # whenever 'type' was missing and skipped the check otherwise
        if postJsonObject.get('type'):
            if postJsonObject['type'] != 'Announce':
                if debug:
                    print("DEBUG: Attempt to undo something " +
                          "which isn't an announcement")
                return False
    undoAnnounceCollectionEntry(recentPostsCache, baseDir, postFilename,
                                messageJson['actor'], domain, debug)
    if os.path.isfile(postFilename):
        os.remove(postFilename)
    return True
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2020-08-21 18:32:16 +00:00
|
|
|
def jsonPostAllowsComments(postJsonObject: {}) -> bool:
    """Returns true if the given post allows comments/replies
    """
    # an explicit flag at the top level takes precedence
    if 'commentsEnabled' in postJsonObject:
        return postJsonObject['commentsEnabled']
    wrappedObject = postJsonObject.get('object')
    if wrappedObject:
        if not isinstance(wrappedObject, dict):
            return False
        # the flag may instead appear on the wrapped object
        if 'commentsEnabled' in wrappedObject:
            return wrappedObject['commentsEnabled']
    # no flag present anywhere: default to allowing replies
    return True
|
|
|
|
|
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _postAllowsComments(postFilename: str) -> bool:
    """Returns true if the post stored in the given file
    allows comments/replies
    """
    postJsonObject = loadJson(postFilename)
    if postJsonObject:
        return jsonPostAllowsComments(postJsonObject)
    # unreadable or empty post: disallow
    return False
|
|
|
|
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
def populateReplies(baseDir: str, httpPrefix: str, domain: str,
                    messageJson: {}, maxReplies: int, debug: bool) -> bool:
    """Updates the list of replies for a post on this domain if
    a reply to it arrives

    The reply's id is appended to the .replies index file alongside
    the replied-to post, provided that post exists locally, allows
    comments, and the index has not exceeded maxReplies entries.
    """
    if not messageJson.get('id'):
        return False
    if not messageJson.get('object'):
        return False
    if not isinstance(messageJson['object'], dict):
        return False
    if not messageJson['object'].get('inReplyTo'):
        return False
    if not messageJson['object'].get('to'):
        return False
    replyTo = messageJson['object']['inReplyTo']
    if not isinstance(replyTo, str):
        return False
    if debug:
        print('DEBUG: post contains a reply')
    # is this a reply to a post on this domain?
    if not replyTo.startswith(httpPrefix + '://' + domain + '/'):
        if debug:
            print('DEBUG: post is a reply to another not on this domain')
            print(replyTo)
            print('Expected: ' + httpPrefix + '://' + domain + '/')
        return False
    replyToNickname = getNicknameFromActor(replyTo)
    if not replyToNickname:
        print('DEBUG: no nickname found for ' + replyTo)
        return False
    replyToDomain, replyToPort = getDomainFromActor(replyTo)
    if not replyToDomain:
        if debug:
            print('DEBUG: no domain found for ' + replyTo)
        return False
    postFilename = locatePost(baseDir, replyToNickname,
                              replyToDomain, replyTo)
    if not postFilename:
        if debug:
            print('DEBUG: post may have expired - ' + replyTo)
        return False
    if not _postAllowsComments(postFilename):
        if debug:
            print('DEBUG: post does not allow comments - ' + replyTo)
        return False
    # populate a text file containing the ids of replies
    postRepliesFilename = postFilename.replace('.json', '.replies')
    messageId = removeIdEnding(messageJson['id'])
    if os.path.isfile(postRepliesFilename):
        # read the index once for both the line count and the
        # membership test; the previous implementation called bare
        # open() three times and leaked the file handles
        with open(postRepliesFilename, 'r') as repliesFile:
            repliesContent = repliesFile.read()
        if len(repliesContent.splitlines()) > maxReplies:
            return False
        if messageId not in repliesContent:
            with open(postRepliesFilename, 'a+') as repliesFile:
                repliesFile.write(messageId + '\n')
    else:
        with open(postRepliesFilename, 'w+') as repliesFile:
            repliesFile.write(messageId + '\n')
    return True
|
2019-09-30 09:43:46 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _estimateNumberOfMentions(content: str) -> int:
|
2019-09-30 10:15:20 +00:00
|
|
|
"""Returns a rough estimate of the number of mentions
|
|
|
|
"""
|
2020-04-03 16:27:34 +00:00
|
|
|
return int(content.count('@') / 2)
|
|
|
|
|
2019-11-16 14:49:21 +00:00
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _estimateNumberOfEmoji(content: str) -> int:
|
2019-11-16 14:49:21 +00:00
|
|
|
"""Returns a rough estimate of the number of emoji
|
|
|
|
"""
|
2020-04-03 16:27:34 +00:00
|
|
|
return int(content.count(':') / 2)
|
2019-11-16 14:49:21 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _validPostContent(baseDir: str, nickname: str, domain: str,
                      messageJson: {}, maxMentions: int, maxEmoji: int,
                      allowLocalNetworkAccess: bool) -> bool:
    """Is the content of a received post valid?
    Check for bad html
    Check for hellthreads
    Check number of tags is reasonable
    """
    # posts without a content field are accepted here and
    # validated elsewhere
    if not messageJson.get('object'):
        return True
    if not isinstance(messageJson['object'], dict):
        return True
    if not messageJson['object'].get('content'):
        return True

    # the published date must look like a UTC ISO 8601 timestamp
    if not messageJson['object'].get('published'):
        return False
    if 'T' not in messageJson['object']['published']:
        return False
    if 'Z' not in messageJson['object']['published']:
        return False
    if not validPostDate(messageJson['object']['published']):
        return False

    if messageJson['object'].get('summary'):
        summary = messageJson['object']['summary']
        if not isinstance(summary, str):
            print('WARN: content warning is not a string')
            return False
        if summary != validContentWarning(summary):
            print('WARN: invalid content warning ' + summary)
            return False

    # git patches are allowed to contain html-like text.
    # BUGFIX: use .get() for 'type' and 'summary' because either
    # field may be absent, which previously raised KeyError here
    if isGitPatch(baseDir, nickname, domain,
                  messageJson['object'].get('type'),
                  messageJson['object'].get('summary'),
                  messageJson['object']['content']):
        return True

    if dangerousMarkup(messageJson['object']['content'],
                       allowLocalNetworkAccess):
        if messageJson['object'].get('id'):
            print('REJECT ARBITRARY HTML: ' + messageJson['object']['id'])
        print('REJECT ARBITRARY HTML: bad string in post - ' +
              messageJson['object']['content'])
        return False

    # check (rough) number of mentions
    mentionsEst = _estimateNumberOfMentions(messageJson['object']['content'])
    if mentionsEst > maxMentions:
        if messageJson['object'].get('id'):
            print('REJECT HELLTHREAD: ' + messageJson['object']['id'])
        print('REJECT HELLTHREAD: Too many mentions in post - ' +
              messageJson['object']['content'])
        return False
    if _estimateNumberOfEmoji(messageJson['object']['content']) > maxEmoji:
        if messageJson['object'].get('id'):
            print('REJECT EMOJI OVERLOAD: ' + messageJson['object']['id'])
        print('REJECT EMOJI OVERLOAD: Too many emoji in post - ' +
              messageJson['object']['content'])
        return False
    # check number of tags
    if messageJson['object'].get('tag'):
        if not isinstance(messageJson['object']['tag'], list):
            messageJson['object']['tag'] = []
        else:
            if len(messageJson['object']['tag']) > int(maxMentions * 2):
                if messageJson['object'].get('id'):
                    print('REJECT: ' + messageJson['object']['id'])
                # BUGFIX: str() is required because 'tag' is a list;
                # concatenating it to a string raised TypeError
                print('REJECT: Too many tags in post - ' +
                      str(messageJson['object']['tag']))
                return False
    # check for filtered content
    if isFiltered(baseDir, nickname, domain,
                  messageJson['object']['content']):
        print('REJECT: content filtered')
        return False
    # a reply must be to a post which allows comments
    if messageJson['object'].get('inReplyTo'):
        if isinstance(messageJson['object']['inReplyTo'], str):
            originalPostId = messageJson['object']['inReplyTo']
            postPostFilename = locatePost(baseDir, nickname, domain,
                                          originalPostId)
            if postPostFilename:
                if not _postAllowsComments(postPostFilename):
                    print('REJECT: reply to post which does not ' +
                          'allow comments: ' + originalPostId)
                    return False
    print('ACCEPT: post content is valid')
    return True
|
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _obtainAvatarForReplyPost(session, baseDir: str, httpPrefix: str,
                              domain: str, onionDomain: str, personCache: {},
                              postJsonObject: {}, debug: bool) -> None:
    """Tries to obtain the actor for the person being replied to
    so that their avatar can later be shown
    """
    wrappedObject = postJsonObject.get('object')
    if not wrappedObject:
        return
    if not isinstance(wrappedObject, dict):
        return

    replyId = wrappedObject.get('inReplyTo')
    if not replyId:
        return
    if not isinstance(replyId, str):
        return
    if not hasUsersPath(replyId):
        return

    # reduce a status url down to the actor url
    lookupActor = replyId
    if '/statuses/' in lookupActor:
        lookupActor = lookupActor.split('/statuses/')[0]

    if debug:
        print('DEBUG: Obtaining actor for reply post ' + lookupActor)

    # retry a few times, since the remote instance may be
    # temporarily unreachable
    for tries in range(6):
        pubKey = getPersonPubKey(baseDir, session, lookupActor,
                                 personCache, debug,
                                 __version__, httpPrefix,
                                 domain, onionDomain)
        if pubKey:
            print('DEBUG: public key obtained for reply: ' + lookupActor)
            break

        if debug:
            print('DEBUG: Retry ' + str(tries + 1) +
                  ' obtaining actor for ' + lookupActor)
        time.sleep(5)
|
2019-09-30 19:23:53 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _dmNotify(baseDir: str, handle: str, url: str) -> None:
|
2019-10-03 16:22:34 +00:00
|
|
|
"""Creates a notification that a new DM has arrived
|
|
|
|
"""
|
2020-04-03 16:27:34 +00:00
|
|
|
accountDir = baseDir + '/accounts/' + handle
|
2019-10-03 16:22:34 +00:00
|
|
|
if not os.path.isdir(accountDir):
|
|
|
|
return
|
2020-04-03 16:27:34 +00:00
|
|
|
dmFile = accountDir + '/.newDM'
|
2019-10-03 16:22:34 +00:00
|
|
|
if not os.path.isfile(dmFile):
|
2020-07-12 20:04:58 +00:00
|
|
|
with open(dmFile, 'w+') as fp:
|
2019-10-06 15:07:40 +00:00
|
|
|
fp.write(url)
|
2019-10-03 16:22:34 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _alreadyLiked(baseDir: str, nickname: str, domain: str,
                  postUrl: str, likerActor: str) -> bool:
    """Is the given post already liked by the given handle?
    """
    postFilename = locatePost(baseDir, nickname, domain, postUrl)
    if not postFilename:
        return False
    postJsonObject = loadJson(postFilename, 1)
    if not postJsonObject:
        return False
    wrappedObject = postJsonObject.get('object')
    if not wrappedObject:
        return False
    if not isinstance(wrappedObject, dict):
        return False
    if not wrappedObject.get('likes'):
        return False
    if not wrappedObject['likes'].get('items'):
        return False
    # scan the likes collection for an entry by this actor
    for likeItem in wrappedObject['likes']['items']:
        if not likeItem.get('type'):
            continue
        if not likeItem.get('actor'):
            continue
        if likeItem['type'] != 'Like':
            continue
        if likeItem['actor'] == likerActor:
            return True
    return False
|
|
|
|
|
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _likeNotify(baseDir: str, domain: str, onionDomain: str,
                handle: str, actor: str, url: str) -> None:
    """Creates a notification that a like has arrived

    Writes the liker handle and liked url to a .newLike marker file
    within the recipient's account directory, if like notifications
    are enabled and this like has not already been notified.
    """
    # This is not you liking your own post
    if actor in url:
        return

    # check that the liked post was by this handle
    nickname = handle.split('@')[0]
    if '/' + domain + '/users/' + nickname not in url:
        if not onionDomain:
            return
        if '/' + onionDomain + '/users/' + nickname not in url:
            return

    accountDir = baseDir + '/accounts/' + handle

    # are like notifications enabled?
    notifyLikesEnabledFilename = accountDir + '/.notifyLikes'
    if not os.path.isfile(notifyLikesEnabledFilename):
        return

    likeFile = accountDir + '/.newLike'
    if os.path.isfile(likeFile):
        # BUGFIX: the previous bare open() leaked a file handle
        with open(likeFile, 'r') as fp:
            existingNotification = fp.read()
        # don't overwrite a notification which has not yet been sent
        if '##sent##' not in existingNotification:
            return

    likerNickname = getNicknameFromActor(actor)
    likerDomain, likerPort = getDomainFromActor(actor)
    if likerNickname and likerDomain:
        likerHandle = likerNickname + '@' + likerDomain
    else:
        print('_likeNotify likerHandle: ' +
              str(likerNickname) + '@' + str(likerDomain))
        likerHandle = actor
    if likerHandle != handle:
        likeStr = likerHandle + ' ' + url + '?likedBy=' + actor
        prevLikeFile = accountDir + '/.prevLike'
        # was there a previous like notification?
        if os.path.isfile(prevLikeFile):
            # is it the same as the current notification ?
            with open(prevLikeFile, 'r') as fp:
                prevLikeStr = fp.read()
                if prevLikeStr == likeStr:
                    return
        try:
            with open(prevLikeFile, 'w+') as fp:
                fp.write(likeStr)
        except BaseException:
            print('ERROR: unable to save previous like notification ' +
                  prevLikeFile)
            pass
        try:
            with open(likeFile, 'w+') as fp:
                fp.write(likeStr)
        except BaseException:
            print('ERROR: unable to write like notification file ' +
                  likeFile)
            pass
|
2020-07-08 19:49:15 +00:00
|
|
|
|
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _replyNotify(baseDir: str, handle: str, url: str) -> None:
|
2019-10-03 16:37:25 +00:00
|
|
|
"""Creates a notification that a new reply has arrived
|
|
|
|
"""
|
2020-04-03 16:27:34 +00:00
|
|
|
accountDir = baseDir + '/accounts/' + handle
|
2019-10-03 16:37:25 +00:00
|
|
|
if not os.path.isdir(accountDir):
|
|
|
|
return
|
2020-04-03 16:27:34 +00:00
|
|
|
replyFile = accountDir + '/.newReply'
|
2019-10-03 16:37:25 +00:00
|
|
|
if not os.path.isfile(replyFile):
|
2020-07-12 20:04:58 +00:00
|
|
|
with open(replyFile, 'w+') as fp:
|
2019-10-06 15:11:10 +00:00
|
|
|
fp.write(url)
|
2019-10-03 16:37:25 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _gitPatchNotify(baseDir: str, handle: str,
|
|
|
|
subject: str, content: str,
|
|
|
|
fromNickname: str, fromDomain: str) -> None:
|
2020-05-02 11:20:57 +00:00
|
|
|
"""Creates a notification that a new git patch has arrived
|
|
|
|
"""
|
|
|
|
accountDir = baseDir + '/accounts/' + handle
|
|
|
|
if not os.path.isdir(accountDir):
|
|
|
|
return
|
|
|
|
patchFile = accountDir + '/.newPatch'
|
2020-05-03 09:58:51 +00:00
|
|
|
subject = subject.replace('[PATCH]', '').strip()
|
|
|
|
handle = '@' + fromNickname + '@' + fromDomain
|
2020-07-12 20:04:58 +00:00
|
|
|
with open(patchFile, 'w+') as fp:
|
2020-05-03 09:58:51 +00:00
|
|
|
fp.write('git ' + handle + ' ' + subject)
|
2020-05-02 17:16:24 +00:00
|
|
|
|
2020-05-02 11:20:57 +00:00
|
|
|
|
2020-12-22 18:06:23 +00:00
|
|
|
def _groupHandle(baseDir: str, handle: str) -> bool:
    """Is the given account handle a group?

    Returns True only when the stored actor file exists, loads,
    and has type 'Group'.
    """
    actorFile = baseDir + '/accounts/' + handle + '.json'
    if not os.path.isfile(actorFile):
        return False
    actorJson = loadJson(actorFile)
    if not actorJson:
        return False
    # use get() so that an actor without a 'type' field
    # does not raise KeyError
    return actorJson.get('type') == 'Group'
2020-12-22 18:06:23 +00:00
|
|
|
def _getGroupName(baseDir: str, handle: str) -> str:
    """Returns the preferred name of a group

    Returns an empty string if the actor file does not exist,
    and the generic name 'Group' if the actor has no name field.
    """
    actorFile = baseDir + '/accounts/' + handle + '.json'
    if not os.path.isfile(actorFile):
        # previously returned False, which contradicts the declared
        # str return type; an empty string is equally falsy
        return ''
    actorJson = loadJson(actorFile)
    if not actorJson:
        return 'Group'
    # avoid KeyError if the actor lacks a 'name' field
    return actorJson.get('name', 'Group')
2020-12-22 18:06:23 +00:00
|
|
|
def _sendToGroupMembers(session, baseDir: str, handle: str, port: int,
                        postJsonObject: {},
                        httpPrefix: str, federationList: [],
                        sendThreads: [], postLog: [], cachedWebfingers: {},
                        personCache: {}, debug: bool) -> None:
    """When a post arrives for a group send it out to the group members

    The post is rewritten to appear from the group account, with the
    original sender mentioned at the start of the content, then sent
    to every handle listed in the group's followers file.
    """
    followersFile = baseDir + '/accounts/' + handle + '/followers.txt'
    if not os.path.isfile(followersFile):
        return
    if not postJsonObject.get('object'):
        return
    nickname = handle.split('@')[0]
    # groupname = _getGroupName(baseDir, handle)
    domain = handle.split('@')[1]
    domainFull = getFullDomain(domain, port)

    # set sender
    cc = ''
    sendingActor = postJsonObject['actor']
    sendingActorNickname = getNicknameFromActor(sendingActor)
    sendingActorDomain, sendingActorPort = \
        getDomainFromActor(sendingActor)
    sendingActorDomainFull = \
        getFullDomain(sendingActorDomain, sendingActorPort)
    senderStr = '@' + sendingActorNickname + '@' + sendingActorDomainFull
    # prefix the content with a mention of the original sender
    if not postJsonObject['object']['content'].startswith(senderStr):
        postJsonObject['object']['content'] = \
            senderStr + ' ' + postJsonObject['object']['content']
        # add mention to tag list
        if not postJsonObject['object']['tag']:
            postJsonObject['object']['tag'] = []
        # check if the mention already exists
        mentionExists = False
        for mention in postJsonObject['object']['tag']:
            if mention['type'] == 'Mention':
                if mention.get('href'):
                    if mention['href'] == sendingActor:
                        mentionExists = True
        if not mentionExists:
            # add the mention of the original sender
            postJsonObject['object']['tag'].append({
                'href': sendingActor,
                'name': senderStr,
                'type': 'Mention'
            })

    # readdress the post so that it comes from the group account
    postJsonObject['actor'] = \
        httpPrefix + '://' + domainFull + '/users/' + nickname
    postJsonObject['to'] = \
        [postJsonObject['actor'] + '/followers']
    postJsonObject['cc'] = [cc]
    postJsonObject['object']['to'] = postJsonObject['to']
    postJsonObject['object']['cc'] = [cc]
    # set subject
    if not postJsonObject['object'].get('summary'):
        postJsonObject['object']['summary'] = 'General Discussion'
    if ':' in domain:
        domain = domain.split(':')[0]
    with open(followersFile, 'r') as groupMembers:
        for memberHandle in groupMembers:
            # strip the line terminator, otherwise the member's domain
            # (or port) would carry a trailing newline and the
            # self-comparison below could never match
            memberHandle = memberHandle.strip()
            if not memberHandle:
                continue
            if memberHandle != handle:
                memberNickname = memberHandle.split('@')[0]
                memberDomain = memberHandle.split('@')[1]
                memberPort = port
                if ':' in memberDomain:
                    memberPortStr = memberDomain.split(':')[1]
                    if memberPortStr.isdigit():
                        memberPort = int(memberPortStr)
                    memberDomain = memberDomain.split(':')[0]
                sendSignedJson(postJsonObject, session, baseDir,
                               nickname, domain, port,
                               memberNickname, memberDomain, memberPort, cc,
                               httpPrefix, False, False, federationList,
                               sendThreads, postLog, cachedWebfingers,
                               personCache, debug, __version__)
2020-12-22 18:06:23 +00:00
|
|
|
def _inboxUpdateCalendar(baseDir: str, handle: str,
                         postJsonObject: {}) -> None:
    """Detects whether the tag list on a post contains calendar events
    and if so saves the post id to a file in the calendar directory
    for the account
    """
    if not postJsonObject.get('actor'):
        return
    if not postJsonObject.get('object'):
        return
    if not isinstance(postJsonObject['object'], dict):
        return
    if not postJsonObject['object'].get('tag'):
        return
    if not isinstance(postJsonObject['object']['tag'], list):
        return
    # the post id is used below, so guard it like the other fields
    # rather than risking a KeyError
    if not postJsonObject.get('id'):
        return

    actor = postJsonObject['actor']
    actorNickname = getNicknameFromActor(actor)
    actorDomain, actorPort = getDomainFromActor(actor)
    handleNickname = handle.split('@')[0]
    handleDomain = handle.split('@')[1]
    # does this account accept calendar events from that actor?
    if not receivingCalendarEvents(baseDir,
                                   handleNickname, handleDomain,
                                   actorNickname, actorDomain):
        return

    # post id with slashes replaced so it can be used as a filename
    postId = removeIdEnding(postJsonObject['id']).replace('/', '#')

    # look for events within the tags list
    for tagDict in postJsonObject['object']['tag']:
        if not tagDict.get('type'):
            continue
        if tagDict['type'] != 'Event':
            continue
        if not tagDict.get('startTime'):
            continue
        saveEventPost(baseDir, handle, postId, tagDict)
|
|
|
|
def inboxUpdateIndex(boxname: str, baseDir: str, handle: str,
                     destinationFilename: str, debug: bool) -> bool:
    """Updates the index of received posts
    The new entry is added to the top of the file

    Returns True if the index now contains the entry.
    """
    indexFilename = baseDir + '/accounts/' + handle + '/' + boxname + '.index'
    if debug:
        print('DEBUG: Updating index ' + indexFilename)

    # reduce the destination to the part after the box directory
    if '/' + boxname + '/' in destinationFilename:
        destinationFilename = destinationFilename.split('/' + boxname + '/')[1]

    # remove the path
    if '/' in destinationFilename:
        destinationFilename = destinationFilename.split('/')[-1]

    if os.path.isfile(indexFilename):
        try:
            with open(indexFilename, 'r+') as indexFile:
                content = indexFile.read()
                if destinationFilename + '\n' not in content:
                    # prepend the new entry to the existing index
                    indexFile.seek(0, 0)
                    indexFile.write(destinationFilename + '\n' + content)
                return True
        except Exception as e:
            print('WARN: Failed to write entry to index ' + str(e))
    else:
        try:
            # create the index with its first entry; a context manager
            # guarantees the file is closed even on error
            with open(indexFilename, 'w+') as indexFile:
                indexFile.write(destinationFilename + '\n')
            # previously this branch fell through to return False even
            # on success, causing callers to log a spurious error for
            # the first post arriving in a box
            return True
        except Exception as e:
            print('WARN: Failed to write initial entry to index ' + str(e))

    return False
2020-12-22 18:06:23 +00:00
|
|
|
def _updateLastSeen(baseDir: str, handle: str, actor: str) -> None:
    """Updates the time when the given handle last saw the given actor
    This can later be used to indicate if accounts are dormant/abandoned/moved
    """
    if '@' not in handle:
        return
    nickname = handle.split('@')[0]
    domain = handle.split('@')[1]
    if ':' in domain:
        domain = domain.split(':')[0]
    accountPath = baseDir + '/accounts/' + nickname + '@' + domain
    if not os.path.isdir(accountPath):
        return
    # only track actors which this account follows
    if not isFollowingActor(baseDir, nickname, domain, actor):
        return
    lastSeenPath = accountPath + '/lastseen'
    if not os.path.isdir(lastSeenPath):
        os.mkdir(lastSeenPath)
    # one file per actor, slashes made filename-safe
    lastSeenFilename = lastSeenPath + '/' + actor.replace('/', '#') + '.txt'
    currTime = datetime.datetime.utcnow()
    daysSinceEpoch = (currTime - datetime.datetime(1970, 1, 1)).days
    # has the value changed?
    if os.path.isfile(lastSeenFilename):
        with open(lastSeenFilename, 'r') as lastSeenFile:
            daysSinceEpochFile = lastSeenFile.read()
        try:
            if int(daysSinceEpochFile) == daysSinceEpoch:
                # value hasn't changed, so we can save writing anything to file
                return
        except ValueError:
            # corrupted lastseen file content; fall through and rewrite it
            print('WARN: corrupted lastseen file ' + lastSeenFilename)
    with open(lastSeenFilename, 'w+') as lastSeenFile:
        lastSeenFile.write(str(daysSinceEpoch))
2020-12-22 18:06:23 +00:00
|
|
|
def _inboxAfterInitial(recentPostsCache: {}, maxRecentPosts: int,
                       session, keyId: str, handle: str, messageJson: {},
                       baseDir: str, httpPrefix: str, sendThreads: [],
                       postLog: [], cachedWebfingers: {}, personCache: {},
                       queue: [], domain: str,
                       onionDomain: str, i2pDomain: str,
                       port: int, proxyType: str,
                       federationList: [], debug: bool,
                       queueFilename: str, destinationFilename: str,
                       maxReplies: int, allowDeletion: bool,
                       maxMentions: int, maxEmoji: int, translate: {},
                       unitTest: bool, YTReplacementDomain: str,
                       showPublishedDateOnly: bool,
                       allowLocalNetworkAccess: bool,
                       peertubeInstances: []) -> bool:
    """ Anything which needs to be done after initial checks have passed

    Dispatches likes/bookmarks/announces/deletes to their handlers,
    then validates the post content, stores the post, updates the
    relevant timeline indexes, notifications and the calendar, and
    forwards group posts to group members.
    Returns True when the post was stored at destinationFilename.
    """
    # keyId may carry a key fragment (text after '#');
    # only the actor part is wanted here
    actor = keyId
    if '#' in actor:
        actor = keyId.split('#')[0]

    # record that this account has seen activity from the actor
    _updateLastSeen(baseDir, handle, actor)

    isGroup = _groupHandle(baseDir, handle)

    # each _receiveX handler returns True when it has consumed the
    # message, in which case this queue item needs no further processing
    if _receiveLike(recentPostsCache,
                    session, handle, isGroup,
                    baseDir, httpPrefix,
                    domain, port,
                    onionDomain,
                    sendThreads, postLog,
                    cachedWebfingers,
                    personCache,
                    messageJson,
                    federationList,
                    debug):
        if debug:
            print('DEBUG: Like accepted from ' + actor)
        return False

    if _receiveUndoLike(recentPostsCache,
                        session, handle, isGroup,
                        baseDir, httpPrefix,
                        domain, port,
                        sendThreads, postLog,
                        cachedWebfingers,
                        personCache,
                        messageJson,
                        federationList,
                        debug):
        if debug:
            print('DEBUG: Undo like accepted from ' + actor)
        return False

    if _receiveBookmark(recentPostsCache,
                        session, handle, isGroup,
                        baseDir, httpPrefix,
                        domain, port,
                        sendThreads, postLog,
                        cachedWebfingers,
                        personCache,
                        messageJson,
                        federationList,
                        debug):
        if debug:
            print('DEBUG: Bookmark accepted from ' + actor)
        return False

    if _receiveUndoBookmark(recentPostsCache,
                            session, handle, isGroup,
                            baseDir, httpPrefix,
                            domain, port,
                            sendThreads, postLog,
                            cachedWebfingers,
                            personCache,
                            messageJson,
                            federationList,
                            debug):
        if debug:
            print('DEBUG: Undo bookmark accepted from ' + actor)
        return False

    # NOTE(review): unlike the other handlers, an accepted announce does
    # not return here, so processing continues below - confirm intended
    if _receiveAnnounce(recentPostsCache,
                        session, handle, isGroup,
                        baseDir, httpPrefix,
                        domain, onionDomain, port,
                        sendThreads, postLog,
                        cachedWebfingers,
                        personCache,
                        messageJson,
                        federationList,
                        debug, translate,
                        YTReplacementDomain,
                        allowLocalNetworkAccess):
        if debug:
            print('DEBUG: Announce accepted from ' + actor)

    if _receiveUndoAnnounce(recentPostsCache,
                            session, handle, isGroup,
                            baseDir, httpPrefix,
                            domain, port,
                            sendThreads, postLog,
                            cachedWebfingers,
                            personCache,
                            messageJson,
                            federationList,
                            debug):
        if debug:
            print('DEBUG: Undo announce accepted from ' + actor)
        return False

    if _receiveDelete(session, handle, isGroup,
                      baseDir, httpPrefix,
                      domain, port,
                      sendThreads, postLog,
                      cachedWebfingers,
                      personCache,
                      messageJson,
                      federationList,
                      debug, allowDeletion,
                      recentPostsCache):
        if debug:
            print('DEBUG: Delete accepted from ' + actor)
        return False

    if debug:
        print('DEBUG: initial checks passed')
        print('copy queue file from ' + queueFilename +
              ' to ' + destinationFilename)

    # already stored, nothing more to do
    if os.path.isfile(destinationFilename):
        return True

    # locally-created queue items wrap the post within 'post'
    if messageJson.get('postNickname'):
        postJsonObject = messageJson['post']
    else:
        postJsonObject = messageJson

    nickname = handle.split('@')[0]
    if _validPostContent(baseDir, nickname, domain,
                         postJsonObject, maxMentions, maxEmoji,
                         allowLocalNetworkAccess):

        # jsonObj is the post's object when it is a dict, otherwise None,
        # or the post itself when there is no object field
        if postJsonObject.get('object'):
            jsonObj = postJsonObject['object']
            if not isinstance(jsonObj, dict):
                jsonObj = None
        else:
            jsonObj = postJsonObject
        # check for incoming git patches
        if jsonObj:
            if jsonObj.get('content') and \
               jsonObj.get('summary') and \
               jsonObj.get('attributedTo'):
                attributedTo = jsonObj['attributedTo']
                if isinstance(attributedTo, str):
                    fromNickname = getNicknameFromActor(attributedTo)
                    fromDomain, fromPort = getDomainFromActor(attributedTo)
                    fromDomain = getFullDomain(fromDomain, fromPort)
                    if receiveGitPatch(baseDir, nickname, domain,
                                       jsonObj['type'],
                                       jsonObj['summary'],
                                       jsonObj['content'],
                                       fromNickname, fromDomain):
                        _gitPatchNotify(baseDir, handle,
                                        jsonObj['summary'],
                                        jsonObj['content'],
                                        fromNickname, fromDomain)
                    elif '[PATCH]' in jsonObj['content']:
                        print('WARN: git patch not accepted - ' +
                              jsonObj['summary'])
                        return False

        # replace YouTube links, so they get less tracking data
        replaceYouTube(postJsonObject, YTReplacementDomain)

        # list of indexes to be updated
        updateIndexList = ['inbox']
        populateReplies(baseDir, httpPrefix, domain, postJsonObject,
                        maxReplies, debug)

        # if this is a reply to a question then update the votes
        questionJson = questionUpdateVotes(baseDir, nickname, domain,
                                           postJsonObject)
        if questionJson:
            # Is this a question created by this instance?
            idPrefix = httpPrefix + '://' + domain
            if questionJson['object']['id'].startswith(idPrefix):
                # if the votes on a question have changed then
                # send out an update
                questionJson['type'] = 'Update'
                # NOTE(review): postJsonObject (not questionJson) is sent
                # here - confirm that is the intended payload
                sendToFollowersThread(session, baseDir,
                                      nickname, domain,
                                      onionDomain, i2pDomain, port,
                                      httpPrefix, federationList,
                                      sendThreads, postLog,
                                      cachedWebfingers, personCache,
                                      postJsonObject, debug,
                                      __version__)

        isReplyToMutedPost = False

        if not isGroup:
            # create a DM notification file if needed
            postIsDM = isDM(postJsonObject)
            if postIsDM:
                if nickname != 'inbox':
                    # if this account only accepts DMs from accounts
                    # it follows, check the sender against following.txt
                    followDMsFilename = \
                        baseDir + '/accounts/' + \
                        nickname + '@' + domain + '/.followDMs'
                    if os.path.isfile(followDMsFilename):
                        followingFilename = \
                            baseDir + '/accounts/' + \
                            nickname + '@' + domain + '/following.txt'
                        if not postJsonObject.get('actor'):
                            return False
                        sendingActor = postJsonObject['actor']
                        sendingActorNickname = \
                            getNicknameFromActor(sendingActor)
                        sendingActorDomain, sendingActorPort = \
                            getDomainFromActor(sendingActor)
                        if sendingActorNickname and sendingActorDomain:
                            if not os.path.isfile(followingFilename):
                                print('No following.txt file exists for ' +
                                      nickname + '@' + domain +
                                      ' so not accepting DM from ' +
                                      sendingActorNickname + '@' +
                                      sendingActorDomain)
                                return False
                            sendH = \
                                sendingActorNickname + '@' + sendingActorDomain
                            if sendH != nickname + '@' + domain:
                                if sendH not in \
                                   open(followingFilename).read():
                                    print(nickname + '@' + domain +
                                          ' cannot receive DM from ' +
                                          sendH +
                                          ' because they do not ' +
                                          'follow them')
                                    return False
                        else:
                            return False
                    # dm index will be updated
                    updateIndexList.append('dm')
                    _dmNotify(baseDir, handle,
                              httpPrefix + '://' + domain + '/users/' +
                              nickname + '/dm')

            # get the actor being replied to
            domainFull = getFullDomain(domain, port)
            actor = httpPrefix + '://' + domainFull + \
                '/users/' + handle.split('@')[0]

            # create a reply notification file if needed
            if not postIsDM and isReply(postJsonObject, actor):
                if nickname != 'inbox':
                    # replies index will be updated
                    updateIndexList.append('tlreplies')
                    if postJsonObject['object'].get('inReplyTo'):
                        inReplyTo = postJsonObject['object']['inReplyTo']
                        if inReplyTo:
                            if isinstance(inReplyTo, str):
                                if not isMuted(baseDir, nickname, domain,
                                               inReplyTo):
                                    _replyNotify(baseDir, handle,
                                                 httpPrefix + '://' + domain +
                                                 '/users/' + nickname +
                                                 '/tlreplies')
                                else:
                                    isReplyToMutedPost = True

            if isImageMedia(session, baseDir, httpPrefix,
                            nickname, domain, postJsonObject,
                            translate, YTReplacementDomain,
                            allowLocalNetworkAccess):
                # media index will be updated
                updateIndexList.append('tlmedia')
            if isBlogPost(postJsonObject):
                # blogs index will be updated
                updateIndexList.append('tlblogs')
            elif isEventPost(postJsonObject):
                # events index will be updated
                updateIndexList.append('tlevents')

        # get the avatar for a reply/announce
        _obtainAvatarForReplyPost(session, baseDir,
                                  httpPrefix, domain, onionDomain,
                                  personCache, postJsonObject, debug)

        # save the post to file
        if saveJson(postJsonObject, destinationFilename):
            # If this is a reply to a muted post then also mute it.
            # This enables you to ignore a threat that's getting boring
            if isReplyToMutedPost:
                print('MUTE REPLY: ' + destinationFilename)
                muteFile = open(destinationFilename + '.muted', 'w+')
                if muteFile:
                    muteFile.write('\n')
                    muteFile.close()

            # update the indexes for different timelines
            for boxname in updateIndexList:
                if not inboxUpdateIndex(boxname, baseDir, handle,
                                        destinationFilename, debug):
                    print('ERROR: unable to update ' + boxname + ' index')
                else:
                    if not unitTest:
                        if debug:
                            print('Saving inbox post as html to cache')

                        htmlCacheStartTime = time.time()
                        handleName = handle.split('@')[0]
                        _inboxStorePostToHtmlCache(recentPostsCache,
                                                   maxRecentPosts,
                                                   translate, baseDir,
                                                   httpPrefix,
                                                   session, cachedWebfingers,
                                                   personCache,
                                                   handleName,
                                                   domain, port,
                                                   postJsonObject,
                                                   allowDeletion,
                                                   boxname,
                                                   showPublishedDateOnly,
                                                   peertubeInstances,
                                                   allowLocalNetworkAccess)
                        if debug:
                            timeDiff = \
                                str(int((time.time() - htmlCacheStartTime) *
                                        1000))
                            print('Saved ' + boxname +
                                  ' post as html to cache in ' +
                                  timeDiff + ' mS')

            # add any calendar events within the post to the calendar
            _inboxUpdateCalendar(baseDir, handle, postJsonObject)

            handleName = handle.split('@')[0]
            storeHashTags(baseDir, handleName, postJsonObject)

            # send the post out to group members
            if isGroup:
                _sendToGroupMembers(session, baseDir, handle, port,
                                    postJsonObject,
                                    httpPrefix, federationList, sendThreads,
                                    postLog, cachedWebfingers, personCache,
                                    debug)

    # if the post wasn't saved
    if not os.path.isfile(destinationFilename):
        return False

    return True
2020-05-22 11:48:13 +00:00
|
|
|
def clearQueueItems(baseDir: str, queue: []) -> None:
    """Clears the queue for each account

    Removes all queued item files on disk and empties the
    in-memory queue list.
    """
    ctr = 0
    queue.clear()
    for subdir, dirs, files in os.walk(baseDir + '/accounts'):
        for account in dirs:
            queueDir = baseDir + '/accounts/' + account + '/queue'
            if not os.path.isdir(queueDir):
                continue
            for queuesubdir, queuedirs, queuefiles in os.walk(queueDir):
                for qfile in queuefiles:
                    try:
                        os.remove(os.path.join(queueDir, qfile))
                        ctr += 1
                    except OSError:
                        # the file may have been removed already
                        pass
                # only the top level of each queue directory is needed
                break
        # accounts only exist at the top level of the accounts directory,
        # so don't descend into every account's subdirectories
        break
    if ctr > 0:
        print('Removed ' + str(ctr) + ' inbox queue items')
2020-12-22 18:06:23 +00:00
|
|
|
def _restoreQueueItems(baseDir: str, queue: []) -> None:
|
2019-07-12 21:09:23 +00:00
|
|
|
"""Checks the queue for each account and appends filenames
|
|
|
|
"""
|
2019-08-15 16:45:07 +00:00
|
|
|
queue.clear()
|
2020-04-03 16:27:34 +00:00
|
|
|
for subdir, dirs, files in os.walk(baseDir + '/accounts'):
|
2019-07-12 21:09:23 +00:00
|
|
|
for account in dirs:
|
2020-04-03 16:27:34 +00:00
|
|
|
queueDir = baseDir + '/accounts/' + account + '/queue'
|
2020-06-02 09:05:55 +00:00
|
|
|
if not os.path.isdir(queueDir):
|
|
|
|
continue
|
|
|
|
for queuesubdir, queuedirs, queuefiles in os.walk(queueDir):
|
|
|
|
for qfile in queuefiles:
|
|
|
|
queue.append(os.path.join(queueDir, qfile))
|
2020-12-13 22:13:45 +00:00
|
|
|
break
|
2020-04-03 16:27:34 +00:00
|
|
|
if len(queue) > 0:
|
|
|
|
print('Restored ' + str(len(queue)) + ' inbox queue items')
|
2019-09-02 21:52:43 +00:00
|
|
|
|
2020-04-03 16:27:34 +00:00
|
|
|
|
|
|
|
def runInboxQueueWatchdog(projectVersion: str, httpd) -> None:
    """This tries to keep the inbox thread running even if it dies

    Starts the inbox queue thread, then polls it every 20 seconds.
    If the thread has died, or a restart was requested via
    httpd.restartInboxQueue, the thread is killed and recreated from
    a pristine clone, and pending in-memory queue items are discarded.
    This function never returns.
    """
    print('Starting inbox queue watchdog')
    # keep an unstarted clone so a fresh thread can be created later
    # (clone/kill come from the project's thread wrapper - semantics
    # assumed from usage, confirm against threadsWithTrace)
    inboxQueueOriginal = httpd.thrInboxQueue.clone(runInboxQueue)
    httpd.thrInboxQueue.start()
    while True:
        time.sleep(20)
        if not httpd.thrInboxQueue.is_alive() or httpd.restartInboxQueue:
            # flag the restart so other code can avoid using the
            # thread while it is being replaced
            httpd.restartInboxQueueInProgress = True
            httpd.thrInboxQueue.kill()
            httpd.thrInboxQueue = inboxQueueOriginal.clone(runInboxQueue)
            # drop any pending in-memory queue items; items on disk are
            # reloaded by the queue thread itself
            httpd.inboxQueue.clear()
            httpd.thrInboxQueue.start()
            print('Restarting inbox queue...')
            httpd.restartInboxQueueInProgress = False
            httpd.restartInboxQueue = False
2020-04-03 16:27:34 +00:00
|
|
|
|
|
|
|
def runInboxQueue(recentPostsCache: {}, maxRecentPosts: int,
                  projectVersion: str,
                  baseDir: str, httpPrefix: str, sendThreads: [], postLog: [],
                  cachedWebfingers: {}, personCache: {}, queue: [],
                  domain: str,
                  onionDomain: str, i2pDomain: str, port: int, proxyType: str,
                  federationList: [], maxReplies: int,
                  domainMaxPostsPerDay: int, accountMaxPostsPerDay: int,
                  allowDeletion: bool, debug: bool, maxMentions: int,
                  maxEmoji: int, translate: {}, unitTest: bool,
                  YTReplacementDomain: str,
                  showPublishedDateOnly: bool,
                  maxFollowers: int, allowLocalNetworkAccess: bool,
                  peertubeInstances: [],
                  verifyAllSignatures: bool) -> None:
    """Processes received items and moves them to the appropriate
    directories.

    Runs forever as the inbox queue thread. Each pass takes the oldest
    queue item (a json file produced when a POST arrived) and, in order:
      * enforces per-domain and per-account receive quotas
        (domainMaxPostsPerDay / accountMaxPostsPerDay, with a derived
        per-minute limit)
      * obtains the sender's public key and verifies the http header
        signature, and any jsonld RsaSignature2017 signature
        (strictly, if verifyAllSignatures is enabled)
      * handles Undo / Follow / Accept / Reject / Event / Update
        activities directly
      * otherwise resolves the local recipients and delivers the post
        to each account inbox via _inboxAfterInitial

    Items failing any check are popped from the queue and their files
    deleted. This function does not return.
    """
    # timestamps used to decide when the http session must be recreated
    currSessionTime = int(time.time())
    sessionLastUpdate = currSessionTime
    print('Starting new session when starting inbox queue')
    session = createSession(proxyType)
    # handle of the shared inbox for this instance
    inboxHandle = 'inbox@' + domain
    if debug:
        print('DEBUG: Inbox queue running')

    # if queue processing was interrupted (eg server crash)
    # then this loads any outstanding items back into the queue
    _restoreQueueItems(baseDir, queue)

    # keep track of numbers of incoming posts per day
    quotasLastUpdateDaily = int(time.time())
    quotasDaily = {
        'domains': {},
        'accounts': {}
    }
    # separate counters for the per-minute rate limit
    quotasLastUpdatePerMin = int(time.time())
    quotasPerMin = {
        'domains': {},
        'accounts': {}
    }

    # loop counters: heartbeat logging and periodic queue restore
    heartBeatCtr = 0
    queueRestoreCtr = 0

    while True:
        time.sleep(1)

        # heartbeat to monitor whether the inbox queue is running
        heartBeatCtr += 5
        if heartBeatCtr >= 10:
            # turn off broch mode after it has timed out
            brochModeLapses(baseDir)
            print('>>> Heartbeat Q:' + str(len(queue)) + ' ' +
                  '{:%F %T}'.format(datetime.datetime.now()))
            heartBeatCtr = 0

        if len(queue) == 0:
            # restore any remaining queue items
            # (roughly every 30 idle iterations)
            queueRestoreCtr += 1
            if queueRestoreCtr >= 30:
                queueRestoreCtr = 0
                _restoreQueueItems(baseDir, queue)
            continue

        currTime = int(time.time())

        # recreate the session periodically (every 6 hours)
        if not session or currTime - sessionLastUpdate > 21600:
            print('Regenerating inbox queue session at 6hr interval')
            session = createSession(proxyType)
            if not session:
                continue
            sessionLastUpdate = currTime

        # oldest item first
        queue.sort()
        queueFilename = queue[0]
        if not os.path.isfile(queueFilename):
            # the file was removed out from under us; drop the entry
            print("Queue: queue item rejected because it has no file: " +
                  queueFilename)
            if len(queue) > 0:
                queue.pop(0)
            continue

        print('Loading queue item ' + queueFilename)

        # Load the queue json
        queueJson = loadJson(queueFilename, 1)
        if not queueJson:
            print('Queue: runInboxQueue failed to load inbox queue item ' +
                  queueFilename)
            # Assume that the file is probably corrupt/unreadable
            if len(queue) > 0:
                queue.pop(0)
            # delete the queue file
            if os.path.isfile(queueFilename):
                try:
                    os.remove(queueFilename)
                except BaseException:
                    pass
            continue

        # clear the daily quotas for maximum numbers of received posts
        if currTime - quotasLastUpdateDaily > 60 * 60 * 24:
            quotasDaily = {
                'domains': {},
                'accounts': {}
            }
            quotasLastUpdateDaily = currTime

        if currTime - quotasLastUpdatePerMin > 60:
            # clear the per minute quotas for maximum numbers of received posts
            quotasPerMin = {
                'domains': {},
                'accounts': {}
            }
            # also check if the json signature enforcement has changed
            verifyAllSigs = getConfigParam(baseDir, "verifyAllSignatures")
            if verifyAllSigs is not None:
                verifyAllSignatures = verifyAllSigs
            # change the last time that this was done
            quotasLastUpdatePerMin = currTime

        # limit the number of posts which can arrive per domain per day
        postDomain = queueJson['postDomain']
        if postDomain:
            if domainMaxPostsPerDay > 0:
                if quotasDaily['domains'].get(postDomain):
                    if quotasDaily['domains'][postDomain] > \
                            domainMaxPostsPerDay:
                        # daily quota exceeded: drop item and its file
                        print('Queue: Quota per day - Maximum posts for ' +
                              postDomain + ' reached (' +
                              str(domainMaxPostsPerDay) + ')')
                        if len(queue) > 0:
                            try:
                                os.remove(queueFilename)
                            except BaseException:
                                pass
                            queue.pop(0)
                        continue
                    quotasDaily['domains'][postDomain] += 1
                else:
                    quotasDaily['domains'][postDomain] = 1

                if quotasPerMin['domains'].get(postDomain):
                    # per-minute limit derived from the daily limit,
                    # with a floor of 5 posts/min
                    domainMaxPostsPerMin = \
                        int(domainMaxPostsPerDay / (24 * 60))
                    if domainMaxPostsPerMin < 5:
                        domainMaxPostsPerMin = 5
                    if quotasPerMin['domains'][postDomain] > \
                            domainMaxPostsPerMin:
                        print('Queue: Quota per min - Maximum posts for ' +
                              postDomain + ' reached (' +
                              str(domainMaxPostsPerMin) + ')')
                        if len(queue) > 0:
                            try:
                                os.remove(queueFilename)
                            except BaseException:
                                pass
                            queue.pop(0)
                        continue
                    quotasPerMin['domains'][postDomain] += 1
                else:
                    quotasPerMin['domains'][postDomain] = 1

            if accountMaxPostsPerDay > 0:
                # same quota scheme, keyed on nickname@domain
                postHandle = queueJson['postNickname'] + '@' + postDomain
                if quotasDaily['accounts'].get(postHandle):
                    if quotasDaily['accounts'][postHandle] > \
                            accountMaxPostsPerDay:
                        print('Queue: Quota account posts per day -' +
                              ' Maximum posts for ' +
                              postHandle + ' reached (' +
                              str(accountMaxPostsPerDay) + ')')
                        if len(queue) > 0:
                            try:
                                os.remove(queueFilename)
                            except BaseException:
                                pass
                            queue.pop(0)
                        continue
                    quotasDaily['accounts'][postHandle] += 1
                else:
                    quotasDaily['accounts'][postHandle] = 1

                if quotasPerMin['accounts'].get(postHandle):
                    accountMaxPostsPerMin = \
                        int(accountMaxPostsPerDay / (24 * 60))
                    if accountMaxPostsPerMin < 5:
                        accountMaxPostsPerMin = 5
                    if quotasPerMin['accounts'][postHandle] > \
                            accountMaxPostsPerMin:
                        print('Queue: Quota account posts per min -' +
                              ' Maximum posts for ' +
                              postHandle + ' reached (' +
                              str(accountMaxPostsPerMin) + ')')
                        if len(queue) > 0:
                            try:
                                os.remove(queueFilename)
                            except BaseException:
                                pass
                            queue.pop(0)
                        continue
                    quotasPerMin['accounts'][postHandle] += 1
                else:
                    quotasPerMin['accounts'][postHandle] = 1

            if debug:
                if accountMaxPostsPerDay > 0 or domainMaxPostsPerDay > 0:
                    pprint(quotasDaily)

        if queueJson.get('actor'):
            print('Obtaining public key for actor ' + queueJson['actor'])

        # Try a few times to obtain the public key
        pubKey = None
        keyId = None
        for tries in range(8):
            keyId = None
            # extract keyId="..." from the http signature header
            signatureParams = \
                queueJson['httpHeaders']['signature'].split(',')
            for signatureItem in signatureParams:
                if signatureItem.startswith('keyId='):
                    if '"' in signatureItem:
                        keyId = signatureItem.split('"')[1]
                        break
            if not keyId:
                print('Queue: No keyId in signature: ' +
                      queueJson['httpHeaders']['signature'])
                pubKey = None
                break

            # fetch the public key (possibly from personCache)
            pubKey = \
                getPersonPubKey(baseDir, session, keyId,
                                personCache, debug,
                                projectVersion, httpPrefix,
                                domain, onionDomain)
            if pubKey:
                if debug:
                    print('DEBUG: public key: ' + str(pubKey))
                break

            if debug:
                print('DEBUG: Retry ' + str(tries+1) +
                      ' obtaining public key for ' + keyId)
            time.sleep(1)

        if not pubKey:
            # without a public key no signature can be verified;
            # discard the item
            print('Queue: public key could not be obtained from ' + keyId)
            if os.path.isfile(queueFilename):
                os.remove(queueFilename)
            if len(queue) > 0:
                queue.pop(0)
            continue

        # check the http header signature
        if debug:
            print('DEBUG: checking http header signature')
            pprint(queueJson['httpHeaders'])
        postStr = json.dumps(queueJson['post'])
        httpSignatureFailed = False
        if not verifyPostHeaders(httpPrefix,
                                 pubKey,
                                 queueJson['httpHeaders'],
                                 queueJson['path'], False,
                                 queueJson['digest'],
                                 postStr,
                                 debug):
            # not fatal yet: a valid jsonld signature below can still
            # rescue the post (eg. when delivered via a relay)
            httpSignatureFailed = True
            print('Queue: Header signature check failed')
            if debug:
                pprint(queueJson['httpHeaders'])
        else:
            if debug:
                print('DEBUG: http header signature check success')

        # check if a json signature exists on this post
        hasJsonSignature = False
        jwebsigType = None
        originalJson = queueJson['original']
        if originalJson.get('@context') and \
           originalJson.get('signature'):
            if isinstance(originalJson['signature'], dict):
                # see https://tools.ietf.org/html/rfc7515
                jwebsig = originalJson['signature']
                # signature exists and is of the expected type
                if jwebsig.get('type') and jwebsig.get('signatureValue'):
                    jwebsigType = jwebsig['type']
                    if jwebsigType == 'RsaSignature2017':
                        if hasValidContext(originalJson):
                            hasJsonSignature = True
                        else:
                            print('unrecognised @context: ' +
                                  str(originalJson['@context']))

        # strict enforcement of json signatures
        if not hasJsonSignature:
            if httpSignatureFailed:
                if jwebsigType:
                    print('Queue: Header signature check failed and does ' +
                          'not have a recognised jsonld signature type ' +
                          jwebsigType)
                else:
                    print('Queue: Header signature check failed and ' +
                          'does not have jsonld signature')
                if debug:
                    pprint(queueJson['httpHeaders'])

            if verifyAllSignatures:
                print('Queue: inbox post does not have a jsonld signature ' +
                      keyId + ' ' + str(originalJson))

            # reject when the header check failed with no jsonld
            # fallback, or when jsonld signatures are mandatory
            if httpSignatureFailed or verifyAllSignatures:
                if os.path.isfile(queueFilename):
                    os.remove(queueFilename)
                if len(queue) > 0:
                    queue.pop(0)
                continue
        else:
            if httpSignatureFailed or verifyAllSignatures:
                # use the original json message received, not one which
                # may have been modified along the way
                if not verifyJsonSignature(originalJson, pubKey):
                    if debug:
                        print('WARN: jsonld inbox signature check failed ' +
                              keyId + ' ' + pubKey + ' ' + str(originalJson))
                    else:
                        print('WARN: jsonld inbox signature check failed ' +
                              keyId)
                    if os.path.isfile(queueFilename):
                        os.remove(queueFilename)
                    if len(queue) > 0:
                        queue.pop(0)
                    continue
                else:
                    if httpSignatureFailed:
                        print('jsonld inbox signature check success ' +
                              'via relay ' + keyId)
                    else:
                        print('jsonld inbox signature check success ' + keyId)

        # set the id to the same as the post filename
        # This makes the filename and the id consistent
        # if queueJson['post'].get('id'):
        #     queueJson['post']['id'] = queueJson['id']

        # handle Undo activities (eg. unfollow, unlike)
        if _receiveUndo(session,
                        baseDir, httpPrefix, port,
                        sendThreads, postLog,
                        cachedWebfingers,
                        personCache,
                        queueJson['post'],
                        federationList,
                        debug):
            print('Queue: Undo accepted from ' + keyId)
            if os.path.isfile(queueFilename):
                os.remove(queueFilename)
            if len(queue) > 0:
                queue.pop(0)
            continue

        if debug:
            print('DEBUG: checking for follow requests')
        if receiveFollowRequest(session,
                                baseDir, httpPrefix, port,
                                sendThreads, postLog,
                                cachedWebfingers,
                                personCache,
                                queueJson['post'],
                                federationList,
                                debug, projectVersion,
                                maxFollowers):
            if os.path.isfile(queueFilename):
                os.remove(queueFilename)
            if len(queue) > 0:
                queue.pop(0)
            print('Queue: Follow activity for ' + keyId +
                  ' removed from queue')
            continue
        else:
            if debug:
                print('DEBUG: No follow requests')

        # Accept/Reject of follow requests sent by this instance
        if receiveAcceptReject(session,
                               baseDir, httpPrefix, domain, port,
                               sendThreads, postLog,
                               cachedWebfingers, personCache,
                               queueJson['post'],
                               federationList, debug):
            print('Queue: Accept/Reject received from ' + keyId)
            if os.path.isfile(queueFilename):
                os.remove(queueFilename)
            if len(queue) > 0:
                queue.pop(0)
            continue

        # incoming calendar/event posts
        if _receiveEventPost(recentPostsCache, session,
                             baseDir, httpPrefix,
                             domain, port,
                             sendThreads, postLog,
                             cachedWebfingers,
                             personCache,
                             queueJson['post'],
                             federationList,
                             queueJson['postNickname'],
                             debug):
            print('Queue: Event activity accepted from ' + keyId)
            if os.path.isfile(queueFilename):
                os.remove(queueFilename)
            if len(queue) > 0:
                queue.pop(0)
            continue

        # Update activities (edits to existing objects)
        if _receiveUpdate(recentPostsCache, session,
                          baseDir, httpPrefix,
                          domain, port,
                          sendThreads, postLog,
                          cachedWebfingers,
                          personCache,
                          queueJson['post'],
                          federationList,
                          queueJson['postNickname'],
                          debug):
            print('Queue: Update accepted from ' + keyId)
            if os.path.isfile(queueFilename):
                os.remove(queueFilename)
            if len(queue) > 0:
                queue.pop(0)
            continue

        # get recipients list
        recipientsDict, recipientsDictFollowers = \
            _inboxPostRecipients(baseDir, queueJson['post'],
                                 httpPrefix, domain, port, debug)
        if len(recipientsDict.items()) == 0 and \
           len(recipientsDictFollowers.items()) == 0:
            print('Queue: no recipients were resolved ' +
                  'for post arriving in inbox')
            if os.path.isfile(queueFilename):
                os.remove(queueFilename)
            if len(queue) > 0:
                queue.pop(0)
            continue

        # if there are only a small number of followers then
        # process them as if they were specifically
        # addresses to particular accounts
        noOfFollowItems = len(recipientsDictFollowers.items())
        if noOfFollowItems > 0:
            # always deliver to individual inboxes
            # NOTE(review): the 999999 threshold makes this branch
            # unconditional in practice
            if noOfFollowItems < 999999:
                if debug:
                    print('DEBUG: moving ' + str(noOfFollowItems) +
                          ' inbox posts addressed to followers')
                for handle, postItem in recipientsDictFollowers.items():
                    recipientsDict[handle] = postItem
                recipientsDictFollowers = {}
            # recipientsList = [recipientsDict, recipientsDictFollowers]

        if debug:
            print('*************************************')
            print('Resolved recipients list:')
            pprint(recipientsDict)
            print('Resolved followers list:')
            pprint(recipientsDictFollowers)
            print('*************************************')

        # Copy any posts addressed to followers into the shared inbox
        # this avoid copying file multiple times to potentially many
        # individual inboxes
        if len(recipientsDictFollowers) > 0:
            # NOTE(review): replace(inboxHandle, inboxHandle) is a
            # no-op, so this is just the destination path - confirm
            sharedInboxPostFilename = \
                queueJson['destination'].replace(inboxHandle, inboxHandle)
            if not os.path.isfile(sharedInboxPostFilename):
                saveJson(queueJson['post'], sharedInboxPostFilename)

        # for posts addressed to specific accounts
        for handle, capsId in recipientsDict.items():
            # per-recipient destination path, derived from the shared
            # inbox destination
            destination = \
                queueJson['destination'].replace(inboxHandle, handle)
            _inboxAfterInitial(recentPostsCache,
                               maxRecentPosts,
                               session, keyId, handle,
                               queueJson['post'],
                               baseDir, httpPrefix,
                               sendThreads, postLog,
                               cachedWebfingers,
                               personCache, queue,
                               domain,
                               onionDomain, i2pDomain,
                               port, proxyType,
                               federationList,
                               debug,
                               queueFilename, destination,
                               maxReplies, allowDeletion,
                               maxMentions, maxEmoji,
                               translate, unitTest,
                               YTReplacementDomain,
                               showPublishedDateOnly,
                               allowLocalNetworkAccess,
                               peertubeInstances)
            if debug:
                pprint(queueJson['post'])

        # fully processed: remove the item and move on
        print('Queue: Queue post accepted')
        if os.path.isfile(queueFilename):
            os.remove(queueFilename)
        if len(queue) > 0:
            queue.pop(0)