epicyon/daemon.py

__filename__ = "daemon.py"
__author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.1.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__status__ = "Production"
from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer, HTTPServer
import sys
import json
import time
import locale
import urllib.parse
import datetime
from socket import error as SocketError
import errno
from functools import partial
import pyqrcode
# for saving images
from hashlib import sha256
from hashlib import sha1
from session import createSession
from webfinger import parseHandle
from webfinger import webfingerMeta
from webfinger import webfingerNodeInfo
from webfinger import webfingerLookup
from webfinger import webfingerUpdate
from metadata import metaDataInstance
from metadata import metaDataNodeInfo
from pgp import getEmailAddress
from pgp import setEmailAddress
from pgp import getPGPpubKey
from pgp import getPGPfingerprint
from pgp import setPGPpubKey
from pgp import setPGPfingerprint
from xmpp import getXmppAddress
from xmpp import setXmppAddress
from ssb import getSSBAddress
from ssb import setSSBAddress
from tox import getToxAddress
from tox import setToxAddress
from matrix import getMatrixAddress
from matrix import setMatrixAddress
from donate import getDonationUrl
from donate import setDonationUrl
from person import setPersonNotes
from person import getDefaultPersonContext
from person import savePersonQrcode
from person import randomizeActorImages
from person import personUpgradeActor
from person import activateAccount
from person import deactivateAccount
from person import registerAccount
from person import personLookup
from person import personBoxJson
from person import createSharedInbox
from person import createNewsInbox
from person import suspendAccount
from person import unsuspendAccount
from person import removeAccount
from person import canRemovePost
from person import personSnooze
from person import personUnsnooze
from posts import isModerator
from posts import isEditor
from posts import mutePost
from posts import unmutePost
from posts import createQuestionPost
from posts import createPublicPost
from posts import createBlogPost
from posts import createReportPost
from posts import createUnlistedPost
from posts import createFollowersOnlyPost
from posts import createEventPost
from posts import createDirectMessagePost
from posts import populateRepliesJson
from posts import addToField
from posts import expireCache
from inbox import clearQueueItems
from inbox import inboxPermittedMessage
from inbox import inboxMessageHasParams
from inbox import runInboxQueue
from inbox import runInboxQueueWatchdog
from inbox import savePostToInboxQueue
from inbox import populateReplies
from inbox import getPersonPubKey
from follow import getFollowingFeed
from follow import sendFollowRequest
from follow import unfollowPerson
from auth import authorize
from auth import createPassword
from auth import createBasicAuthHeader
from auth import authorizeBasic
from auth import storeBasicCredentials
from threads import threadWithTrace
from threads import removeDormantThreads
from media import replaceYouTube
from media import attachMedia
from blocking import addBlock
from blocking import removeBlock
from blocking import addGlobalBlock
from blocking import removeGlobalBlock
from blocking import isBlockedHashtag
from blocking import isBlockedDomain
from blocking import getDomainBlocklist
from roles import setRole
from roles import clearModeratorStatus
from roles import clearEditorStatus
from blog import htmlBlogPageRSS2
from blog import htmlBlogPageRSS3
from blog import htmlBlogView
from blog import htmlBlogPage
from blog import htmlBlogPost
from blog import htmlEditBlog
from webinterface import htmlCitations
from webinterface import htmlFollowingList
from webinterface import getBlogAddress
from webinterface import setBlogAddress
from webinterface import htmlCalendarDeleteConfirm
from webinterface import htmlDeletePost
from webinterface import htmlAbout
from webinterface import htmlRemoveSharedItem
from webinterface import htmlInboxDMs
from webinterface import htmlInboxReplies
from webinterface import htmlInboxMedia
from webinterface import htmlInboxBlogs
from webinterface import htmlInboxNews
from webinterface import htmlUnblockConfirm
from webinterface import htmlPersonOptions
from webinterface import htmlIndividualPost
from webinterface import htmlProfile
from webinterface import htmlInbox
from webinterface import htmlBookmarks
from webinterface import htmlEvents
from webinterface import htmlShares
from webinterface import htmlOutbox
from webinterface import htmlModeration
from webinterface import htmlPostReplies
from webinterface import htmlLogin
from webinterface import htmlSuspended
from webinterface import htmlGetLoginCredentials
from webinterface import htmlNewPost
from webinterface import htmlFollowConfirm
from webinterface import htmlCalendar
from webinterface import htmlSearch
from webinterface import htmlNewswireMobile
from webinterface import htmlLinksMobile
from webinterface import htmlSearchEmoji
from webinterface import htmlSearchEmojiTextEntry
from webinterface import htmlUnfollowConfirm
from webinterface import htmlProfileAfterSearch
from webinterface import htmlEditProfile
from webinterface import htmlEditLinks
from webinterface import htmlEditNewswire
from webinterface import htmlEditNewsPost
from webinterface import htmlTermsOfService
from webinterface import htmlSkillsSearch
from webinterface import htmlHistorySearch
from webinterface import htmlHashtagSearch
from webinterface import rssHashtagSearch
from webinterface import htmlModerationInfo
from webinterface import htmlSearchSharedItems
from webinterface import htmlHashtagBlocked
from shares import getSharesFeedForPerson
from shares import addShare
from shares import removeShare
from shares import expireShares
from utils import clearFromPostCaches
from utils import containsInvalidChars
from utils import isSystemAccount
from utils import setConfigParam
from utils import getConfigParam
from utils import removeIdEnding
from utils import updateLikesCollection
from utils import undoLikesCollectionEntry
from utils import deletePost
from utils import isBlogPost
from utils import removeAvatarFromCache
from utils import locatePost
from utils import getCachedPostFilename
from utils import removePostFromCache
from utils import getNicknameFromActor
from utils import getDomainFromActor
from utils import getStatusNumber
from utils import urlPermitted
from utils import loadJson
from utils import saveJson
from utils import isSuspended
from manualapprove import manualDenyFollowRequest
from manualapprove import manualApproveFollowRequest
from announce import createAnnounce
from content import replaceEmojiFromTags
from content import addHtmlTags
from content import extractMediaInFormPOST
from content import saveMediaInFormPOST
from content import extractTextFieldsInPOST
from media import removeMetaData
from cache import storePersonInCache
from cache import getPersonFromCache
from httpsig import verifyPostHeaders
from theme import setNewsAvatar
from theme import setTheme
from theme import getTheme
from theme import enableGrayscale
from theme import disableGrayscale
from schedule import runPostSchedule
from schedule import runPostScheduleWatchdog
from schedule import removeScheduledPosts
from outbox import postMessageToOutbox
from happening import removeCalendarEvent
from bookmarks import bookmark
from bookmarks import undoBookmark
from petnames import setPetName
from followingCalendar import addPersonToCalendar
from followingCalendar import removePersonFromCalendar
from devices import E2EEdevicesCollection
from devices import E2EEvalidDevice
from devices import E2EEaddDevice
from newswire import getRSSfromDict
from newswire import rss2Header
from newswire import rss2Footer
from newsdaemon import runNewswireWatchdog
from newsdaemon import runNewswireDaemon
import os
# maximum number of posts to list in outbox feed
maxPostsInFeed = 12
# reduced posts for media feed because it can take a while
maxPostsInMediaFeed = 6
# Blogs can be longer, so don't show many per page
maxPostsInBlogsFeed = 4
maxPostsInNewsFeed = 10
# Maximum number of entries in returned rss.xml
maxPostsInRSSFeed = 10
# number of follows/followers per page
followsPerPage = 12
# number of item shares per page
sharesPerPage = 12
def saveDomainQrcode(baseDir: str, httpPrefix: str,
domainFull: str, scale=6) -> None:
"""Saves a qrcode image for the domain name
This helps to transfer onion or i2p domains to a mobile device
"""
qrcodeFilename = baseDir + '/accounts/qrcode.png'
url = pyqrcode.create(httpPrefix + '://' + domainFull)
url.png(qrcodeFilename, scale)
def readFollowList(filename: str) -> []:
"""Returns a list of ActivityPub addresses to follow
"""
followlist = []
if not os.path.isfile(filename):
return followlist
followUsers = open(filename, "r")
for u in followUsers:
if u not in followlist:
nickname, domain = parseHandle(u)
if nickname:
followlist.append(nickname + '@' + domain)
followUsers.close()
return followlist
class PubServer(BaseHTTPRequestHandler):
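# HTTP/1.1 keeps connections alive between requests, so every response
# sent by the handlers below must include an accurate Content-Length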
protocol_version = 'HTTP/1.1'
def _pathIsImage(self, path: str) -> bool:
if path.endswith('.png') or \
path.endswith('.jpg') or \
path.endswith('.gif') or \
path.endswith('.avif') or \
path.endswith('.webp'):
return True
return False
def _pathIsVideo(self, path: str) -> bool:
if path.endswith('.ogv') or \
path.endswith('.mp4'):
return True
return False
def _pathIsAudio(self, path: str) -> bool:
if path.endswith('.ogg') or \
path.endswith('.mp3'):
return True
return False
def handle_error(self, request, client_address):
print('ERROR: http server error: ' + str(request) + ', ' +
str(client_address))
pass
def _isMinimal(self, nickname: str) -> bool:
"""Returns true if minimal buttons should be shown
for the given account
"""
accountDir = self.server.baseDir + '/accounts/' + \
nickname + '@' + self.server.domain
if not os.path.isdir(accountDir):
return True
minimalFilename = accountDir + '/.notminimal'
if os.path.isfile(minimalFilename):
return False
return True
def _setMinimal(self, nickname: str, minimal: bool) -> None:
"""Sets whether an account should display minimal buttons
"""
accountDir = self.server.baseDir + '/accounts/' + \
nickname + '@' + self.server.domain
if not os.path.isdir(accountDir):
return
minimalFilename = accountDir + '/.notminimal'
minimalFileExists = os.path.isfile(minimalFilename)
if minimal and minimalFileExists:
os.remove(minimalFilename)
elif not minimal and not minimalFileExists:
with open(minimalFilename, 'w+') as fp:
fp.write('\n')
def _sendReplyToQuestion(self, nickname: str, messageId: str,
answer: str) -> None:
"""Sends a reply to a question
"""
votesFilename = self.server.baseDir + '/accounts/' + \
nickname + '@' + self.server.domain + '/questions.txt'
if os.path.isfile(votesFilename):
# have we already voted on this?
if messageId in open(votesFilename).read():
print('Already voted on message ' + messageId)
return
print('Voting on message ' + messageId)
print('Vote for: ' + answer)
commentsEnabled = True
messageJson = \
createPublicPost(self.server.baseDir,
nickname,
self.server.domain, self.server.port,
self.server.httpPrefix,
answer, False, False, False,
commentsEnabled,
None, None, None, True,
messageId, messageId, None,
False, None, None, None)
if messageJson:
# name field contains the answer
messageJson['object']['name'] = answer
if self._postToOutbox(messageJson, __version__, nickname):
postFilename = \
locatePost(self.server.baseDir, nickname,
self.server.domain, messageId)
if postFilename:
postJsonObject = loadJson(postFilename)
if postJsonObject:
populateReplies(self.server.baseDir,
self.server.httpPrefix,
self.server.domainFull,
postJsonObject,
self.server.maxReplies,
self.server.debug)
# record the vote
votesFile = open(votesFilename, 'a+')
if votesFile:
votesFile.write(messageId + '\n')
votesFile.close()
# ensure that the cached post is removed if it exists,
# so that it then will be recreated
cachedPostFilename = \
getCachedPostFilename(self.server.baseDir,
nickname,
self.server.domain,
postJsonObject)
if cachedPostFilename:
if os.path.isfile(cachedPostFilename):
os.remove(cachedPostFilename)
# remove from memory cache
removePostFromCache(postJsonObject,
self.server.recentPostsCache)
else:
print('ERROR: unable to post vote to outbox')
else:
print('ERROR: unable to create vote')
def _removePostInteractions(self, postJsonObject: {}) -> None:
"""Removes potentially sensitive interactions from a post
This is the type of thing which would be of interest to marketers
or of saleable value to them, e.g. knowing who likes whom or what.
"""
if postJsonObject.get('likes'):
postJsonObject['likes'] = {'items': []}
if postJsonObject.get('shares'):
postJsonObject['shares'] = {}
if postJsonObject.get('replies'):
postJsonObject['replies'] = {}
if postJsonObject.get('bookmarks'):
postJsonObject['bookmarks'] = {}
if not postJsonObject.get('object'):
return
if not isinstance(postJsonObject['object'], dict):
return
if postJsonObject['object'].get('likes'):
postJsonObject['object']['likes'] = {'items': []}
if postJsonObject['object'].get('shares'):
postJsonObject['object']['shares'] = {}
if postJsonObject['object'].get('replies'):
postJsonObject['object']['replies'] = {}
if postJsonObject['object'].get('bookmarks'):
postJsonObject['object']['bookmarks'] = {}
def _requestHTTP(self) -> bool:
"""Should a http response be given?
"""
if not self.headers.get('Accept'):
return False
if self.server.debug:
print('ACCEPT: ' + self.headers['Accept'])
if 'image/' in self.headers['Accept']:
if 'text/html' not in self.headers['Accept']:
return False
if 'video/' in self.headers['Accept']:
if 'text/html' not in self.headers['Accept']:
return False
if 'audio/' in self.headers['Accept']:
if 'text/html' not in self.headers['Accept']:
return False
if self.headers['Accept'].startswith('*'):
return False
if 'json' in self.headers['Accept']:
return False
return True
def _fetchAuthenticated(self) -> bool:
"""http authentication of GET requests for json
"""
if not self.server.authenticatedFetch:
return True
# check that the headers are signed
if not self.headers.get('signature'):
if self.server.debug:
print('WARN: authenticated fetch, ' +
'GET has no signature in headers')
return False
# get the keyId
keyId = None
signatureParams = self.headers['signature'].split(',')
for signatureItem in signatureParams:
if signatureItem.startswith('keyId='):
if '"' in signatureItem:
keyId = signatureItem.split('"')[1]
break
if not keyId:
if self.server.debug:
print('WARN: authenticated fetch, ' +
'failed to obtain keyId from signature')
return False
# is the keyId (actor) valid?
if not urlPermitted(keyId, self.server.federationList):
if self.server.debug:
print('Authorized fetch failed: ' + keyId +
' is not permitted')
return False
# make sure we have a session
if not self.server.session:
print('DEBUG: creating new session during authenticated fetch')
self.server.session = createSession(self.server.proxyType)
if not self.server.session:
print('ERROR: GET failed to create session during ' +
'authenticated fetch')
return False
# obtain the public key
pubKey = \
getPersonPubKey(self.server.baseDir, self.server.session, keyId,
self.server.personCache, self.server.debug,
__version__, self.server.httpPrefix,
self.server.domain, self.server.onionDomain)
if not pubKey:
if self.server.debug:
print('DEBUG: Authenticated fetch failed to ' +
'obtain public key for ' + keyId)
return False
# it is assumed that there will be no message body on
# authenticated fetches and also consequently no digest
GETrequestBody = ''
GETrequestDigest = None
# verify the GET request without any digest
if verifyPostHeaders(self.server.httpPrefix,
pubKey, self.headers,
self.path, True,
GETrequestDigest,
GETrequestBody,
self.server.debug):
return True
return False
def _login_headers(self, fileFormat: str, length: int,
callingDomain: str) -> None:
self.send_response(200)
self.send_header('Content-type', fileFormat)
self.send_header('Content-Length', str(length))
self.send_header('Host', callingDomain)
self.send_header('WWW-Authenticate',
'title="Login to Epicyon", Basic realm="epicyon"')
self.send_header('X-Robots-Tag', 'noindex')
self.end_headers()
def _logout_headers(self, fileFormat: str, length: int,
callingDomain: str) -> None:
self.send_response(200)
self.send_header('Content-type', fileFormat)
self.send_header('Content-Length', str(length))
self.send_header('Set-Cookie', 'epicyon=; SameSite=Strict')
self.send_header('Host', callingDomain)
self.send_header('WWW-Authenticate',
'title="Login to Epicyon", Basic realm="epicyon"')
self.send_header('X-Robots-Tag', 'noindex')
self.end_headers()
def _logout_redirect(self, redirect: str, cookie: str,
callingDomain: str) -> None:
if '://' not in redirect:
print('REDIRECT ERROR: redirect is not an absolute url ' +
redirect)
self.send_response(303)
self.send_header('Set-Cookie', 'epicyon=; SameSite=Strict')
self.send_header('Location', redirect)
self.send_header('Host', callingDomain)
self.send_header('InstanceID', self.server.instanceId)
self.send_header('Content-Length', '0')
self.send_header('X-Robots-Tag', 'noindex')
self.end_headers()
def _set_headers_base(self, fileFormat: str, length: int, cookie: str,
callingDomain: str) -> None:
self.send_response(200)
self.send_header('Content-type', fileFormat)
if length > -1:
self.send_header('Content-Length', str(length))
if cookie:
cookieStr = cookie
if 'HttpOnly;' not in cookieStr:
if self.server.httpPrefix == 'https':
cookieStr += '; Secure'
cookieStr += '; HttpOnly; SameSite=Strict'
self.send_header('Cookie', cookieStr)
self.send_header('Host', callingDomain)
self.send_header('InstanceID', self.server.instanceId)
self.send_header('X-Robots-Tag', 'noindex')
self.send_header('X-Clacks-Overhead', 'GNU Natalie Nguyen')
self.send_header('Accept-Ranges', 'none')
def _set_headers(self, fileFormat: str, length: int, cookie: str,
callingDomain: str) -> None:
self._set_headers_base(fileFormat, length, cookie, callingDomain)
self.send_header('Cache-Control', 'public, max-age=0')
self.end_headers()
def _set_headers_head(self, fileFormat: str, length: int, etag: str,
callingDomain: str) -> None:
self._set_headers_base(fileFormat, length, None, callingDomain)
if etag:
self.send_header('ETag', etag)
self.end_headers()
def _set_headers_etag(self, mediaFilename: str, fileFormat: str,
data, cookie: str, callingDomain: str) -> None:
self._set_headers_base(fileFormat, len(data), cookie, callingDomain)
self.send_header('Cache-Control', 'public, max-age=86400')
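# reuse any previously computed etag stored alongside the media file,
# otherwise derive one from a sha1 digest of the data and cache it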
etag = None
if os.path.isfile(mediaFilename + '.etag'):
try:
with open(mediaFilename + '.etag', 'r') as etagFile:
etag = etagFile.read()
except BaseException:
pass
if not etag:
etag = sha1(data).hexdigest() # nosec
try:
with open(mediaFilename + '.etag', 'w+') as etagFile:
etagFile.write(etag)
except BaseException:
pass
if etag:
self.send_header('ETag', etag)
self.end_headers()
def _etag_exists(self, mediaFilename: str) -> bool:
"""Does an etag header exist for the given file?
"""
etagHeader = 'If-None-Match'
if not self.headers.get(etagHeader):
etagHeader = 'if-none-match'
if not self.headers.get(etagHeader):
etagHeader = 'If-none-match'
if self.headers.get(etagHeader):
oldEtag = self.headers[etagHeader]
if os.path.isfile(mediaFilename + '.etag'):
# load the etag from file
currEtag = ''
try:
with open(mediaFilename + '.etag', 'r') as etagFile:
currEtag = etagFile.read()
except BaseException:
pass
if oldEtag == currEtag:
# The file has not changed
return True
return False
def _redirect_headers(self, redirect: str, cookie: str,
callingDomain: str) -> None:
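# cookies prefixed with 'SET:' are newly issued and sent back to the
# client via Set-Cookie; otherwise the existing cookie is echoed in a
# Cookie header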
if '://' not in redirect:
print('REDIRECT ERROR: redirect is not an absolute url ' +
redirect)
self.send_response(303)
if cookie:
cookieStr = cookie.replace('SET:', '').strip()
if 'HttpOnly;' not in cookieStr:
if self.server.httpPrefix == 'https':
cookieStr += '; Secure'
cookieStr += '; HttpOnly; SameSite=Strict'
if not cookie.startswith('SET:'):
self.send_header('Cookie', cookieStr)
else:
self.send_header('Set-Cookie', cookieStr)
self.send_header('Location', redirect)
self.send_header('Host', callingDomain)
self.send_header('InstanceID', self.server.instanceId)
self.send_header('Content-Length', '0')
self.send_header('X-Robots-Tag', 'noindex')
self.end_headers()
def _httpReturnCode(self, httpCode: int, httpDescription: str,
longDescription: str) -> None:
msg = \
'<html><head><title>' + str(httpCode) + '</title></head>' \
'<body bgcolor="linen" text="black">' \
'<div style="font-size: 400px; ' \
'text-align: center;">' + str(httpCode) + '</div>' \
'<div style="font-size: 128px; ' \
'text-align: center; font-variant: ' \
'small-caps;">' + httpDescription + '</div>' \
'<div style="text-align: center;">' + longDescription + '</div>' \
'</body></html>'
msg = msg.encode('utf-8')
self.send_response(httpCode)
self.send_header('Content-Type', 'text/html; charset=utf-8')
self.send_header('Content-Length', str(len(msg)))
self.send_header('X-Robots-Tag', 'noindex')
self.end_headers()
if not self._write(msg):
print('Error when showing ' + str(httpCode))
def _200(self) -> None:
if self.server.translate:
self._httpReturnCode(200, self.server.translate['Ok'],
self.server.translate['This is nothing ' +
'less than an utter ' +
'triumph'])
else:
self._httpReturnCode(200, 'Ok',
'This is nothing less ' +
'than an utter triumph')
def _404(self) -> None:
if self.server.translate:
self._httpReturnCode(404, self.server.translate['Not Found'],
self.server.translate['These are not the ' +
'droids you are ' +
'looking for'])
else:
self._httpReturnCode(404, 'Not Found',
'These are not the ' +
'droids you are ' +
'looking for')
def _304(self) -> None:
if self.server.translate:
self._httpReturnCode(304, self.server.translate['Not changed'],
self.server.translate['The contents of ' +
'your local cache ' +
'are up to date'])
else:
self._httpReturnCode(304, 'Not changed',
'The contents of ' +
'your local cache ' +
'are up to date')
def _400(self) -> None:
if self.server.translate:
self._httpReturnCode(400, self.server.translate['Bad Request'],
self.server.translate['Better luck ' +
'next time'])
else:
self._httpReturnCode(400, 'Bad Request',
'Better luck next time')
def _503(self) -> None:
if self.server.translate:
self._httpReturnCode(503, self.server.translate['Unavailable'],
self.server.translate['The server is busy. ' +
'Please try again ' +
'later'])
else:
self._httpReturnCode(503, 'Unavailable',
'The server is busy. Please try again ' +
'later')
def _write(self, msg) -> bool:
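# attempt the socket write up to five times, pausing half a second
# between attempts, and report whether it eventually succeeded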
tries = 0
while tries < 5:
try:
self.wfile.write(msg)
return True
except Exception as e:
print(e)
time.sleep(0.5)
tries += 1
return False
def _robotsTxt(self) -> bool:
if not self.path.lower().startswith('/robot'):
return False
msg = 'User-agent: *\nDisallow: /'
msg = msg.encode('utf-8')
self._set_headers('text/plain; charset=utf-8', len(msg),
None, self.server.domainFull)
self._write(msg)
return True
def _hasAccept(self, callingDomain: str) -> bool:
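# some i2p (.b32.i2p) clients do not send an Accept header, so a typical
# browser Accept value is assumed for them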
if self.headers.get('Accept') or callingDomain.endswith('.b32.i2p'):
if not self.headers.get('Accept'):
self.headers['Accept'] = \
'text/html,application/xhtml+xml,' \
'application/xml;q=0.9,image/webp,*/*;q=0.8'
return True
return False
def _mastoApi(self, callingDomain: str) -> bool:
"""This is a vestigil mastodon API for the purpose
of returning an empty result to sites like
https://mastopeek.app-dist.eu
"""
if not self.path.startswith('/api/v1/'):
return False
if self.server.debug:
print('DEBUG: mastodon api ' + self.path)
adminNickname = getConfigParam(self.server.baseDir, 'admin')
if adminNickname and self.path == '/api/v1/instance':
instanceDescriptionShort = \
getConfigParam(self.server.baseDir,
'instanceDescriptionShort')
if not instanceDescriptionShort:
    instanceDescriptionShort = 'Yet another Epicyon Instance'
instanceDescription = getConfigParam(self.server.baseDir,
'instanceDescription')
instanceTitle = getConfigParam(self.server.baseDir,
'instanceTitle')
instanceJson = \
metaDataInstance(instanceTitle,
instanceDescriptionShort,
instanceDescription,
self.server.httpPrefix,
self.server.baseDir,
adminNickname,
self.server.domain,
self.server.domainFull,
self.server.registration,
self.server.systemLanguage,
self.server.projectVersion)
msg = json.dumps(instanceJson).encode('utf-8')
if self._hasAccept(callingDomain):
if 'application/ld+json' in self.headers['Accept']:
self._set_headers('application/ld+json', len(msg),
None, callingDomain)
else:
self._set_headers('application/json', len(msg),
None, callingDomain)
else:
self._set_headers('application/ld+json', len(msg),
None, callingDomain)
self._write(msg)
print('instance metadata sent')
return True
if self.path.startswith('/api/v1/instance/peers'):
# This is just a dummy result.
# Showing the full list of peers would have privacy implications.
# On a large instance you are somewhat lost in the crowd, but on
# small instances a full list of peers would convey a lot of
# information about the interests of a small number of accounts
msg = json.dumps(['mastodon.social',
self.server.domainFull]).encode('utf-8')
if self._hasAccept(callingDomain):
if 'application/ld+json' in self.headers['Accept']:
self._set_headers('application/ld+json', len(msg),
None, callingDomain)
else:
self._set_headers('application/json', len(msg),
None, callingDomain)
else:
self._set_headers('application/ld+json', len(msg),
None, callingDomain)
self._write(msg)
print('instance peers metadata sent')
return True
if self.path.startswith('/api/v1/instance/activity'):
# This is just a dummy result.
msg = json.dumps([]).encode('utf-8')
if self._hasAccept(callingDomain):
if 'application/ld+json' in self.headers['Accept']:
self._set_headers('application/ld+json', len(msg),
None, callingDomain)
else:
self._set_headers('application/json', len(msg),
None, callingDomain)
else:
self._set_headers('application/ld+json', len(msg),
None, callingDomain)
self._write(msg)
print('instance activity metadata sent')
return True
self._404()
return True
def _nodeinfo(self, callingDomain: str) -> bool:
if not self.path.startswith('/nodeinfo/2.0'):
return False
if self.server.debug:
print('DEBUG: nodeinfo ' + self.path)
info = metaDataNodeInfo(self.server.baseDir,
self.server.registration,
self.server.projectVersion)
if info:
msg = json.dumps(info).encode('utf-8')
if self._hasAccept(callingDomain):
if 'application/ld+json' in self.headers['Accept']:
self._set_headers('application/ld+json', len(msg),
None, callingDomain)
else:
self._set_headers('application/json', len(msg),
None, callingDomain)
else:
self._set_headers('application/ld+json', len(msg),
None, callingDomain)
self._write(msg)
print('nodeinfo sent')
return True
self._404()
return True
def _webfinger(self, callingDomain: str) -> bool:
if not self.path.startswith('/.well-known'):
return False
if self.server.debug:
print('DEBUG: WEBFINGER well-known')
if self.server.debug:
print('DEBUG: WEBFINGER host-meta')
if self.path.startswith('/.well-known/host-meta'):
if callingDomain.endswith('.onion') and \
self.server.onionDomain:
wfResult = \
webfingerMeta('http', self.server.onionDomain)
elif (callingDomain.endswith('.i2p') and
self.server.i2pDomain):
wfResult = \
webfingerMeta('http', self.server.i2pDomain)
else:
wfResult = \
webfingerMeta(self.server.httpPrefix,
self.server.domainFull)
if wfResult:
msg = wfResult.encode('utf-8')
self._set_headers('application/xrd+xml', len(msg),
None, callingDomain)
self._write(msg)
return True
self._404()
return True
if self.path.startswith('/.well-known/nodeinfo'):
if callingDomain.endswith('.onion') and \
self.server.onionDomain:
wfResult = \
webfingerNodeInfo('http', self.server.onionDomain)
elif (callingDomain.endswith('.i2p') and
self.server.i2pDomain):
wfResult = \
webfingerNodeInfo('http', self.server.i2pDomain)
else:
wfResult = \
webfingerNodeInfo(self.server.httpPrefix,
self.server.domainFull)
if wfResult:
msg = json.dumps(wfResult).encode('utf-8')
if self._hasAccept(callingDomain):
if 'application/ld+json' in self.headers['Accept']:
self._set_headers('application/ld+json', len(msg),
None, callingDomain)
else:
self._set_headers('application/json', len(msg),
None, callingDomain)
else:
self._set_headers('application/ld+json', len(msg),
None, callingDomain)
self._write(msg)
return True
self._404()
return True
if self.server.debug:
print('DEBUG: WEBFINGER lookup ' + self.path + ' ' +
str(self.server.baseDir))
wfResult = \
webfingerLookup(self.path, self.server.baseDir,
self.server.domain, self.server.onionDomain,
self.server.port, self.server.debug)
if wfResult:
msg = json.dumps(wfResult).encode('utf-8')
self._set_headers('application/jrd+json', len(msg),
None, callingDomain)
self._write(msg)
else:
if self.server.debug:
print('DEBUG: WEBFINGER lookup 404 ' + self.path)
self._404()
return True
def _permittedDir(self, path: str) -> bool:
"""These are special paths which should not be accessible
directly via GET or POST
"""
if path.startswith('/wfendpoints') or \
path.startswith('/keys') or \
path.startswith('/accounts'):
return False
return True
def _postToOutbox(self, messageJson: {}, version: str,
postToNickname=None) -> bool:
"""post is received by the outbox
Client to server message post
https://www.w3.org/TR/activitypub/#client-to-server-outbox-delivery
"""
if postToNickname:
print('Posting to nickname ' + postToNickname)
self.postToNickname = postToNickname
return postMessageToOutbox(messageJson, self.postToNickname,
self.server, self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.onionDomain,
self.server.i2pDomain,
self.server.port,
self.server.recentPostsCache,
self.server.followersThreads,
self.server.federationList,
self.server.sendThreads,
self.server.postLog,
self.server.cachedWebfingers,
self.server.personCache,
self.server.allowDeletion,
self.server.proxyType, version,
self.server.debug,
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly)
def _postToOutboxThread(self, messageJson: {}) -> bool:
"""Creates a thread to send a post
"""
accountOutboxThreadName = self.postToNickname
if not accountOutboxThreadName:
accountOutboxThreadName = '*'
if self.server.outboxThread.get(accountOutboxThreadName):
print('Waiting for previous outbox thread to end')
waitCtr = 0
thName = accountOutboxThreadName
while self.server.outboxThread[thName].is_alive() and waitCtr < 8:
time.sleep(1)
waitCtr += 1
if waitCtr >= 8:
self.server.outboxThread[accountOutboxThreadName].kill()
print('Creating outbox thread')
self.server.outboxThread[accountOutboxThreadName] = \
threadWithTrace(target=self._postToOutbox,
args=(messageJson.copy(), __version__),
daemon=True)
print('Starting outbox thread')
self.server.outboxThread[accountOutboxThreadName].start()
return True
def _updateInboxQueue(self, nickname: str, messageJson: {},
messageBytes: str) -> int:
"""Update the inbox queue
"""
if self.server.restartInboxQueueInProgress:
self._503()
print('Message arrived but currently restarting inbox queue')
self.server.POSTbusy = False
return 2
# check for blocked domains so that they can be rejected early
messageDomain = None
if messageJson.get('actor'):
messageDomain, messagePort = \
getDomainFromActor(messageJson['actor'])
if isBlockedDomain(self.server.baseDir, messageDomain):
print('POST from blocked domain ' + messageDomain)
self._400()
self.server.POSTbusy = False
return 3
# if the inbox queue is full then return a busy code
if len(self.server.inboxQueue) >= self.server.maxQueueLength:
if messageDomain:
print('Queue: Inbox queue is full. Incoming post from ' +
messageJson['actor'])
else:
print('Queue: Inbox queue is full')
self._503()
clearQueueItems(self.server.baseDir, self.server.inboxQueue)
if not self.server.restartInboxQueueInProgress:
self.server.restartInboxQueue = True
self.server.POSTbusy = False
return 2
# Convert the headers needed for signature verification to dict
headersDict = {}
headersDict['host'] = self.headers['host']
headersDict['signature'] = self.headers['signature']
if self.headers.get('Date'):
headersDict['Date'] = self.headers['Date']
if self.headers.get('digest'):
headersDict['digest'] = self.headers['digest']
if self.headers.get('Content-type'):
headersDict['Content-type'] = self.headers['Content-type']
if self.headers.get('Content-Length'):
headersDict['Content-Length'] = self.headers['Content-Length']
elif self.headers.get('content-length'):
headersDict['content-length'] = self.headers['content-length']
# For follow activities add a 'to' field, which is a copy
# of the object field
messageJson, toFieldExists = \
addToField('Follow', messageJson, self.server.debug)
# For like activities add a 'to' field, which is a copy of
# the actor within the object field
messageJson, toFieldExists = \
addToField('Like', messageJson, self.server.debug)
beginSaveTime = time.time()
# save the json for later queue processing
queueFilename = \
savePostToInboxQueue(self.server.baseDir,
self.server.httpPrefix,
nickname,
self.server.domainFull,
messageJson,
messageBytes.decode('utf-8'),
headersDict,
self.path,
self.server.debug)
if queueFilename:
# add json to the queue
if queueFilename not in self.server.inboxQueue:
self.server.inboxQueue.append(queueFilename)
if self.server.debug:
timeDiff = int((time.time() - beginSaveTime) * 1000)
if timeDiff > 200:
print('SLOW: slow save of inbox queue item ' +
queueFilename + ' took ' + str(timeDiff) + ' mS')
self.send_response(201)
self.end_headers()
self.server.POSTbusy = False
return 0
self._503()
self.server.POSTbusy = False
return 1
def _isAuthorized(self) -> bool:
self.authorizedNickname = None
if self.path.startswith('/icons/') or \
self.path.startswith('/avatars/') or \
self.path.startswith('/favicon.ico') or \
self.path.startswith('/newswire.xml'):
return False
# token based authentication used by the web interface
if self.headers.get('Cookie'):
if self.headers['Cookie'].startswith('epicyon='):
tokenStr = self.headers['Cookie'].split('=', 1)[1].strip()
if ';' in tokenStr:
tokenStr = tokenStr.split(';')[0].strip()
if self.server.tokensLookup.get(tokenStr):
nickname = self.server.tokensLookup[tokenStr]
self.authorizedNickname = nickname
# default to the inbox of the person
if self.path == '/':
self.path = '/users/' + nickname + '/inbox'
# check that the path contains the same nickname
# as the cookie otherwise it would be possible
# to be authorized to use an account you don't own
if '/' + nickname + '/' in self.path:
return True
elif '/' + nickname + '?' in self.path:
return True
elif self.path.endswith('/' + nickname):
return True
print('AUTH: nickname ' + nickname +
' was not found in path ' + self.path)
return False
print('AUTH: epicyon cookie ' +
'authorization failed, header=' +
self.headers['Cookie'].replace('epicyon=', '') +
' tokenStr=' + tokenStr + ' tokens=' +
str(self.server.tokensLookup))
return False
print('AUTH: Header cookie was not authorized')
return False
# basic auth
if self.headers.get('Authorization'):
if authorize(self.server.baseDir, self.path,
self.headers['Authorization'],
2019-07-12 11:05:43 +00:00
self.server.debug):
return True
print('AUTH: Basic auth did not authorize ' +
self.headers['Authorization'])
return False
def _clearLoginDetails(self, nickname: str, callingDomain: str) -> None:
"""Clears login details for the given account
"""
# remove any token
if self.server.tokens.get(nickname):
del self.server.tokensLookup[self.server.tokens[nickname]]
del self.server.tokens[nickname]
self._redirect_headers(self.server.httpPrefix + '://' +
self.server.domainFull + '/login',
'epicyon=; SameSite=Strict',
callingDomain)
def _benchmarkGETtimings(self, GETstartTime, GETtimings: {},
prevGetId: str,
currGetId: str) -> None:
"""Updates a dictionary containing how long each segment of GET takes
"""
timeDiff = int((time.time() - GETstartTime) * 1000)
logEvent = False
if timeDiff > 100:
logEvent = True
if prevGetId:
if GETtimings.get(prevGetId):
timeDiff = int(timeDiff - int(GETtimings[prevGetId]))
GETtimings[currGetId] = str(timeDiff)
if logEvent:
print('GET TIMING ' + currGetId + ' = ' + str(timeDiff))
def _benchmarkPOSTtimings(self, POSTstartTime, POSTtimings: [],
postID: int) -> None:
"""Updates a list containing how long each segment of POST takes
"""
if self.server.debug:
timeDiff = int((time.time() - POSTstartTime) * 1000)
logEvent = False
if timeDiff > 100:
logEvent = True
if POSTtimings:
timeDiff = int(timeDiff - int(POSTtimings[-1]))
POSTtimings.append(str(timeDiff))
if logEvent:
ctr = 1
for timeDiff in POSTtimings:
print('POST TIMING|' + str(ctr) + '|' + timeDiff)
ctr += 1
def _pathContainsBlogLink(self, baseDir: str,
httpPrefix: str, domain: str,
domainFull: str, path: str) -> (str, str):
"""If the path contains a blog entry then return its filename
"""
if '/users/' not in path:
return None, None
userEnding = path.split('/users/', 1)[1]
if '/' not in userEnding:
return None, None
userEnding2 = userEnding.split('/')
nickname = userEnding2[0]
if len(userEnding2) != 2:
return None, None
if len(userEnding2[1]) < 14:
return None, None
userEnding2[1] = userEnding2[1].strip()
if not userEnding2[1].isdigit():
return None, None
# check for blog posts
blogIndexFilename = baseDir + '/accounts/' + \
nickname + '@' + domain + '/tlblogs.index'
if not os.path.isfile(blogIndexFilename):
return None, None
if '#' + userEnding2[1] + '.' not in open(blogIndexFilename).read():
return None, None
messageId = httpPrefix + '://' + domainFull + \
'/users/' + nickname + '/statuses/' + userEnding2[1]
return locatePost(baseDir, nickname, domain, messageId), nickname
def _loginScreen(self, path: str, callingDomain: str, cookie: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
debug: bool) -> None:
"""Shows the login screen
"""
# get the contents of POST containing login credentials
length = int(self.headers['Content-length'])
if length > 512:
print('Login failed - credentials too long')
self.send_response(401)
self.end_headers()
self.server.POSTbusy = False
return
try:
loginParams = self.rfile.read(length).decode('utf-8')
except SocketError as e:
if e.errno == errno.ECONNRESET:
print('WARN: POST login read ' +
'connection reset by peer')
else:
print('WARN: POST login read socket error')
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
except ValueError as e:
print('ERROR: POST login read failed')
print(e)
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
loginNickname, loginPassword, register = \
htmlGetLoginCredentials(loginParams, self.server.lastLoginTime)
if loginNickname:
if isSystemAccount(loginNickname):
print('Invalid username login: ' + loginNickname +
' (system account)')
self._clearLoginDetails(loginNickname, callingDomain)
self.server.POSTbusy = False
return
self.server.lastLoginTime = int(time.time())
if register:
if not registerAccount(baseDir, httpPrefix, domain, port,
loginNickname, loginPassword,
self.server.manualFollowerApproval):
self.server.POSTbusy = False
if callingDomain.endswith('.onion') and onionDomain:
self._redirect_headers('http://' + onionDomain +
'/login', cookie,
callingDomain)
elif (callingDomain.endswith('.i2p') and i2pDomain):
self._redirect_headers('http://' + i2pDomain +
'/login', cookie,
callingDomain)
else:
self._redirect_headers(httpPrefix + '://' +
domainFull + '/login',
cookie, callingDomain)
return
authHeader = \
createBasicAuthHeader(loginNickname, loginPassword)
if not authorizeBasic(baseDir, '/users/' +
loginNickname + '/outbox',
authHeader, False):
print('Login failed: ' + loginNickname)
self._clearLoginDetails(loginNickname, callingDomain)
self.server.POSTbusy = False
return
else:
if isSuspended(baseDir, loginNickname):
msg = \
htmlSuspended(self.server.cssCache,
baseDir).encode('utf-8')
self._login_headers('text/html',
len(msg), callingDomain)
self._write(msg)
self.server.POSTbusy = False
return
# login success - redirect with authorization
print('Login success: ' + loginNickname)
# re-activate account if needed
activateAccount(baseDir, loginNickname, domain)
# This produces a deterministic token based
# on nick+password+salt
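                # i.e. sha256(nickname + password + salt) as a hex
                # digest; the token is also written to the account's
                # .token file and later sent back as the epicyon cookie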
saltFilename = \
                baseDir + '/accounts/' + \
loginNickname + '@' + domain + '/.salt'
salt = createPassword(32)
if os.path.isfile(saltFilename):
try:
with open(saltFilename, 'r') as fp:
salt = fp.read()
except Exception as e:
print('WARN: Unable to read salt for ' +
loginNickname + ' ' + str(e))
else:
try:
with open(saltFilename, 'w+') as fp:
fp.write(salt)
except Exception as e:
print('WARN: Unable to save salt for ' +
loginNickname + ' ' + str(e))
tokenText = loginNickname + loginPassword + salt
token = sha256(tokenText.encode('utf-8')).hexdigest()
self.server.tokens[loginNickname] = token
loginHandle = loginNickname + '@' + domain
tokenFilename = \
                baseDir + '/accounts/' + \
loginHandle + '/.token'
try:
with open(tokenFilename, 'w+') as fp:
fp.write(token)
except Exception as e:
print('WARN: Unable to save token for ' +
loginNickname + ' ' + str(e))
personUpgradeActor(baseDir, None, loginHandle,
baseDir + '/accounts/' +
loginHandle + '.json')
index = self.server.tokens[loginNickname]
self.server.tokensLookup[index] = loginNickname
cookieStr = 'SET:epicyon=' + \
self.server.tokens[loginNickname] + '; SameSite=Strict'
if callingDomain.endswith('.onion') and onionDomain:
self._redirect_headers('http://' +
onionDomain +
'/users/' +
loginNickname + '/' +
self.server.defaultTimeline,
cookieStr, callingDomain)
elif (callingDomain.endswith('.i2p') and i2pDomain):
self._redirect_headers('http://' +
i2pDomain +
'/users/' +
loginNickname + '/' +
self.server.defaultTimeline,
cookieStr, callingDomain)
else:
self._redirect_headers(httpPrefix + '://' +
domainFull + '/users/' +
loginNickname + '/' +
self.server.defaultTimeline,
cookieStr, callingDomain)
self.server.POSTbusy = False
return
self._200()
self.server.POSTbusy = False

    def _moderatorActions(self, path: str, callingDomain: str, cookie: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
debug: bool) -> None:
"""Actions on the moderator screeen
"""
usersPath = path.replace('/moderationaction', '')
actorStr = httpPrefix + '://' + domainFull + usersPath
length = int(self.headers['Content-length'])
try:
moderationParams = self.rfile.read(length).decode('utf-8')
except SocketError as e:
if e.errno == errno.ECONNRESET:
print('WARN: POST moderationParams connection was reset')
else:
print('WARN: POST moderationParams ' +
'rfile.read socket error')
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
except ValueError as e:
print('ERROR: POST moderationParams rfile.read failed')
print(e)
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
if '&' in moderationParams:
moderationText = None
moderationButton = None
for moderationStr in moderationParams.split('&'):
if moderationStr.startswith('moderationAction'):
if '=' in moderationStr:
moderationText = \
moderationStr.split('=')[1].strip()
modText = moderationText.replace('+', ' ')
moderationText = \
urllib.parse.unquote_plus(modText.strip())
elif moderationStr.startswith('submitInfo'):
msg = htmlModerationInfo(self.server.cssCache,
self.server.translate,
baseDir, httpPrefix)
msg = msg.encode('utf-8')
self._login_headers('text/html',
len(msg), callingDomain)
self._write(msg)
self.server.POSTbusy = False
return
elif moderationStr.startswith('submitBlock'):
moderationButton = 'block'
elif moderationStr.startswith('submitUnblock'):
moderationButton = 'unblock'
elif moderationStr.startswith('submitSuspend'):
moderationButton = 'suspend'
elif moderationStr.startswith('submitUnsuspend'):
moderationButton = 'unsuspend'
elif moderationStr.startswith('submitRemove'):
moderationButton = 'remove'
if moderationButton and moderationText:
if debug:
print('moderationButton: ' + moderationButton)
print('moderationText: ' + moderationText)
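                # the moderation text may be an actor URL, a
                # nick@domain handle, a bare domain or a #hashtag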
nickname = moderationText
if nickname.startswith('http') or \
nickname.startswith('dat'):
nickname = getNicknameFromActor(nickname)
if '@' in nickname:
nickname = nickname.split('@')[0]
if moderationButton == 'suspend':
suspendAccount(baseDir, nickname, domain)
if moderationButton == 'unsuspend':
unsuspendAccount(baseDir, nickname)
if moderationButton == 'block':
fullBlockDomain = None
if moderationText.startswith('http') or \
moderationText.startswith('dat'):
# https://domain
blockDomain, blockPort = \
getDomainFromActor(moderationText)
fullBlockDomain = blockDomain
if blockPort:
if blockPort != 80 and blockPort != 443:
if ':' not in blockDomain:
fullBlockDomain = \
blockDomain + ':' + str(blockPort)
if '@' in moderationText:
# nick@domain or *@domain
fullBlockDomain = moderationText.split('@')[1]
else:
# assume the text is a domain name
if not fullBlockDomain and '.' in moderationText:
nickname = '*'
fullBlockDomain = moderationText.strip()
if fullBlockDomain or nickname.startswith('#'):
addGlobalBlock(baseDir, nickname, fullBlockDomain)
if moderationButton == 'unblock':
fullBlockDomain = None
if moderationText.startswith('http') or \
moderationText.startswith('dat'):
# https://domain
blockDomain, blockPort = \
getDomainFromActor(moderationText)
fullBlockDomain = blockDomain
if blockPort:
if blockPort != 80 and blockPort != 443:
if ':' not in blockDomain:
fullBlockDomain = \
blockDomain + ':' + str(blockPort)
if '@' in moderationText:
# nick@domain or *@domain
fullBlockDomain = moderationText.split('@')[1]
else:
# assume the text is a domain name
if not fullBlockDomain and '.' in moderationText:
nickname = '*'
fullBlockDomain = moderationText.strip()
if fullBlockDomain or nickname.startswith('#'):
removeGlobalBlock(baseDir, nickname, fullBlockDomain)
if moderationButton == 'remove':
if '/statuses/' not in moderationText:
removeAccount(baseDir, nickname, domain, port)
else:
# remove a post or thread
postFilename = \
locatePost(baseDir, nickname, domain,
moderationText)
if postFilename:
if canRemovePost(baseDir,
nickname,
domain,
port,
moderationText):
deletePost(baseDir,
httpPrefix,
nickname, domain,
postFilename,
debug,
self.server.recentPostsCache)
if callingDomain.endswith('.onion') and onionDomain:
actorStr = 'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and i2pDomain):
actorStr = 'http://' + i2pDomain + usersPath
self._redirect_headers(actorStr + '/moderation',
cookie, callingDomain)
self.server.POSTbusy = False
return

    def _personOptions(self, path: str,
callingDomain: str, cookie: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
debug: bool) -> None:
"""Receive POST from person options screen
"""
pageNumber = 1
usersPath = path.split('/personoptions')[0]
originPathStr = httpPrefix + '://' + domainFull + usersPath
chooserNickname = getNicknameFromActor(originPathStr)
if not chooserNickname:
if callingDomain.endswith('.onion') and onionDomain:
originPathStr = 'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and i2pDomain):
originPathStr = 'http://' + i2pDomain + usersPath
print('WARN: unable to find nickname in ' + originPathStr)
self._redirect_headers(originPathStr, cookie, callingDomain)
self.server.POSTbusy = False
return
length = int(self.headers['Content-length'])
try:
optionsConfirmParams = self.rfile.read(length).decode('utf-8')
except SocketError as e:
if e.errno == errno.ECONNRESET:
print('WARN: POST optionsConfirmParams ' +
'connection reset by peer')
else:
print('WARN: POST optionsConfirmParams socket error')
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
except ValueError as e:
print('ERROR: POST optionsConfirmParams rfile.read failed')
print(e)
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
optionsConfirmParams = \
urllib.parse.unquote_plus(optionsConfirmParams)
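        # the form includes fields such as pageNumber, actor and
        # avatarUrl, plus one submit* field per button pressed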
# page number to return to
if 'pageNumber=' in optionsConfirmParams:
pageNumberStr = optionsConfirmParams.split('pageNumber=')[1]
if '&' in pageNumberStr:
pageNumberStr = pageNumberStr.split('&')[0]
if pageNumberStr.isdigit():
pageNumber = int(pageNumberStr)
# actor for the person
optionsActor = optionsConfirmParams.split('actor=')[1]
if '&' in optionsActor:
optionsActor = optionsActor.split('&')[0]
# url of the avatar
optionsAvatarUrl = optionsConfirmParams.split('avatarUrl=')[1]
if '&' in optionsAvatarUrl:
optionsAvatarUrl = optionsAvatarUrl.split('&')[0]
# link to a post, which can then be included in reports
postUrl = None
if 'postUrl' in optionsConfirmParams:
postUrl = optionsConfirmParams.split('postUrl=')[1]
if '&' in postUrl:
postUrl = postUrl.split('&')[0]
# petname for this person
petname = None
if 'optionpetname' in optionsConfirmParams:
petname = optionsConfirmParams.split('optionpetname=')[1]
if '&' in petname:
petname = petname.split('&')[0]
            # Limit the petname length and reject invalid characters
if len(petname) > 20 or \
' ' in petname or '/' in petname or \
'?' in petname or '#' in petname:
petname = None
# notes about this person
personNotes = None
if 'optionnotes' in optionsConfirmParams:
personNotes = optionsConfirmParams.split('optionnotes=')[1]
if '&' in personNotes:
personNotes = personNotes.split('&')[0]
personNotes = urllib.parse.unquote_plus(personNotes.strip())
# Limit the length of the notes
if len(personNotes) > 64000:
personNotes = None
# get the nickname
optionsNickname = getNicknameFromActor(optionsActor)
if not optionsNickname:
if callingDomain.endswith('.onion') and onionDomain:
originPathStr = 'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and i2pDomain):
originPathStr = 'http://' + i2pDomain + usersPath
print('WARN: unable to find nickname in ' + optionsActor)
self._redirect_headers(originPathStr, cookie, callingDomain)
self.server.POSTbusy = False
return
optionsDomain, optionsPort = getDomainFromActor(optionsActor)
optionsDomainFull = optionsDomain
if optionsPort:
if optionsPort != 80 and optionsPort != 443:
if ':' not in optionsDomain:
optionsDomainFull = optionsDomain + ':' + \
str(optionsPort)
if chooserNickname == optionsNickname and \
optionsDomain == domain and \
optionsPort == port:
if debug:
print('You cannot perform an option action on yourself')
# person options screen, view button
# See htmlPersonOptions
if '&submitView=' in optionsConfirmParams:
if debug:
print('Viewing ' + optionsActor)
self._redirect_headers(optionsActor,
cookie, callingDomain)
self.server.POSTbusy = False
return
# person options screen, petname submit button
# See htmlPersonOptions
if '&submitPetname=' in optionsConfirmParams and petname:
if debug:
print('Change petname to ' + petname)
handle = optionsNickname + '@' + optionsDomainFull
setPetName(baseDir,
chooserNickname,
domain,
handle, petname)
self._redirect_headers(usersPath + '/' +
self.server.defaultTimeline +
'?page='+str(pageNumber), cookie,
callingDomain)
self.server.POSTbusy = False
return
# person options screen, person notes submit button
# See htmlPersonOptions
if '&submitPersonNotes=' in optionsConfirmParams:
if debug:
print('Change person notes')
handle = optionsNickname + '@' + optionsDomainFull
if not personNotes:
personNotes = ''
setPersonNotes(baseDir,
chooserNickname,
domain,
handle, personNotes)
self._redirect_headers(usersPath + '/' +
self.server.defaultTimeline +
'?page='+str(pageNumber), cookie,
callingDomain)
self.server.POSTbusy = False
return
# person options screen, on calendar checkbox
# See htmlPersonOptions
if '&submitOnCalendar=' in optionsConfirmParams:
onCalendar = None
if 'onCalendar=' in optionsConfirmParams:
onCalendar = optionsConfirmParams.split('onCalendar=')[1]
if '&' in onCalendar:
onCalendar = onCalendar.split('&')[0]
if onCalendar == 'on':
addPersonToCalendar(baseDir,
chooserNickname,
domain,
optionsNickname,
optionsDomainFull)
else:
removePersonFromCalendar(baseDir,
chooserNickname,
domain,
optionsNickname,
optionsDomainFull)
self._redirect_headers(usersPath + '/' +
self.server.defaultTimeline +
'?page='+str(pageNumber), cookie,
callingDomain)
self.server.POSTbusy = False
return
# person options screen, permission to post to newswire
# See htmlPersonOptions
if '&submitPostToNews=' in optionsConfirmParams:
if isModerator(self.server.baseDir, chooserNickname):
postsToNews = None
if 'postsToNews=' in optionsConfirmParams:
postsToNews = optionsConfirmParams.split('postsToNews=')[1]
if '&' in postsToNews:
postsToNews = postsToNews.split('&')[0]
newswireBlockedFilename = \
self.server.baseDir + '/accounts/' + \
optionsNickname + '@' + optionsDomain + '/.nonewswire'
if postsToNews == 'on':
if os.path.isfile(newswireBlockedFilename):
os.remove(newswireBlockedFilename)
else:
noNewswireFile = open(newswireBlockedFilename, "w+")
if noNewswireFile:
noNewswireFile.write('\n')
noNewswireFile.close()
self._redirect_headers(usersPath + '/' +
self.server.defaultTimeline +
'?page='+str(pageNumber), cookie,
callingDomain)
self.server.POSTbusy = False
return
# person options screen, block button
# See htmlPersonOptions
if '&submitBlock=' in optionsConfirmParams:
if debug:
print('Adding block by ' + chooserNickname +
' of ' + optionsActor)
addBlock(baseDir, chooserNickname,
domain,
optionsNickname, optionsDomainFull)
# person options screen, unblock button
# See htmlPersonOptions
if '&submitUnblock=' in optionsConfirmParams:
if debug:
print('Unblocking ' + optionsActor)
msg = \
htmlUnblockConfirm(self.server.cssCache,
self.server.translate,
baseDir,
usersPath,
optionsActor,
optionsAvatarUrl).encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self.server.POSTbusy = False
return
# person options screen, follow button
# See htmlPersonOptions
if '&submitFollow=' in optionsConfirmParams:
if debug:
print('Following ' + optionsActor)
msg = \
htmlFollowConfirm(self.server.cssCache,
self.server.translate,
baseDir,
usersPath,
optionsActor,
optionsAvatarUrl).encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self.server.POSTbusy = False
return
# person options screen, unfollow button
# See htmlPersonOptions
if '&submitUnfollow=' in optionsConfirmParams:
print('Unfollowing ' + optionsActor)
msg = \
htmlUnfollowConfirm(self.server.cssCache,
self.server.translate,
baseDir,
usersPath,
optionsActor,
optionsAvatarUrl).encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self.server.POSTbusy = False
return
# person options screen, DM button
# See htmlPersonOptions
if '&submitDM=' in optionsConfirmParams:
if debug:
print('Sending DM to ' + optionsActor)
reportPath = path.replace('/personoptions', '') + '/newdm'
msg = htmlNewPost(self.server.cssCache,
False, self.server.translate,
baseDir,
httpPrefix,
reportPath, None,
[optionsActor], None,
pageNumber,
chooserNickname,
domain,
domainFull,
self.server.defaultTimeline,
self.server.newswire).encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self.server.POSTbusy = False
return
# person options screen, snooze button
# See htmlPersonOptions
if '&submitSnooze=' in optionsConfirmParams:
usersPath = path.split('/personoptions')[0]
thisActor = httpPrefix + '://' + domainFull + usersPath
if debug:
print('Snoozing ' + optionsActor + ' ' + thisActor)
if '/users/' in thisActor:
nickname = thisActor.split('/users/')[1]
personSnooze(baseDir, nickname,
domain, optionsActor)
if callingDomain.endswith('.onion') and onionDomain:
thisActor = 'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and i2pDomain):
thisActor = 'http://' + i2pDomain + usersPath
self._redirect_headers(thisActor + '/' +
self.server.defaultTimeline +
'?page='+str(pageNumber), cookie,
callingDomain)
self.server.POSTbusy = False
return
# person options screen, unsnooze button
# See htmlPersonOptions
if '&submitUnSnooze=' in optionsConfirmParams:
usersPath = path.split('/personoptions')[0]
thisActor = httpPrefix + '://' + domainFull + usersPath
if debug:
print('Unsnoozing ' + optionsActor + ' ' + thisActor)
if '/users/' in thisActor:
nickname = thisActor.split('/users/')[1]
personUnsnooze(baseDir, nickname,
domain, optionsActor)
if callingDomain.endswith('.onion') and onionDomain:
thisActor = 'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and i2pDomain):
thisActor = 'http://' + i2pDomain + usersPath
self._redirect_headers(thisActor + '/' +
self.server.defaultTimeline +
'?page=' + str(pageNumber), cookie,
callingDomain)
self.server.POSTbusy = False
return
# person options screen, report button
# See htmlPersonOptions
if '&submitReport=' in optionsConfirmParams:
if debug:
print('Reporting ' + optionsActor)
reportPath = \
path.replace('/personoptions', '') + '/newreport'
msg = htmlNewPost(self.server.cssCache,
False, self.server.translate,
baseDir,
httpPrefix,
reportPath, None, [],
postUrl, pageNumber,
chooserNickname,
domain,
domainFull,
self.server.defaultTimeline,
self.server.newswire).encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self.server.POSTbusy = False
return
# redirect back from person options screen
if callingDomain.endswith('.onion') and onionDomain:
originPathStr = 'http://' + onionDomain + usersPath
elif callingDomain.endswith('.i2p') and i2pDomain:
originPathStr = 'http://' + i2pDomain + usersPath
self._redirect_headers(originPathStr, cookie, callingDomain)
self.server.POSTbusy = False
return

    def _unfollowConfirm(self, callingDomain: str, cookie: str,
authorized: bool, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
debug: bool) -> None:
"""Confirm to unfollow
"""
usersPath = path.split('/unfollowconfirm')[0]
originPathStr = httpPrefix + '://' + domainFull + usersPath
followerNickname = getNicknameFromActor(originPathStr)
length = int(self.headers['Content-length'])
try:
followConfirmParams = self.rfile.read(length).decode('utf-8')
except SocketError as e:
if e.errno == errno.ECONNRESET:
print('WARN: POST followConfirmParams ' +
'connection was reset')
else:
print('WARN: POST followConfirmParams socket error')
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
except ValueError as e:
print('ERROR: POST followConfirmParams rfile.read failed')
print(e)
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
if '&submitYes=' in followConfirmParams:
followingActor = \
urllib.parse.unquote_plus(followConfirmParams)
followingActor = followingActor.split('actor=')[1]
if '&' in followingActor:
followingActor = followingActor.split('&')[0]
followingNickname = getNicknameFromActor(followingActor)
followingDomain, followingPort = \
getDomainFromActor(followingActor)
followingDomainFull = followingDomain
if followingPort:
if followingPort != 80 and followingPort != 443:
followingDomainFull = \
followingDomain + ':' + str(followingPort)
if followerNickname == followingNickname and \
followingDomain == domain and \
followingPort == port:
if debug:
print('You cannot unfollow yourself!')
else:
if debug:
print(followerNickname + ' stops following ' +
followingActor)
followActor = \
httpPrefix + '://' + domainFull + \
'/users/' + followerNickname
statusNumber, published = getStatusNumber()
followId = followActor + '/statuses/' + str(statusNumber)
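                # wrap the original Follow in an ActivityPub Undo
                # activity and send it via the outbox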
unfollowJson = {
'@context': 'https://www.w3.org/ns/activitystreams',
'id': followId + '/undo',
'type': 'Undo',
'actor': followActor,
'object': {
'id': followId,
'type': 'Follow',
'actor': followActor,
'object': followingActor
}
}
pathUsersSection = path.split('/users/')[1]
self.postToNickname = pathUsersSection.split('/')[0]
unfollowPerson(self.server.baseDir, self.postToNickname,
self.server.domain,
followingNickname, followingDomainFull)
self._postToOutboxThread(unfollowJson)
if callingDomain.endswith('.onion') and onionDomain:
originPathStr = 'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and i2pDomain):
originPathStr = 'http://' + i2pDomain + usersPath
self._redirect_headers(originPathStr, cookie, callingDomain)
self.server.POSTbusy = False

    def _followConfirm(self, callingDomain: str, cookie: str,
authorized: bool, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
debug: bool) -> None:
"""Confirm to follow
"""
usersPath = path.split('/followconfirm')[0]
originPathStr = httpPrefix + '://' + domainFull + usersPath
followerNickname = getNicknameFromActor(originPathStr)
length = int(self.headers['Content-length'])
try:
followConfirmParams = self.rfile.read(length).decode('utf-8')
except SocketError as e:
if e.errno == errno.ECONNRESET:
print('WARN: POST followConfirmParams ' +
'connection was reset')
else:
print('WARN: POST followConfirmParams socket error')
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
except ValueError as e:
print('ERROR: POST followConfirmParams rfile.read failed')
print(e)
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
if '&submitView=' in followConfirmParams:
followingActor = \
urllib.parse.unquote_plus(followConfirmParams)
followingActor = followingActor.split('actor=')[1]
if '&' in followingActor:
followingActor = followingActor.split('&')[0]
self._redirect_headers(followingActor, cookie, callingDomain)
self.server.POSTbusy = False
return
if '&submitYes=' in followConfirmParams:
followingActor = \
urllib.parse.unquote_plus(followConfirmParams)
followingActor = followingActor.split('actor=')[1]
if '&' in followingActor:
followingActor = followingActor.split('&')[0]
followingNickname = getNicknameFromActor(followingActor)
followingDomain, followingPort = \
getDomainFromActor(followingActor)
if followerNickname == followingNickname and \
followingDomain == domain and \
followingPort == port:
if debug:
print('You cannot follow yourself!')
else:
if debug:
print('Sending follow request from ' +
followerNickname + ' to ' + followingActor)
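                # delegate construction and delivery of the Follow
                # activity to sendFollowRequest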
sendFollowRequest(self.server.session,
baseDir, followerNickname,
domain, port,
httpPrefix,
followingNickname,
followingDomain,
followingPort, httpPrefix,
False, self.server.federationList,
self.server.sendThreads,
self.server.postLog,
self.server.cachedWebfingers,
self.server.personCache,
debug,
self.server.projectVersion,
self.server.allowNewsFollowers)
if callingDomain.endswith('.onion') and onionDomain:
originPathStr = 'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and i2pDomain):
originPathStr = 'http://' + i2pDomain + usersPath
self._redirect_headers(originPathStr, cookie, callingDomain)
self.server.POSTbusy = False

    def _blockConfirm(self, callingDomain: str, cookie: str,
authorized: bool, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
debug: bool) -> None:
"""Confirms a block
"""
usersPath = path.split('/blockconfirm')[0]
originPathStr = httpPrefix + '://' + domainFull + usersPath
blockerNickname = getNicknameFromActor(originPathStr)
if not blockerNickname:
if callingDomain.endswith('.onion') and onionDomain:
originPathStr = 'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and i2pDomain):
originPathStr = 'http://' + i2pDomain + usersPath
print('WARN: unable to find nickname in ' + originPathStr)
self._redirect_headers(originPathStr,
cookie, callingDomain)
self.server.POSTbusy = False
return
length = int(self.headers['Content-length'])
try:
blockConfirmParams = self.rfile.read(length).decode('utf-8')
except SocketError as e:
if e.errno == errno.ECONNRESET:
print('WARN: POST blockConfirmParams ' +
'connection was reset')
else:
print('WARN: POST blockConfirmParams socket error')
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
except ValueError as e:
print('ERROR: POST blockConfirmParams rfile.read failed')
print(e)
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
if '&submitYes=' in blockConfirmParams:
blockingActor = \
urllib.parse.unquote_plus(blockConfirmParams)
blockingActor = blockingActor.split('actor=')[1]
if '&' in blockingActor:
blockingActor = blockingActor.split('&')[0]
blockingNickname = getNicknameFromActor(blockingActor)
if not blockingNickname:
if callingDomain.endswith('.onion') and onionDomain:
originPathStr = 'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and i2pDomain):
originPathStr = 'http://' + i2pDomain + usersPath
print('WARN: unable to find nickname in ' + blockingActor)
self._redirect_headers(originPathStr,
cookie, callingDomain)
self.server.POSTbusy = False
return
blockingDomain, blockingPort = \
getDomainFromActor(blockingActor)
blockingDomainFull = blockingDomain
if blockingPort:
if blockingPort != 80 and blockingPort != 443:
if ':' not in blockingDomain:
blockingDomainFull = \
blockingDomain + ':' + str(blockingPort)
if blockerNickname == blockingNickname and \
blockingDomain == domain and \
blockingPort == port:
if debug:
print('You cannot block yourself!')
else:
if debug:
print('Adding block by ' + blockerNickname +
' of ' + blockingActor)
addBlock(baseDir, blockerNickname,
domain,
blockingNickname,
blockingDomainFull)
if callingDomain.endswith('.onion') and onionDomain:
originPathStr = 'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and i2pDomain):
originPathStr = 'http://' + i2pDomain + usersPath
self._redirect_headers(originPathStr, cookie, callingDomain)
self.server.POSTbusy = False

    def _unblockConfirm(self, callingDomain: str, cookie: str,
authorized: bool, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
debug: bool) -> None:
"""Confirms a unblock
"""
usersPath = path.split('/unblockconfirm')[0]
originPathStr = httpPrefix + '://' + domainFull + usersPath
blockerNickname = getNicknameFromActor(originPathStr)
if not blockerNickname:
if callingDomain.endswith('.onion') and onionDomain:
originPathStr = 'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and i2pDomain):
originPathStr = 'http://' + i2pDomain + usersPath
print('WARN: unable to find nickname in ' + originPathStr)
self._redirect_headers(originPathStr,
cookie, callingDomain)
self.server.POSTbusy = False
return
length = int(self.headers['Content-length'])
try:
blockConfirmParams = self.rfile.read(length).decode('utf-8')
except SocketError as e:
if e.errno == errno.ECONNRESET:
print('WARN: POST blockConfirmParams ' +
'connection was reset')
else:
print('WARN: POST blockConfirmParams socket error')
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
except ValueError as e:
print('ERROR: POST blockConfirmParams rfile.read failed')
print(e)
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
if '&submitYes=' in blockConfirmParams:
blockingActor = \
urllib.parse.unquote_plus(blockConfirmParams)
blockingActor = blockingActor.split('actor=')[1]
if '&' in blockingActor:
blockingActor = blockingActor.split('&')[0]
blockingNickname = getNicknameFromActor(blockingActor)
if not blockingNickname:
if callingDomain.endswith('.onion') and onionDomain:
originPathStr = 'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and i2pDomain):
originPathStr = 'http://' + i2pDomain + usersPath
print('WARN: unable to find nickname in ' + blockingActor)
self._redirect_headers(originPathStr,
cookie, callingDomain)
self.server.POSTbusy = False
return
blockingDomain, blockingPort = \
getDomainFromActor(blockingActor)
blockingDomainFull = blockingDomain
if blockingPort:
if blockingPort != 80 and blockingPort != 443:
if ':' not in blockingDomain:
blockingDomainFull = \
blockingDomain + ':' + str(blockingPort)
if blockerNickname == blockingNickname and \
blockingDomain == domain and \
blockingPort == port:
if debug:
print('You cannot unblock yourself!')
else:
if debug:
print(blockerNickname + ' stops blocking ' +
blockingActor)
removeBlock(baseDir,
blockerNickname, domain,
blockingNickname, blockingDomainFull)
if callingDomain.endswith('.onion') and onionDomain:
originPathStr = 'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and i2pDomain):
originPathStr = 'http://' + i2pDomain + usersPath
self._redirect_headers(originPathStr,
cookie, callingDomain)
self.server.POSTbusy = False

    def _receiveSearchQuery(self, callingDomain: str, cookie: str,
authorized: bool, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str,
port: int, searchForEmoji: bool,
onionDomain: str, i2pDomain: str,
debug: bool) -> None:
"""Receive a search query
"""
# get the page number
pageNumber = 1
if '/searchhandle?page=' in path:
pageNumberStr = path.split('/searchhandle?page=')[1]
if '#' in pageNumberStr:
pageNumberStr = pageNumberStr.split('#')[0]
if pageNumberStr.isdigit():
pageNumber = int(pageNumberStr)
path = path.split('?page=')[0]
usersPath = path.replace('/searchhandle', '')
actorStr = httpPrefix + '://' + domainFull + usersPath
length = int(self.headers['Content-length'])
try:
searchParams = self.rfile.read(length).decode('utf-8')
except SocketError as e:
if e.errno == errno.ECONNRESET:
print('WARN: POST searchParams connection was reset')
else:
print('WARN: POST searchParams socket error')
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
except ValueError as e:
print('ERROR: POST searchParams rfile.read failed')
print(e)
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
if 'submitBack=' in searchParams:
# go back on search screen
if callingDomain.endswith('.onion') and onionDomain:
actorStr = 'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and i2pDomain):
actorStr = 'http://' + i2pDomain + usersPath
self._redirect_headers(actorStr + '/' +
self.server.defaultTimeline,
cookie, callingDomain)
self.server.POSTbusy = False
return
if 'searchtext=' in searchParams:
searchStr = searchParams.split('searchtext=')[1]
if '&' in searchStr:
searchStr = searchStr.split('&')[0]
searchStr = \
urllib.parse.unquote_plus(searchStr.strip())
searchStr = searchStr.lower().strip()
print('searchStr: ' + searchStr)
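            # dispatch on the search text: '#' hashtag, '*' skill,
            # '!' own post history, a handle or actor URL for profile
            # search, ':' or a trailing ' emoji' for emoji search,
            # and anything else searches shared items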
if searchForEmoji:
searchStr = ':' + searchStr + ':'
if searchStr.startswith('#'):
nickname = getNicknameFromActor(actorStr)
# hashtag search
hashtagStr = \
htmlHashtagSearch(self.server.cssCache,
nickname, domain, port,
self.server.recentPostsCache,
self.server.maxRecentPosts,
self.server.translate,
baseDir,
searchStr[1:], 1,
maxPostsInFeed,
self.server.session,
self.server.cachedWebfingers,
self.server.personCache,
httpPrefix,
self.server.projectVersion,
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly)
if hashtagStr:
msg = hashtagStr.encode('utf-8')
self._login_headers('text/html',
len(msg), callingDomain)
self._write(msg)
self.server.POSTbusy = False
return
elif searchStr.startswith('*'):
# skill search
searchStr = searchStr.replace('*', '').strip()
skillStr = \
htmlSkillsSearch(self.server.cssCache,
self.server.translate,
baseDir,
httpPrefix,
searchStr,
self.server.instanceOnlySkillsSearch,
64)
if skillStr:
msg = skillStr.encode('utf-8')
self._login_headers('text/html',
len(msg), callingDomain)
self._write(msg)
self.server.POSTbusy = False
return
elif searchStr.startswith('!'):
# your post history search
nickname = getNicknameFromActor(actorStr)
searchStr = searchStr.replace('!', '').strip()
historyStr = \
htmlHistorySearch(self.server.cssCache,
self.server.translate,
baseDir,
httpPrefix,
nickname,
domain,
searchStr,
maxPostsInFeed,
pageNumber,
self.server.projectVersion,
self.server.recentPostsCache,
self.server.maxRecentPosts,
self.server.session,
self.server.cachedWebfingers,
self.server.personCache,
port,
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly)
if historyStr:
msg = historyStr.encode('utf-8')
self._login_headers('text/html',
len(msg), callingDomain)
self._write(msg)
self.server.POSTbusy = False
return
elif ('@' in searchStr or
('://' in searchStr and
('/users/' in searchStr or
'/profile/' in searchStr or
'/accounts/' in searchStr or
'/channel/' in searchStr))):
# profile search
nickname = getNicknameFromActor(actorStr)
if not self.server.session:
print('Starting new session during handle search')
self.server.session = \
createSession(self.server.proxyType)
if not self.server.session:
print('ERROR: POST failed to create session ' +
'during handle search')
self._404()
self.server.POSTbusy = False
return
profilePathStr = path.replace('/searchhandle', '')
profileStr = \
htmlProfileAfterSearch(self.server.cssCache,
self.server.recentPostsCache,
self.server.maxRecentPosts,
self.server.translate,
baseDir,
profilePathStr,
httpPrefix,
nickname,
domain,
port,
searchStr,
self.server.session,
self.server.cachedWebfingers,
self.server.personCache,
self.server.debug,
self.server.projectVersion,
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly)
if profileStr:
msg = profileStr.encode('utf-8')
self._login_headers('text/html',
len(msg), callingDomain)
self._write(msg)
self.server.POSTbusy = False
return
else:
if callingDomain.endswith('.onion') and onionDomain:
actorStr = 'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and i2pDomain):
actorStr = 'http://' + i2pDomain + usersPath
self._redirect_headers(actorStr + '/search',
cookie, callingDomain)
self.server.POSTbusy = False
return
elif (searchStr.startswith(':') or
searchStr.endswith(' emoji')):
# eg. "cat emoji"
if searchStr.endswith(' emoji'):
searchStr = \
searchStr.replace(' emoji', '')
# emoji search
emojiStr = \
htmlSearchEmoji(self.server.cssCache,
self.server.translate,
baseDir,
httpPrefix,
searchStr)
if emojiStr:
msg = emojiStr.encode('utf-8')
self._login_headers('text/html',
len(msg), callingDomain)
self._write(msg)
self.server.POSTbusy = False
return
else:
# shared items search
sharedItemsStr = \
htmlSearchSharedItems(self.server.cssCache,
self.server.translate,
baseDir,
searchStr, pageNumber,
maxPostsInFeed,
httpPrefix,
domainFull,
actorStr, callingDomain)
if sharedItemsStr:
msg = sharedItemsStr.encode('utf-8')
self._login_headers('text/html',
len(msg), callingDomain)
self._write(msg)
self.server.POSTbusy = False
return
if callingDomain.endswith('.onion') and onionDomain:
actorStr = 'http://' + onionDomain + usersPath
elif callingDomain.endswith('.i2p') and i2pDomain:
actorStr = 'http://' + i2pDomain + usersPath
self._redirect_headers(actorStr + '/' +
self.server.defaultTimeline,
cookie, callingDomain)
self.server.POSTbusy = False

    def _receiveVote(self, callingDomain: str, cookie: str,
authorized: bool, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str,
2020-11-01 17:18:05 +00:00
onionDomain: str, i2pDomain: str,
debug: bool) -> None:
"""Receive a vote via POST
"""
pageNumber = 1
if '?page=' in path:
pageNumberStr = path.split('?page=')[1]
if '#' in pageNumberStr:
pageNumberStr = pageNumberStr.split('#')[0]
if pageNumberStr.isdigit():
pageNumber = int(pageNumberStr)
path = path.split('?page=')[0]
# the actor who votes
usersPath = path.replace('/question', '')
actor = httpPrefix + '://' + domainFull + usersPath
nickname = getNicknameFromActor(actor)
if not nickname:
if callingDomain.endswith('.onion') and onionDomain:
actor = 'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and i2pDomain):
actor = 'http://' + i2pDomain + usersPath
self._redirect_headers(actor + '/' +
self.server.defaultTimeline +
'?page=' + str(pageNumber),
cookie, callingDomain)
self.server.POSTbusy = False
return
# get the parameters
length = int(self.headers['Content-length'])
try:
questionParams = self.rfile.read(length).decode('utf-8')
except SocketError as e:
if e.errno == errno.ECONNRESET:
print('WARN: POST questionParams connection was reset')
else:
print('WARN: POST questionParams socket error')
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
except ValueError as e:
print('ERROR: POST questionParams rfile.read failed')
print(e)
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
questionParams = questionParams.replace('+', ' ')
questionParams = questionParams.replace('%3F', '')
questionParams = \
urllib.parse.unquote_plus(questionParams.strip())
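        # the vote form supplies the id of the Question post and the
        # chosen answer, e.g. messageId=...&answer=... (illustrative)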
# post being voted on
messageId = None
if 'messageId=' in questionParams:
messageId = questionParams.split('messageId=')[1]
if '&' in messageId:
messageId = messageId.split('&')[0]
answer = None
if 'answer=' in questionParams:
answer = questionParams.split('answer=')[1]
if '&' in answer:
answer = answer.split('&')[0]
self._sendReplyToQuestion(nickname, messageId, answer)
if callingDomain.endswith('.onion') and onionDomain:
actor = 'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and i2pDomain):
actor = 'http://' + i2pDomain + usersPath
self._redirect_headers(actor + '/' +
self.server.defaultTimeline +
'?page=' + str(pageNumber), cookie,
callingDomain)
self.server.POSTbusy = False
return

    def _receiveImage(self, length: int,
callingDomain: str, cookie: str,
authorized: bool, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str,
onionDomain: str, i2pDomain: str,
debug: bool) -> None:
"""Receives an image via POST
"""
if not self.outboxAuthenticated:
if debug:
print('DEBUG: unauthenticated attempt to ' +
'post image to outbox')
self.send_response(403)
self.end_headers()
self.server.POSTbusy = False
return
pathUsersSection = path.split('/users/')[1]
if '/' not in pathUsersSection:
self._404()
self.server.POSTbusy = False
return
self.postFromNickname = pathUsersSection.split('/')[0]
accountsDir = \
baseDir + '/accounts/' + \
self.postFromNickname + '@' + domain
if not os.path.isdir(accountsDir):
self._404()
self.server.POSTbusy = False
return
try:
mediaBytes = self.rfile.read(length)
except SocketError as e:
if e.errno == errno.ECONNRESET:
print('WARN: POST mediaBytes ' +
'connection reset by peer')
else:
print('WARN: POST mediaBytes socket error')
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
except ValueError as e:
print('ERROR: POST mediaBytes rfile.read failed')
print(e)
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
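        # choose the upload file extension from the Content-type
        # header, defaulting to png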
mediaFilenameBase = accountsDir + '/upload'
mediaFilename = mediaFilenameBase + '.png'
if self.headers['Content-type'].endswith('jpeg'):
mediaFilename = mediaFilenameBase + '.jpg'
if self.headers['Content-type'].endswith('gif'):
mediaFilename = mediaFilenameBase + '.gif'
if self.headers['Content-type'].endswith('webp'):
mediaFilename = mediaFilenameBase + '.webp'
if self.headers['Content-type'].endswith('avif'):
mediaFilename = mediaFilenameBase + '.avif'
with open(mediaFilename, 'wb') as avFile:
avFile.write(mediaBytes)
if debug:
print('DEBUG: image saved to ' + mediaFilename)
self.send_response(201)
self.end_headers()
self.server.POSTbusy = False

    def _removeShare(self, callingDomain: str, cookie: str,
authorized: bool, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str,
onionDomain: str, i2pDomain: str,
debug: bool) -> None:
"""Removes a shared item
"""
usersPath = path.split('/rmshare')[0]
originPathStr = httpPrefix + '://' + domainFull + usersPath
length = int(self.headers['Content-length'])
try:
removeShareConfirmParams = \
self.rfile.read(length).decode('utf-8')
except SocketError as e:
if e.errno == errno.ECONNRESET:
print('WARN: POST removeShareConfirmParams ' +
'connection was reset')
else:
print('WARN: POST removeShareConfirmParams socket error')
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
except ValueError as e:
print('ERROR: POST removeShareConfirmParams rfile.read failed')
print(e)
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
if '&submitYes=' in removeShareConfirmParams:
removeShareConfirmParams = \
removeShareConfirmParams.replace('+', ' ').strip()
removeShareConfirmParams = \
urllib.parse.unquote_plus(removeShareConfirmParams)
shareActor = removeShareConfirmParams.split('actor=')[1]
if '&' in shareActor:
shareActor = shareActor.split('&')[0]
shareName = removeShareConfirmParams.split('shareName=')[1]
if '&' in shareName:
shareName = shareName.split('&')[0]
shareNickname = getNicknameFromActor(shareActor)
if shareNickname:
shareDomain, sharePort = getDomainFromActor(shareActor)
removeShare(baseDir,
shareNickname, shareDomain, shareName)
if callingDomain.endswith('.onion') and onionDomain:
originPathStr = 'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and i2pDomain):
originPathStr = 'http://' + i2pDomain + usersPath
self._redirect_headers(originPathStr + '/tlshares',
cookie, callingDomain)
self.server.POSTbusy = False

    def _removePost(self, callingDomain: str, cookie: str,
authorized: bool, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str,
onionDomain: str, i2pDomain: str,
debug: bool) -> None:
"""Endpoint for removing posts
"""
pageNumber = 1
usersPath = path.split('/rmpost')[0]
originPathStr = \
httpPrefix + '://' + \
domainFull + usersPath
length = int(self.headers['Content-length'])
try:
removePostConfirmParams = \
self.rfile.read(length).decode('utf-8')
except SocketError as e:
if e.errno == errno.ECONNRESET:
print('WARN: POST removePostConfirmParams ' +
'connection was reset')
else:
print('WARN: POST removePostConfirmParams socket error')
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
except ValueError as e:
print('ERROR: POST removePostConfirmParams rfile.read failed')
print(e)
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
if '&submitYes=' in removePostConfirmParams:
removePostConfirmParams = \
urllib.parse.unquote_plus(removePostConfirmParams)
removeMessageId = \
removePostConfirmParams.split('messageId=')[1]
if '&' in removeMessageId:
removeMessageId = removeMessageId.split('&')[0]
if 'pageNumber=' in removePostConfirmParams:
pageNumberStr = \
removePostConfirmParams.split('pageNumber=')[1]
if '&' in pageNumberStr:
pageNumberStr = pageNumberStr.split('&')[0]
if pageNumberStr.isdigit():
pageNumber = int(pageNumberStr)
yearStr = None
if 'year=' in removePostConfirmParams:
yearStr = removePostConfirmParams.split('year=')[1]
if '&' in yearStr:
yearStr = yearStr.split('&')[0]
monthStr = None
if 'month=' in removePostConfirmParams:
monthStr = removePostConfirmParams.split('month=')[1]
if '&' in monthStr:
monthStr = monthStr.split('&')[0]
if '/statuses/' in removeMessageId:
removePostActor = removeMessageId.split('/statuses/')[0]
if originPathStr in removePostActor:
toList = ['https://www.w3.org/ns/activitystreams#Public',
removePostActor]
deleteJson = {
"@context": "https://www.w3.org/ns/activitystreams",
'actor': removePostActor,
'object': removeMessageId,
'to': toList,
'cc': [removePostActor+'/followers'],
'type': 'Delete'
}
self.postToNickname = getNicknameFromActor(removePostActor)
if self.postToNickname:
if monthStr and yearStr:
if monthStr.isdigit() and yearStr.isdigit():
removeCalendarEvent(baseDir,
self.postToNickname,
domain,
int(yearStr),
int(monthStr),
removeMessageId)
self._postToOutboxThread(deleteJson)
if callingDomain.endswith('.onion') and onionDomain:
originPathStr = 'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and i2pDomain):
originPathStr = 'http://' + i2pDomain + usersPath
if pageNumber == 1:
self._redirect_headers(originPathStr + '/outbox', cookie,
callingDomain)
else:
self._redirect_headers(originPathStr + '/outbox?page=' +
str(pageNumber),
cookie, callingDomain)
self.server.POSTbusy = False

    def _linksUpdate(self, callingDomain: str, cookie: str,
authorized: bool, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str,
onionDomain: str, i2pDomain: str, debug: bool,
defaultTimeline: str) -> None:
"""Updates the left links column of the timeline
"""
usersPath = path.replace('/linksdata', '')
usersPath = usersPath.replace('/editlinks', '')
actorStr = httpPrefix + '://' + domainFull + usersPath
if ' boundary=' in self.headers['Content-type']:
boundary = self.headers['Content-type'].split('boundary=')[1]
if ';' in boundary:
boundary = boundary.split(';')[0]
# get the nickname
nickname = getNicknameFromActor(actorStr)
editor = None
if nickname:
editor = isEditor(baseDir, nickname)
if not nickname or not editor:
if callingDomain.endswith('.onion') and \
onionDomain:
actorStr = \
'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and
i2pDomain):
actorStr = \
'http://' + i2pDomain + usersPath
if not nickname:
print('WARN: nickname not found in ' + actorStr)
else:
                print('WARN: nickname is not an editor ' + actorStr)
self._redirect_headers(actorStr, cookie, callingDomain)
self.server.POSTbusy = False
return
length = int(self.headers['Content-length'])
# check that the POST isn't too large
if length > self.server.maxPostLength:
if callingDomain.endswith('.onion') and \
onionDomain:
actorStr = \
'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and
i2pDomain):
actorStr = \
'http://' + i2pDomain + usersPath
print('Maximum links data length exceeded ' + str(length))
self._redirect_headers(actorStr, cookie, callingDomain)
self.server.POSTbusy = False
return
try:
# read the bytes of the http form POST
postBytes = self.rfile.read(length)
except SocketError as e:
if e.errno == errno.ECONNRESET:
print('WARN: connection was reset while ' +
'reading bytes from http form POST')
else:
print('WARN: error while reading bytes ' +
'from http form POST')
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
except ValueError as e:
print('ERROR: failed to read bytes for POST')
print(e)
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
linksFilename = baseDir + '/accounts/links.txt'
# extract all of the text fields into a dict
fields = \
extractTextFieldsInPOST(postBytes, boundary, debug)
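        # the edited links are written back wholesale; an empty
        # editedLinks field removes the links file entirely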
if fields.get('editedLinks'):
linksStr = fields['editedLinks']
linksFile = open(linksFilename, "w+")
if linksFile:
linksFile.write(linksStr)
linksFile.close()
else:
if os.path.isfile(linksFilename):
os.remove(linksFilename)
# redirect back to the default timeline
if callingDomain.endswith('.onion') and \
onionDomain:
actorStr = \
'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and
i2pDomain):
actorStr = \
'http://' + i2pDomain + usersPath
self._redirect_headers(actorStr + '/' + defaultTimeline,
cookie, callingDomain)
self.server.POSTbusy = False

    def _newswireUpdate(self, callingDomain: str, cookie: str,
authorized: bool, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str,
onionDomain: str, i2pDomain: str, debug: bool,
defaultTimeline: str) -> None:
"""Updates the right newswire column of the timeline
"""
usersPath = path.replace('/newswiredata', '')
usersPath = usersPath.replace('/editnewswire', '')
actorStr = httpPrefix + '://' + domainFull + usersPath
if ' boundary=' in self.headers['Content-type']:
boundary = self.headers['Content-type'].split('boundary=')[1]
if ';' in boundary:
boundary = boundary.split(';')[0]
# get the nickname
nickname = getNicknameFromActor(actorStr)
moderator = None
if nickname:
moderator = isModerator(baseDir, nickname)
if not nickname or not moderator:
if callingDomain.endswith('.onion') and \
onionDomain:
actorStr = \
'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and
i2pDomain):
actorStr = \
'http://' + i2pDomain + usersPath
if not nickname:
print('WARN: nickname not found in ' + actorStr)
else:
                print('WARN: nickname is not a moderator ' + actorStr)
self._redirect_headers(actorStr, cookie, callingDomain)
self.server.POSTbusy = False
return
length = int(self.headers['Content-length'])
# check that the POST isn't too large
if length > self.server.maxPostLength:
if callingDomain.endswith('.onion') and \
onionDomain:
actorStr = \
'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and
i2pDomain):
actorStr = \
'http://' + i2pDomain + usersPath
print('Maximum newswire data length exceeded ' + str(length))
self._redirect_headers(actorStr, cookie, callingDomain)
self.server.POSTbusy = False
return
try:
# read the bytes of the http form POST
postBytes = self.rfile.read(length)
except SocketError as e:
if e.errno == errno.ECONNRESET:
print('WARN: connection was reset while ' +
'reading bytes from http form POST')
else:
print('WARN: error while reading bytes ' +
'from http form POST')
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
except ValueError as e:
print('ERROR: failed to read bytes for POST')
print(e)
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
newswireFilename = baseDir + '/accounts/newswire.txt'
# extract all of the text fields into a dict
fields = \
extractTextFieldsInPOST(postBytes, boundary, debug)
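        # as with the links column, the edited newswire sources are
        # written back wholesale and an empty field removes the file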
if fields.get('editedNewswire'):
newswireStr = fields['editedNewswire']
newswireFile = open(newswireFilename, "w+")
if newswireFile:
newswireFile.write(newswireStr)
newswireFile.close()
else:
if os.path.isfile(newswireFilename):
os.remove(newswireFilename)
# save filtered words list for the newswire
filterNewswireFilename = \
baseDir + '/accounts/' + \
'news@' + domain + '/filters.txt'
if fields.get('filteredWordsNewswire'):
with open(filterNewswireFilename, 'w+') as filterfile:
filterfile.write(fields['filteredWordsNewswire'])
else:
if os.path.isfile(filterNewswireFilename):
os.remove(filterNewswireFilename)
# save news tagging rules
hashtagRulesFilename = \
baseDir + '/accounts/hashtagrules.txt'
if fields.get('hashtagRulesList'):
with open(hashtagRulesFilename, 'w+') as rulesfile:
rulesfile.write(fields['hashtagRulesList'])
else:
if os.path.isfile(hashtagRulesFilename):
os.remove(hashtagRulesFilename)
newswireTrustedFilename = baseDir + '/accounts/newswiretrusted.txt'
if fields.get('trustedNewswire'):
newswireTrusted = fields['trustedNewswire']
if not newswireTrusted.endswith('\n'):
newswireTrusted += '\n'
trustFile = open(newswireTrustedFilename, "w+")
if trustFile:
trustFile.write(newswireTrusted)
trustFile.close()
else:
if os.path.isfile(newswireTrustedFilename):
os.remove(newswireTrustedFilename)
# redirect back to the default timeline
if callingDomain.endswith('.onion') and \
onionDomain:
actorStr = \
'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and
i2pDomain):
actorStr = \
'http://' + i2pDomain + usersPath
self._redirect_headers(actorStr + '/' + defaultTimeline,
cookie, callingDomain)
self.server.POSTbusy = False

    def _citationsUpdate(self, callingDomain: str, cookie: str,
authorized: bool, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str,
onionDomain: str, i2pDomain: str, debug: bool,
defaultTimeline: str,
newswire: {}) -> None:
"""Updates the citations for a blog post after hitting
update button on the citations screen
"""
usersPath = path.replace('/citationsdata', '')
actorStr = httpPrefix + '://' + domainFull + usersPath
nickname = getNicknameFromActor(actorStr)
citationsFilename = \
baseDir + '/accounts/' + \
nickname + '@' + domain + '/.citations.txt'
# remove any existing citations file
if os.path.isfile(citationsFilename):
os.remove(citationsFilename)
if newswire and \
' boundary=' in self.headers['Content-type']:
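# multipart form posts carry a Content-type header such as
# "multipart/form-data; boundary=----formboundary" (example value);
# the boundary token separates the individual fields within postBytes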
boundary = self.headers['Content-type'].split('boundary=')[1]
if ';' in boundary:
boundary = boundary.split(';')[0]
length = int(self.headers['Content-length'])
# check that the POST isn't too large
if length > self.server.maxPostLength:
if callingDomain.endswith('.onion') and \
onionDomain:
actorStr = \
'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and
i2pDomain):
actorStr = \
'http://' + i2pDomain + usersPath
print('Maximum citations data length exceeded ' + str(length))
self._redirect_headers(actorStr, cookie, callingDomain)
self.server.POSTbusy = False
return
try:
# read the bytes of the http form POST
postBytes = self.rfile.read(length)
except SocketError as e:
if e.errno == errno.ECONNRESET:
print('WARN: connection was reset while ' +
'reading bytes from http form ' +
'citation screen POST')
else:
print('WARN: error while reading bytes ' +
'from http form citations screen POST')
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
except ValueError as e:
print('ERROR: failed to read bytes for ' +
'citations screen POST')
print(e)
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
# extract all of the text fields into a dict
fields = \
extractTextFieldsInPOST(postBytes, boundary, debug)
print('citations POST fields: ' + str(fields))
citations = []
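# the citations screen posts one field per selected newswire item,
# named newswire0 .. newswire127, each containing the item's date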
for ctr in range(0, 128):
fieldName = 'newswire' + str(ctr)
if not fields.get(fieldName):
continue
citations.append(fields[fieldName])
if citations:
citationsStr = ''
for citationDate in citations:
citationsStr += citationDate + '\n'
# save citations dates, so that they can be added when
# reloading the newblog screen
citationsFile = open(citationsFilename, "w+")
if citationsFile:
citationsFile.write(citationsStr)
citationsFile.close()
# redirect back to the default timeline
if callingDomain.endswith('.onion') and \
onionDomain:
actorStr = \
'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and
i2pDomain):
actorStr = \
'http://' + i2pDomain + usersPath
self._redirect_headers(actorStr + '/newblog',
cookie, callingDomain)
self.server.POSTbusy = False
def _newsPostEdit(self, callingDomain: str, cookie: str,
authorized: bool, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str,
onionDomain: str, i2pDomain: str, debug: bool,
defaultTimeline: str) -> None:
"""edits a news post
"""
usersPath = path.replace('/newseditdata', '')
usersPath = usersPath.replace('/editnewspost', '')
actorStr = httpPrefix + '://' + domainFull + usersPath
if ' boundary=' in self.headers['Content-type']:
boundary = self.headers['Content-type'].split('boundary=')[1]
if ';' in boundary:
boundary = boundary.split(';')[0]
# get the nickname
nickname = getNicknameFromActor(actorStr)
editorRole = None
if nickname:
editorRole = isEditor(baseDir, nickname)
if not nickname or not editorRole:
if callingDomain.endswith('.onion') and \
onionDomain:
actorStr = \
'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and
i2pDomain):
actorStr = \
'http://' + i2pDomain + usersPath
if not nickname:
print('WARN: nickname not found in ' + actorStr)
else:
print('WARN: nickname is not an editor: ' + actorStr)
self._redirect_headers(actorStr + '/tlnews',
cookie, callingDomain)
self.server.POSTbusy = False
return
length = int(self.headers['Content-length'])
# check that the POST isn't too large
if length > self.server.maxPostLength:
if callingDomain.endswith('.onion') and \
onionDomain:
actorStr = \
'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and
i2pDomain):
actorStr = \
'http://' + i2pDomain + usersPath
print('Maximum news data length exceeded ' + str(length))
self._redirect_headers(actorStr + '/tlnews',
cookie, callingDomain)
self.server.POSTbusy = False
return
try:
# read the bytes of the http form POST
postBytes = self.rfile.read(length)
except SocketError as e:
if e.errno == errno.ECONNRESET:
print('WARN: connection was reset while ' +
'reading bytes from http form POST')
else:
print('WARN: error while reading bytes ' +
'from http form POST')
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
except ValueError as e:
print('ERROR: failed to read bytes for POST')
print(e)
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
# extract all of the text fields into a dict
fields = \
extractTextFieldsInPOST(postBytes, boundary, debug)
newsPostUrl = None
newsPostTitle = None
newsPostContent = None
if fields.get('newsPostUrl'):
newsPostUrl = fields['newsPostUrl']
if fields.get('newsPostTitle'):
newsPostTitle = fields['newsPostTitle']
if fields.get('editedNewsPost'):
newsPostContent = fields['editedNewsPost']
if newsPostUrl and newsPostContent and newsPostTitle:
# load the post
postFilename = \
locatePost(baseDir, nickname, domain,
newsPostUrl)
if postFilename:
postJsonObject = loadJson(postFilename)
# update the content and title
postJsonObject['object']['summary'] = \
newsPostTitle
postJsonObject['object']['content'] = \
newsPostContent
# update newswire
pubDate = postJsonObject['object']['published']
publishedDate = \
datetime.datetime.strptime(pubDate,
"%Y-%m-%dT%H:%M:%SZ")
if self.server.newswire.get(str(publishedDate)):
self.server.newswire[str(publishedDate)][0] = \
newsPostTitle
self.server.newswire[str(publishedDate)][4] = \
newsPostContent
# save newswire
newswireStateFilename = \
baseDir + '/accounts/.newswirestate.json'
try:
saveJson(self.server.newswire,
newswireStateFilename)
except Exception as e:
print('ERROR saving newswire state, ' + str(e))
# remove any previous cached news posts
newsId = \
postJsonObject['object']['id'].replace('/', '#')
clearFromPostCaches(baseDir, self.server.recentPostsCache,
newsId)
# save the news post
saveJson(postJsonObject, postFilename)
# redirect back to the default timeline
if callingDomain.endswith('.onion') and \
onionDomain:
actorStr = \
'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and
i2pDomain):
actorStr = \
'http://' + i2pDomain + usersPath
self._redirect_headers(actorStr + '/tlnews',
cookie, callingDomain)
self.server.POSTbusy = False
def _profileUpdate(self, callingDomain: str, cookie: str,
authorized: bool, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str,
onionDomain: str, i2pDomain: str,
debug: bool) -> None:
"""Updates your user profile after editing via the Edit button
on the profile screen
"""
usersPath = path.replace('/profiledata', '')
usersPath = usersPath.replace('/editprofile', '')
actorStr = httpPrefix + '://' + domainFull + usersPath
if ' boundary=' in self.headers['Content-type']:
boundary = self.headers['Content-type'].split('boundary=')[1]
if ';' in boundary:
boundary = boundary.split(';')[0]
# get the nickname
nickname = getNicknameFromActor(actorStr)
if not nickname:
if callingDomain.endswith('.onion') and \
onionDomain:
actorStr = \
'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and
i2pDomain):
actorStr = \
'http://' + i2pDomain + usersPath
print('WARN: nickname not found in ' + actorStr)
self._redirect_headers(actorStr, cookie, callingDomain)
self.server.POSTbusy = False
return
length = int(self.headers['Content-length'])
# check that the POST isn't too large
if length > self.server.maxPostLength:
if callingDomain.endswith('.onion') and \
onionDomain:
actorStr = \
'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and
i2pDomain):
actorStr = \
'http://' + i2pDomain + usersPath
print('Maximum profile data length exceeded ' +
str(length))
self._redirect_headers(actorStr, cookie, callingDomain)
self.server.POSTbusy = False
return
try:
# read the bytes of the http form POST
postBytes = self.rfile.read(length)
except SocketError as e:
if e.errno == errno.ECONNRESET:
print('WARN: connection was reset while ' +
'reading bytes from http form POST')
else:
print('WARN: error while reading bytes ' +
'from http form POST')
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
except ValueError as e:
print('ERROR: failed to read bytes for POST')
print(e)
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
# get the various avatar, banner and background images
actorChanged = True
profileMediaTypes = ('avatar', 'image',
'banner', 'search_banner',
'instanceLogo',
'left_col_image', 'right_col_image')
profileMediaTypesUploaded = {}
for mType in profileMediaTypes:
if debug:
print('DEBUG: profile update extracting ' + mType +
' image or font from POST')
mediaBytes, postBytes = \
extractMediaInFormPOST(postBytes, boundary, mType)
if mediaBytes:
if debug:
print('DEBUG: profile update ' + mType +
' image or font was found. ' +
str(len(mediaBytes)) + ' bytes')
else:
if debug:
print('DEBUG: profile update, no ' + mType +
' image or font was found in POST')
continue
# Note: a .temp extension is used here so that at no
# time is an image with metadata publicly exposed,
# even for a few milliseconds
if mType == 'instanceLogo':
filenameBase = \
baseDir + '/accounts/login.temp'
else:
filenameBase = \
baseDir + '/accounts/' + \
nickname + '@' + domain + \
'/' + mType + '.temp'
filename, attachmentMediaType = \
saveMediaInFormPOST(mediaBytes, debug,
filenameBase)
if filename:
print('Profile update POST ' + mType +
' media or font filename is ' + filename)
else:
print('Profile update, no ' + mType +
' media or font filename in POST')
continue
postImageFilename = filename.replace('.temp', '')
if debug:
print('DEBUG: POST ' + mType +
' media removing metadata')
# remove existing etag
if os.path.isfile(postImageFilename + '.etag'):
try:
os.remove(postImageFilename + '.etag')
except BaseException:
pass
removeMetaData(filename, postImageFilename)
if os.path.isfile(postImageFilename):
print('profile update POST ' + mType +
' image or font saved to ' + postImageFilename)
if mType != 'instanceLogo':
lastPartOfImageFilename = \
postImageFilename.split('/')[-1]
profileMediaTypesUploaded[mType] = \
lastPartOfImageFilename
actorChanged = True
else:
print('ERROR: profile update POST ' + mType +
' image or font could not be saved to ' +
postImageFilename)
# extract all of the text fields into a dict
fields = \
extractTextFieldsInPOST(postBytes, boundary, debug)
if debug:
if fields:
print('DEBUG: profile update text ' +
'field extracted from POST ' + str(fields))
else:
print('WARN: profile update, no text ' +
'fields could be extracted from POST')
# load the json for the actor for this user
actorFilename = \
baseDir + '/accounts/' + \
nickname + '@' + domain + '.json'
if os.path.isfile(actorFilename):
actorJson = loadJson(actorFilename)
if actorJson:
# update the avatar/image url file extension
uploads = profileMediaTypesUploaded.items()
for mType, lastPart in uploads:
repStr = '/' + lastPart
if mType == 'avatar':
lastPartOfUrl = \
actorJson['icon']['url'].split('/')[-1]
srchStr = '/' + lastPartOfUrl
actorJson['icon']['url'] = \
actorJson['icon']['url'].replace(srchStr,
repStr)
elif mType == 'image':
lastPartOfUrl = \
actorJson['image']['url'].split('/')[-1]
srchStr = '/' + lastPartOfUrl
actorJson['image']['url'] = \
actorJson['image']['url'].replace(srchStr,
repStr)
# set skill levels
skillCtr = 1
newSkills = {}
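# skills arrive as paired form fields skillName1..skillName9 and
# skillValue1..skillValue9, where the value is a numeric level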
while skillCtr < 10:
skillName = \
fields.get('skillName' + str(skillCtr))
if not skillName:
skillCtr += 1
continue
skillValue = \
fields.get('skillValue' + str(skillCtr))
if not skillValue:
skillCtr += 1
continue
if not actorJson['skills'].get(skillName):
actorChanged = True
else:
if actorJson['skills'][skillName] != \
int(skillValue):
actorChanged = True
newSkills[skillName] = int(skillValue)
skillCtr += 1
if len(actorJson['skills'].items()) != \
len(newSkills.items()):
actorChanged = True
actorJson['skills'] = newSkills
# change password
if fields.get('password'):
if fields.get('passwordconfirm'):
if fields['password'] == \
fields['passwordconfirm']:
if len(fields['password']) > 2:
# set password
pwd = fields['password']
storeBasicCredentials(baseDir,
nickname,
pwd)
# change displayed name
if fields.get('displayNickname'):
if fields['displayNickname'] != actorJson['name']:
actorJson['name'] = fields['displayNickname']
actorChanged = True
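# media, blogs and news instance modes are mutually exclusive;
# switching one on turns the others off and changes the default
# timeline accordingly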
# change media instance status
if fields.get('mediaInstance'):
self.server.mediaInstance = False
self.server.defaultTimeline = 'inbox'
if fields['mediaInstance'] == 'on':
self.server.mediaInstance = True
self.server.blogsInstance = False
self.server.newsInstance = False
self.server.defaultTimeline = 'tlmedia'
setConfigParam(baseDir,
"mediaInstance",
self.server.mediaInstance)
setConfigParam(baseDir,
"blogsInstance",
self.server.blogsInstance)
setConfigParam(baseDir,
"newsInstance",
self.server.newsInstance)
else:
if self.server.mediaInstance:
self.server.mediaInstance = False
self.server.defaultTimeline = 'inbox'
setConfigParam(baseDir,
"mediaInstance",
self.server.mediaInstance)
# change news instance status
if fields.get('newsInstance'):
self.server.newsInstance = False
self.server.defaultTimeline = 'inbox'
if fields['newsInstance'] == 'on':
self.server.newsInstance = True
self.server.blogsInstance = False
self.server.mediaInstance = False
self.server.defaultTimeline = 'tlnews'
setConfigParam(baseDir,
"mediaInstance",
self.server.mediaInstance)
setConfigParam(baseDir,
"blogsInstance",
self.server.blogsInstance)
setConfigParam(baseDir,
"newsInstance",
self.server.newsInstance)
else:
if self.server.newsInstance:
self.server.newsInstance = False
self.server.defaultTimeline = 'inbox'
setConfigParam(baseDir,
"newsInstance",
self.server.newsInstance)
# change blog instance status
if fields.get('blogsInstance'):
self.server.blogsInstance = False
self.server.defaultTimeline = 'inbox'
if fields['blogsInstance'] == 'on':
self.server.blogsInstance = True
self.server.mediaInstance = False
self.server.newsInstance = False
self.server.defaultTimeline = 'tlblogs'
setConfigParam(baseDir,
"blogsInstance",
self.server.blogsInstance)
setConfigParam(baseDir,
"mediaInstance",
self.server.mediaInstance)
setConfigParam(baseDir,
"newsInstance",
self.server.newsInstance)
else:
if self.server.blogsInstance:
self.server.blogsInstance = False
self.server.defaultTimeline = 'inbox'
setConfigParam(baseDir,
"blogsInstance",
self.server.blogsInstance)
# change theme
if fields.get('themeDropdown'):
setTheme(baseDir,
fields['themeDropdown'],
domain)
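# applying a theme can also rewrite these layout flags in
# config.json, so re-read them after setTheme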
self.server.showPublishAsIcon = \
getConfigParam(self.server.baseDir,
'showPublishAsIcon')
self.server.fullWidthTimelineButtonHeader = \
getConfigParam(self.server.baseDir,
'fullWidthTimelineButtonHeader')
self.server.iconsAsButtons = \
getConfigParam(self.server.baseDir,
'iconsAsButtons')
self.server.rssIconAtTop = \
getConfigParam(self.server.baseDir,
'rssIconAtTop')
self.server.publishButtonAtTop = \
getConfigParam(self.server.baseDir,
'publishButtonAtTop')
setNewsAvatar(baseDir,
fields['themeDropdown'],
httpPrefix,
domain,
domainFull)
# change email address
currentEmailAddress = getEmailAddress(actorJson)
if fields.get('email'):
if fields['email'] != currentEmailAddress:
setEmailAddress(actorJson, fields['email'])
actorChanged = True
else:
if currentEmailAddress:
setEmailAddress(actorJson, '')
actorChanged = True
# change xmpp address
currentXmppAddress = getXmppAddress(actorJson)
if fields.get('xmppAddress'):
if fields['xmppAddress'] != currentXmppAddress:
setXmppAddress(actorJson,
fields['xmppAddress'])
actorChanged = True
else:
if currentXmppAddress:
setXmppAddress(actorJson, '')
actorChanged = True
# change matrix address
currentMatrixAddress = getMatrixAddress(actorJson)
if fields.get('matrixAddress'):
if fields['matrixAddress'] != currentMatrixAddress:
setMatrixAddress(actorJson,
fields['matrixAddress'])
actorChanged = True
else:
if currentMatrixAddress:
setMatrixAddress(actorJson, '')
actorChanged = True
# change SSB address
currentSSBAddress = getSSBAddress(actorJson)
if fields.get('ssbAddress'):
if fields['ssbAddress'] != currentSSBAddress:
setSSBAddress(actorJson,
fields['ssbAddress'])
actorChanged = True
else:
if currentSSBAddress:
setSSBAddress(actorJson, '')
actorChanged = True
# change blog address
currentBlogAddress = getBlogAddress(actorJson)
if fields.get('blogAddress'):
if fields['blogAddress'] != currentBlogAddress:
setBlogAddress(actorJson,
fields['blogAddress'])
actorChanged = True
else:
if currentBlogAddress:
setBlogAddress(actorJson, '')
actorChanged = True
# change tox address
currentToxAddress = getToxAddress(actorJson)
if fields.get('toxAddress'):
if fields['toxAddress'] != currentToxAddress:
setToxAddress(actorJson,
fields['toxAddress'])
actorChanged = True
else:
if currentToxAddress:
setToxAddress(actorJson, '')
actorChanged = True
# change PGP public key
currentPGPpubKey = getPGPpubKey(actorJson)
if fields.get('pgp'):
if fields['pgp'] != currentPGPpubKey:
setPGPpubKey(actorJson,
fields['pgp'])
actorChanged = True
else:
if currentPGPpubKey:
setPGPpubKey(actorJson, '')
actorChanged = True
# change PGP fingerprint
currentPGPfingerprint = getPGPfingerprint(actorJson)
if fields.get('openpgp'):
if fields['openpgp'] != currentPGPfingerprint:
setPGPfingerprint(actorJson,
fields['openpgp'])
actorChanged = True
else:
if currentPGPfingerprint:
setPGPfingerprint(actorJson, '')
actorChanged = True
# change donation link
currentDonateUrl = getDonationUrl(actorJson)
if fields.get('donateUrl'):
if fields['donateUrl'] != currentDonateUrl:
setDonationUrl(actorJson,
fields['donateUrl'])
actorChanged = True
else:
if currentDonateUrl:
setDonationUrl(actorJson, '')
actorChanged = True
# change instance title
if fields.get('instanceTitle'):
currInstanceTitle = \
getConfigParam(baseDir, 'instanceTitle')
if fields['instanceTitle'] != currInstanceTitle:
setConfigParam(baseDir, 'instanceTitle',
fields['instanceTitle'])
# change YouTube alternate domain
if fields.get('ytdomain'):
currYTDomain = self.server.YTReplacementDomain
if fields['ytdomain'] != currYTDomain:
newYTDomain = fields['ytdomain']
if '://' in newYTDomain:
newYTDomain = newYTDomain.split('://')[1]
if '/' in newYTDomain:
newYTDomain = newYTDomain.split('/')[0]
if '.' in newYTDomain:
setConfigParam(baseDir,
'youtubedomain',
newYTDomain)
self.server.YTReplacementDomain = \
newYTDomain
else:
setConfigParam(baseDir,
'youtubedomain', '')
self.server.YTReplacementDomain = None
# change instance description
currInstanceDescriptionShort = \
getConfigParam(baseDir,
'instanceDescriptionShort')
if fields.get('instanceDescriptionShort'):
if fields['instanceDescriptionShort'] != \
currInstanceDescriptionShort:
iDesc = fields['instanceDescriptionShort']
setConfigParam(baseDir,
'instanceDescriptionShort',
iDesc)
else:
if currInstanceDescriptionShort:
setConfigParam(baseDir,
'instanceDescriptionShort', '')
currInstanceDescription = \
getConfigParam(baseDir, 'instanceDescription')
if fields.get('instanceDescription'):
if fields['instanceDescription'] != \
currInstanceDescription:
setConfigParam(baseDir,
'instanceDescription',
fields['instanceDescription'])
else:
if currInstanceDescription:
setConfigParam(baseDir,
'instanceDescription', '')
# change user bio
if fields.get('bio'):
if fields['bio'] != actorJson['summary']:
actorTags = {}
actorJson['summary'] = \
addHtmlTags(baseDir,
httpPrefix,
nickname,
domainFull,
fields['bio'], [], actorTags)
if actorTags:
actorJson['tag'] = []
for tagName, tag in actorTags.items():
actorJson['tag'].append(tag)
actorChanged = True
else:
if actorJson['summary']:
actorJson['summary'] = ''
actorChanged = True
# change moderators list
if fields.get('moderators'):
adminNickname = \
getConfigParam(baseDir, 'admin')
if adminNickname:
if path.startswith('/users/' +
adminNickname + '/'):
moderatorsFile = \
baseDir + \
'/accounts/moderators.txt'
clearModeratorStatus(baseDir)
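# the moderators field accepts either a comma separated list or one
# nickname per line; only nicknames with an existing account
# directory are retained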
if ',' in fields['moderators']:
# if the list was given as comma separated
modFile = open(moderatorsFile, "w+")
mods = fields['moderators'].split(',')
for modNick in mods:
modNick = modNick.strip()
modDir = baseDir + \
'/accounts/' + modNick + \
'@' + domain
if os.path.isdir(modDir):
modFile.write(modNick + '\n')
modFile.close()
mods = fields['moderators'].split(',')
for modNick in mods:
modNick = modNick.strip()
modDir = baseDir + \
'/accounts/' + modNick + \
'@' + domain
if os.path.isdir(modDir):
setRole(baseDir,
modNick, domain,
'instance', 'moderator')
else:
# nicknames on separate lines
modFile = open(moderatorsFile, "w+")
mods = fields['moderators'].split('\n')
for modNick in mods:
modNick = modNick.strip()
modDir = \
baseDir + \
'/accounts/' + modNick + \
'@' + domain
if os.path.isdir(modDir):
modFile.write(modNick + '\n')
modFile.close()
mods = fields['moderators'].split('\n')
for modNick in mods:
modNick = modNick.strip()
modDir = \
baseDir + \
'/accounts/' + \
modNick + '@' + \
domain
if os.path.isdir(modDir):
setRole(baseDir,
modNick, domain,
'instance',
'moderator')
# change site editors list
if fields.get('editors'):
adminNickname = \
getConfigParam(baseDir, 'admin')
if adminNickname:
if path.startswith('/users/' +
adminNickname + '/'):
editorsFile = \
baseDir + \
'/accounts/editors.txt'
clearEditorStatus(baseDir)
if ',' in fields['editors']:
# if the list was given as comma separated
edFile = open(editorsFile, "w+")
eds = fields['editors'].split(',')
for edNick in eds:
edNick = edNick.strip()
edDir = baseDir + \
'/accounts/' + edNick + \
'@' + domain
if os.path.isdir(edDir):
edFile.write(edNick + '\n')
edFile.close()
eds = fields['editors'].split(',')
for edNick in eds:
edNick = edNick.strip()
edDir = baseDir + \
'/accounts/' + edNick + \
'@' + domain
if os.path.isdir(edDir):
setRole(baseDir,
edNick, domain,
'instance', 'editor')
else:
# nicknames on separate lines
edFile = open(editorsFile, "w+")
eds = fields['editors'].split('\n')
for edNick in eds:
edNick = edNick.strip()
edDir = \
baseDir + \
'/accounts/' + edNick + \
'@' + domain
if os.path.isdir(edDir):
edFile.write(edNick + '\n')
edFile.close()
eds = fields['editors'].split('\n')
for edNick in eds:
edNick = edNick.strip()
edDir = \
baseDir + \
'/accounts/' + \
edNick + '@' + \
domain
if os.path.isdir(edDir):
setRole(baseDir,
edNick, domain,
'instance',
'editor')
# remove scheduled posts
if fields.get('removeScheduledPosts'):
if fields['removeScheduledPosts'] == 'on':
removeScheduledPosts(baseDir,
nickname, domain)
# approve followers
approveFollowers = False
if fields.get('approveFollowers'):
if fields['approveFollowers'] == 'on':
approveFollowers = True
if approveFollowers != \
actorJson['manuallyApprovesFollowers']:
actorJson['manuallyApprovesFollowers'] = \
approveFollowers
actorChanged = True
# remove a custom font
if fields.get('removeCustomFont'):
if fields['removeCustomFont'] == 'on':
fontExt = ('woff', 'woff2', 'otf', 'ttf')
for ext in fontExt:
if os.path.isfile(baseDir +
'/fonts/custom.' + ext):
os.remove(baseDir +
'/fonts/custom.' + ext)
if os.path.isfile(baseDir +
'/fonts/custom.' + ext +
'.etag'):
os.remove(baseDir +
'/fonts/custom.' + ext +
'.etag')
currTheme = getTheme(baseDir)
if currTheme:
setTheme(baseDir, currTheme, domain)
self.server.showPublishAsIcon = \
getConfigParam(self.server.baseDir,
'showPublishAsIcon')
self.server.fullWidthTimelineButtonHeader = \
getConfigParam(self.server.baseDir,
'fullWidthTimeline' +
'ButtonHeader')
self.server.iconsAsButtons = \
getConfigParam(self.server.baseDir,
'iconsAsButtons')
self.server.rssIconAtTop = \
getConfigParam(self.server.baseDir,
'rssIconAtTop')
self.server.publishButtonAtTop = \
getConfigParam(self.server.baseDir,
'publishButtonAtTop')
# only receive DMs from accounts you follow
followDMsFilename = \
baseDir + '/accounts/' + \
nickname + '@' + domain + \
'/.followDMs'
followDMsActive = False
if fields.get('followDMs'):
if fields['followDMs'] == 'on':
followDMsActive = True
with open(followDMsFilename, 'w+') as fFile:
fFile.write('\n')
if not followDMsActive:
if os.path.isfile(followDMsFilename):
os.remove(followDMsFilename)
# remove Twitter retweets
removeTwitterFilename = \
baseDir + '/accounts/' + \
nickname + '@' + domain + \
'/.removeTwitter'
removeTwitterActive = False
if fields.get('removeTwitter'):
if fields['removeTwitter'] == 'on':
removeTwitterActive = True
with open(removeTwitterFilename,
'w+') as rFile:
rFile.write('\n')
if not removeTwitterActive:
if os.path.isfile(removeTwitterFilename):
os.remove(removeTwitterFilename)
# hide Like button
hideLikeButtonFile = \
baseDir + '/accounts/' + \
nickname + '@' + domain + \
'/.hideLikeButton'
notifyLikesFilename = \
baseDir + '/accounts/' + \
nickname + '@' + domain + \
'/.notifyLikes'
hideLikeButtonActive = False
if fields.get('hideLikeButton'):
if fields['hideLikeButton'] == 'on':
hideLikeButtonActive = True
with open(hideLikeButtonFile, 'w+') as rFile:
rFile.write('\n')
# remove notify likes selection
if os.path.isfile(notifyLikesFilename):
os.remove(notifyLikesFilename)
if not hideLikeButtonActive:
if os.path.isfile(hideLikeButtonFile):
os.remove(hideLikeButtonFile)
# notify about new Likes
notifyLikesActive = False
if fields.get('notifyLikes'):
if fields['notifyLikes'] == 'on' and \
not hideLikeButtonActive:
notifyLikesActive = True
with open(notifyLikesFilename, 'w+') as rFile:
rFile.write('\n')
if not notifyLikesActive:
if os.path.isfile(notifyLikesFilename):
os.remove(notifyLikesFilename)
# this account is a bot
if fields.get('isBot'):
if fields['isBot'] == 'on':
if actorJson['type'] != 'Service':
actorJson['type'] = 'Service'
actorChanged = True
else:
# this account is a group
if fields.get('isGroup'):
if fields['isGroup'] == 'on':
if actorJson['type'] != 'Group':
actorJson['type'] = 'Group'
actorChanged = True
else:
# this account is a person (default)
if actorJson['type'] != 'Person':
actorJson['type'] = 'Person'
actorChanged = True
# grayscale theme
grayscale = False
if fields.get('grayscale'):
if fields['grayscale'] == 'on':
grayscale = True
if grayscale:
enableGrayscale(baseDir)
else:
disableGrayscale(baseDir)
# save filtered words list
filterFilename = \
baseDir + '/accounts/' + \
nickname + '@' + domain + \
'/filters.txt'
if fields.get('filteredWords'):
with open(filterFilename, 'w+') as filterfile:
filterfile.write(fields['filteredWords'])
else:
if os.path.isfile(filterFilename):
os.remove(filterFilename)
# word replacements
switchFilename = \
baseDir + '/accounts/' + \
nickname + '@' + domain + \
'/replacewords.txt'
if fields.get('switchWords'):
with open(switchFilename, 'w+') as switchfile:
switchfile.write(fields['switchWords'])
else:
if os.path.isfile(switchFilename):
os.remove(switchFilename)
# autogenerated tags
autoTagsFilename = \
baseDir + '/accounts/' + \
nickname + '@' + domain + \
'/autotags.txt'
if fields.get('autoTags'):
with open(autoTagsFilename, 'w+') as autoTagsFile:
autoTagsFile.write(fields['autoTags'])
else:
if os.path.isfile(autoTagsFilename):
os.remove(autoTagsFilename)
# autogenerated content warnings
autoCWFilename = \
baseDir + '/accounts/' + \
nickname + '@' + domain + \
'/autocw.txt'
if fields.get('autoCW'):
with open(autoCWFilename, 'w+') as autoCWFile:
autoCWFile.write(fields['autoCW'])
else:
if os.path.isfile(autoCWFilename):
os.remove(autoCWFilename)
# save blocked accounts list
blockedFilename = \
baseDir + '/accounts/' + \
nickname + '@' + domain + \
'/blocking.txt'
if fields.get('blocked'):
with open(blockedFilename, 'w+') as blockedfile:
blockedfile.write(fields['blocked'])
else:
if os.path.isfile(blockedFilename):
os.remove(blockedFilename)
# save allowed instances list
allowedInstancesFilename = \
baseDir + '/accounts/' + \
nickname + '@' + domain + \
'/allowedinstances.txt'
if fields.get('allowedInstances'):
with open(allowedInstancesFilename, 'w+') as aFile:
aFile.write(fields['allowedInstances'])
else:
if os.path.isfile(allowedInstancesFilename):
os.remove(allowedInstancesFilename)
# save git project names list
gitProjectsFilename = \
baseDir + '/accounts/' + \
nickname + '@' + domain + \
'/gitprojects.txt'
if fields.get('gitProjects'):
with open(gitProjectsFilename, 'w+') as aFile:
aFile.write(fields['gitProjects'].lower())
else:
if os.path.isfile(gitProjectsFilename):
os.remove(gitProjectsFilename)
# save actor json file within accounts
if actorChanged:
# update the context for the actor
actorJson['@context'] = [
'https://www.w3.org/ns/activitystreams',
'https://w3id.org/security/v1',
getDefaultPersonContext()
]
randomizeActorImages(actorJson)
saveJson(actorJson, actorFilename)
webfingerUpdate(baseDir,
nickname, domain,
onionDomain,
self.server.cachedWebfingers)
# also copy to the actors cache and
# personCache in memory
storePersonInCache(baseDir,
actorJson['id'], actorJson,
self.server.personCache,
True)
# clear any cached images for this actor
idStr = actorJson['id'].replace('/', '-')
removeAvatarFromCache(baseDir, idStr)
# save the actor to the cache
actorCacheFilename = \
baseDir + '/cache/actors/' + \
actorJson['id'].replace('/', '#') + '.json'
saveJson(actorJson, actorCacheFilename)
# send profile update to followers
ccStr = 'https://www.w3.org/ns/' + \
'activitystreams#Public'
updateActorJson = {
'type': 'Update',
'actor': actorJson['id'],
'to': [actorJson['id'] + '/followers'],
'cc': [ccStr],
'object': actorJson
}
self._postToOutbox(updateActorJson,
__version__, nickname)
# deactivate the account
if fields.get('deactivateThisAccount'):
if fields['deactivateThisAccount'] == 'on':
deactivateAccount(baseDir,
nickname, domain)
self._clearLoginDetails(nickname,
callingDomain)
self.server.POSTbusy = False
return
# redirect back to the profile screen
if callingDomain.endswith('.onion') and \
onionDomain:
actorStr = \
'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and
i2pDomain):
actorStr = \
'http://' + i2pDomain + usersPath
self._redirect_headers(actorStr, cookie, callingDomain)
self.server.POSTbusy = False
def _progressiveWebAppManifest(self, callingDomain: str,
GETstartTime,
GETtimings: {}) -> None:
"""gets the PWA manifest
"""
app1 = "https://f-droid.org/en/packages/eu.siacs.conversations"
app2 = "https://staging.f-droid.org/en/packages/im.vector.app"
manifest = {
"name": "Epicyon",
"short_name": "Epicyon",
"start_url": "/index.html",
"display": "standalone",
"background_color": "black",
"theme_color": "grey",
"orientation": "portrait-primary",
"categories": ["microblog", "fediverse", "activitypub"],
"screenshots": [
{
"src": "/mobile.jpg",
"sizes": "418x851",
"type": "image/jpeg"
},
{
"src": "/mobile_person.jpg",
"sizes": "429x860",
"type": "image/jpeg"
},
{
"src": "/mobile_search.jpg",
"sizes": "422x861",
"type": "image/jpeg"
}
],
"icons": [
{
"src": "/logo72.png",
"type": "image/png",
"sizes": "72x72"
},
{
"src": "/logo96.png",
"type": "image/png",
"sizes": "96x96"
},
{
"src": "/logo128.png",
"type": "image/png",
"sizes": "128x128"
},
{
"src": "/logo144.png",
"type": "image/png",
"sizes": "144x144"
},
{
"src": "/logo152.png",
"type": "image/png",
"sizes": "152x152"
},
{
"src": "/logo192.png",
"type": "image/png",
"sizes": "192x192"
},
{
"src": "/logo256.png",
"type": "image/png",
"sizes": "256x256"
},
{
"src": "/logo512.png",
"type": "image/png",
"sizes": "512x512"
}
],
"related_applications": [
{
"platform": "fdroid",
"url": app1
},
{
"platform": "fdroid",
"url": app2
}
]
}
msg = json.dumps(manifest,
ensure_ascii=False).encode('utf-8')
self._set_headers('application/json', len(msg),
None, callingDomain)
self._write(msg)
if self.server.debug:
print('Sent manifest: ' + callingDomain)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show logout', 'send manifest')
def _getFavicon(self, callingDomain: str,
baseDir: str, debug: bool) -> None:
"""Return the favicon
"""
favType = 'image/x-icon'
favFilename = 'favicon.ico'
if self._hasAccept(callingDomain):
if 'image/webp' in self.headers['Accept']:
favType = 'image/webp'
favFilename = 'favicon.webp'
if 'image/avif' in self.headers['Accept']:
favType = 'image/avif'
favFilename = 'favicon.avif'
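# note: avif is checked after webp, so if the client accepts both
# formats the avif favicon takes precedence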
# custom favicon
faviconFilename = baseDir + '/' + favFilename
if not os.path.isfile(faviconFilename):
# default favicon
faviconFilename = \
baseDir + '/img/icons/' + favFilename
if self._etag_exists(faviconFilename):
# The file has not changed
if debug:
print('favicon icon has not changed: ' + callingDomain)
self._304()
return
if self.server.iconsCache.get(favFilename):
favBinary = self.server.iconsCache[favFilename]
self._set_headers_etag(faviconFilename,
favType,
favBinary, None,
callingDomain)
self._write(favBinary)
if debug:
print('Sent favicon from cache: ' + callingDomain)
return
else:
if os.path.isfile(faviconFilename):
with open(faviconFilename, 'rb') as favFile:
favBinary = favFile.read()
self._set_headers_etag(faviconFilename,
favType,
favBinary, None,
callingDomain)
self._write(favBinary)
self.server.iconsCache[favFilename] = favBinary
if self.server.debug:
print('Sent favicon from file: ' + callingDomain)
return
if debug:
print('favicon not sent: ' + callingDomain)
self._404()
def _getFonts(self, callingDomain: str, path: str,
baseDir: str, debug: bool,
GETstartTime, GETtimings: {}) -> None:
"""Returns a font
"""
fontStr = path.split('/fonts/')[1]
if fontStr.endswith('.otf') or \
fontStr.endswith('.ttf') or \
fontStr.endswith('.woff') or \
fontStr.endswith('.woff2'):
if fontStr.endswith('.otf'):
fontType = 'font/otf'
elif fontStr.endswith('.ttf'):
fontType = 'font/ttf'
elif fontStr.endswith('.woff'):
fontType = 'font/woff'
else:
fontType = 'font/woff2'
fontFilename = \
baseDir + '/fonts/' + fontStr
if self._etag_exists(fontFilename):
# The file has not changed
self._304()
return
if self.server.fontsCache.get(fontStr):
fontBinary = self.server.fontsCache[fontStr]
self._set_headers_etag(fontFilename,
fontType,
fontBinary, None,
callingDomain)
self._write(fontBinary)
if debug:
print('font sent from cache: ' +
path + ' ' + callingDomain)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'hasAccept',
'send font from cache')
return
else:
if os.path.isfile(fontFilename):
with open(fontFilename, 'rb') as fontFile:
fontBinary = fontFile.read()
self._set_headers_etag(fontFilename,
fontType,
fontBinary, None,
callingDomain)
self._write(fontBinary)
self.server.fontsCache[fontStr] = fontBinary
if debug:
print('font sent from file: ' +
path + ' ' + callingDomain)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'hasAccept',
'send font from file')
return
if debug:
print('font not found: ' + path + ' ' + callingDomain)
self._404()
def _getRSS2feed(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, port: int, proxyType: str,
GETstartTime, GETtimings: {},
debug: bool) -> None:
"""Returns an RSS2 feed for the blog
"""
nickname = path.split('/blog/')[1]
if '/' in nickname:
nickname = nickname.split('/')[0]
if not nickname.startswith('rss.'):
if os.path.isdir(self.server.baseDir +
'/accounts/' + nickname + '@' + domain):
if not self.server.session:
print('Starting new session during RSS request')
self.server.session = \
createSession(proxyType)
if not self.server.session:
print('ERROR: GET failed to create session ' +
'during RSS request')
self._404()
return
msg = \
htmlBlogPageRSS2(authorized,
self.server.session,
baseDir,
httpPrefix,
self.server.translate,
nickname,
domain,
port,
maxPostsInRSSFeed, 1,
True)
if msg is not None:
msg = msg.encode('utf-8')
self._set_headers('text/xml', len(msg),
None, callingDomain)
self._write(msg)
if debug:
print('Sent rss2 feed: ' +
path + ' ' + callingDomain)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'sharedInbox enabled',
'blog rss2')
return
if debug:
print('Failed to get rss2 feed: ' +
path + ' ' + callingDomain)
self._404()
def _getRSS2site(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domainFull: str, port: int, proxyType: str,
translate: {},
GETstartTime, GETtimings: {},
debug: bool) -> None:
"""Returns an RSS2 feed for all blogs on this instance
"""
if not self.server.session:
print('Starting new session during RSS request')
self.server.session = \
createSession(proxyType)
if not self.server.session:
print('ERROR: GET failed to create session ' +
'during RSS request')
self._404()
return
msg = ''
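# walk every account directory and concatenate each account's blog
# feed items, skipping the inbox@ and news@ system accounts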
for subdir, dirs, files in os.walk(baseDir + '/accounts'):
for acct in dirs:
if '@' not in acct:
continue
if 'inbox@' in acct or 'news@' in acct:
continue
nickname = acct.split('@')[0]
domain = acct.split('@')[1]
msg += \
htmlBlogPageRSS2(authorized,
self.server.session,
baseDir,
httpPrefix,
self.server.translate,
nickname,
domain,
port,
maxPostsInRSSFeed, 1,
False)
if msg:
msg = rss2Header(httpPrefix,
'news', domainFull,
'Site', translate) + msg + rss2Footer()
msg = msg.encode('utf-8')
self._set_headers('text/xml', len(msg),
None, callingDomain)
self._write(msg)
if debug:
print('Sent rss2 feed: ' +
path + ' ' + callingDomain)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'sharedInbox enabled',
'blog rss2')
return
if debug:
print('Failed to get rss2 feed: ' +
path + ' ' + callingDomain)
self._404()
def _getNewswireFeed(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, port: int, proxyType: str,
GETstartTime, GETtimings: {},
debug: bool) -> None:
"""Returns the newswire feed
"""
if not self.server.session:
print('Starting new session during RSS request')
self.server.session = \
createSession(proxyType)
if not self.server.session:
print('ERROR: GET failed to create session ' +
'during RSS request')
self._404()
return
msg = getRSSfromDict(self.server.baseDir, self.server.newswire,
self.server.httpPrefix,
self.server.domainFull,
'Newswire', self.server.translate)
if msg:
msg = msg.encode('utf-8')
self._set_headers('text/xml', len(msg),
None, callingDomain)
self._write(msg)
if debug:
print('Sent rss2 newswire feed: ' +
path + ' ' + callingDomain)
return
if debug:
print('Failed to get rss2 newswire feed: ' +
path + ' ' + callingDomain)
self._404()
def _getRSS3feed(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, port: int, proxyType: str,
GETstartTime, GETtimings: {},
debug: bool) -> None:
"""Returns an RSS3 feed
"""
nickname = path.split('/blog/')[1]
if '/' in nickname:
nickname = nickname.split('/')[0]
if not nickname.startswith('rss.'):
if os.path.isdir(baseDir +
'/accounts/' + nickname + '@' + domain):
if not self.server.session:
print('Starting new session during RSS3 request')
self.server.session = \
createSession(proxyType)
if not self.server.session:
print('ERROR: GET failed to create session ' +
'during RSS3 request')
self._404()
return
msg = \
htmlBlogPageRSS3(authorized,
self.server.session,
baseDir, httpPrefix,
self.server.translate,
nickname, domain, port,
maxPostsInRSSFeed, 1)
if msg is not None:
msg = msg.encode('utf-8')
self._set_headers('text/plain; charset=utf-8',
len(msg), None, callingDomain)
self._write(msg)
if self.server.debug:
print('Sent rss3 feed: ' +
path + ' ' + callingDomain)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'sharedInbox enabled',
'blog rss3')
return
if debug:
print('Failed to get rss3 feed: ' +
path + ' ' + callingDomain)
self._404()
def _showPersonOptions(self, callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str,
GETstartTime, GETtimings: {},
onionDomain: str, i2pDomain: str,
cookie: str, debug: bool) -> None:
"""Show person options screen
"""
optionsStr = path.split('?options=')[1]
originPathStr = path.split('?options=')[0]
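# the options query string appears to have the form
# ?options=<actor>;<pageNumber>;<avatarUrl>[;<link>]
# (field meanings inferred from the parsing below)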
if ';' in optionsStr and '/users/news/' not in path:
pageNumber = 1
optionsList = optionsStr.split(';')
optionsActor = optionsList[0]
optionsPageNumber = optionsList[1]
optionsProfileUrl = optionsList[2]
if optionsPageNumber.isdigit():
pageNumber = int(optionsPageNumber)
optionsLink = None
if len(optionsList) > 3:
optionsLink = optionsList[3]
donateUrl = None
PGPpubKey = None
PGPfingerprint = None
xmppAddress = None
matrixAddress = None
blogAddress = None
toxAddress = None
ssbAddress = None
emailAddress = None
actorJson = getPersonFromCache(baseDir,
optionsActor,
self.server.personCache,
True)
if actorJson:
donateUrl = getDonationUrl(actorJson)
xmppAddress = getXmppAddress(actorJson)
matrixAddress = getMatrixAddress(actorJson)
ssbAddress = getSSBAddress(actorJson)
blogAddress = getBlogAddress(actorJson)
toxAddress = getToxAddress(actorJson)
emailAddress = getEmailAddress(actorJson)
PGPpubKey = getPGPpubKey(actorJson)
PGPfingerprint = getPGPfingerprint(actorJson)
msg = htmlPersonOptions(self.server.cssCache,
self.server.translate,
baseDir, domain,
domainFull,
originPathStr,
optionsActor,
optionsProfileUrl,
optionsLink,
pageNumber, donateUrl,
xmppAddress, matrixAddress,
ssbAddress, blogAddress,
toxAddress,
PGPpubKey, PGPfingerprint,
emailAddress).encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'registered devices done',
'person options')
return
if '/users/news/' in path:
self._redirect_headers(originPathStr + '/tlnews',
cookie, callingDomain)
return
if callingDomain.endswith('.onion') and onionDomain:
originPathStrAbsolute = \
'http://' + onionDomain + originPathStr
elif callingDomain.endswith('.i2p') and i2pDomain:
originPathStrAbsolute = \
'http://' + i2pDomain + originPathStr
else:
originPathStrAbsolute = \
httpPrefix + '://' + domainFull + originPathStr
self._redirect_headers(originPathStrAbsolute, cookie,
callingDomain)
def _showMedia(self, callingDomain: str,
path: str, baseDir: str,
GETstartTime, GETtimings: {}) -> None:
"""Returns a media file
"""
if self._pathIsImage(path) or \
self._pathIsVideo(path) or \
self._pathIsAudio(path):
mediaStr = path.split('/media/')[1]
mediaFilename = baseDir + '/media/' + mediaStr
if os.path.isfile(mediaFilename):
if self._etag_exists(mediaFilename):
# The file has not changed
self._304()
return
mediaFileType = 'image/png'
if mediaFilename.endswith('.png'):
mediaFileType = 'image/png'
elif mediaFilename.endswith('.jpg'):
mediaFileType = 'image/jpeg'
elif mediaFilename.endswith('.gif'):
mediaFileType = 'image/gif'
elif mediaFilename.endswith('.webp'):
mediaFileType = 'image/webp'
elif mediaFilename.endswith('.avif'):
mediaFileType = 'image/avif'
elif mediaFilename.endswith('.mp4'):
mediaFileType = 'video/mp4'
elif mediaFilename.endswith('.ogv'):
mediaFileType = 'video/ogv'
elif mediaFilename.endswith('.mp3'):
mediaFileType = 'audio/mpeg'
elif mediaFilename.endswith('.ogg'):
mediaFileType = 'audio/ogg'
with open(mediaFilename, 'rb') as avFile:
mediaBinary = avFile.read()
self._set_headers_etag(mediaFilename, mediaFileType,
mediaBinary, None,
callingDomain)
self._write(mediaBinary)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show emoji done',
'show media')
return
self._404()
def _showEmoji(self, callingDomain: str, path: str,
baseDir: str,
GETstartTime, GETtimings: {}) -> None:
"""Returns an emoji image
"""
if self._pathIsImage(path):
emojiStr = path.split('/emoji/')[1]
emojiFilename = baseDir + '/emoji/' + emojiStr
if os.path.isfile(emojiFilename):
if self._etag_exists(emojiFilename):
# The file has not changed
self._304()
return
mediaImageType = 'png'
if emojiFilename.endswith('.png'):
mediaImageType = 'png'
elif emojiFilename.endswith('.jpg'):
mediaImageType = 'jpeg'
elif emojiFilename.endswith('.webp'):
mediaImageType = 'webp'
elif emojiFilename.endswith('.avif'):
mediaImageType = 'avif'
else:
mediaImageType = 'gif'
with open(emojiFilename, 'rb') as avFile:
mediaBinary = avFile.read()
self._set_headers_etag(emojiFilename,
'image/' + mediaImageType,
mediaBinary, None,
callingDomain)
self._write(mediaBinary)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'background shown done',
'show emoji')
return
self._404()
def _showIcon(self, callingDomain: str, path: str,
baseDir: str,
GETstartTime, GETtimings: {}) -> None:
"""Shows an icon
"""
if path.endswith('.png'):
mediaStr = path.split('/icons/')[1]
mediaFilename = baseDir + '/img/icons/' + mediaStr
if self._etag_exists(mediaFilename):
# The file has not changed
self._304()
return
if self.server.iconsCache.get(mediaStr):
mediaBinary = self.server.iconsCache[mediaStr]
self._set_headers_etag(mediaFilename,
'image/png',
mediaBinary, None,
callingDomain)
self._write(mediaBinary)
return
else:
if os.path.isfile(mediaFilename):
with open(mediaFilename, 'rb') as avFile:
mediaBinary = avFile.read()
self._set_headers_etag(mediaFilename,
'image/png',
mediaBinary, None,
callingDomain)
self._write(mediaBinary)
self.server.iconsCache[mediaStr] = mediaBinary
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show files done',
'icon shown')
return
self._404()
def _showCachedAvatar(self, callingDomain: str, path: str,
baseDir: str,
GETstartTime, GETtimings: {}) -> None:
"""Shows an avatar image obtained from the cache
"""
mediaFilename = baseDir + '/cache' + path
if os.path.isfile(mediaFilename):
if self._etag_exists(mediaFilename):
# The file has not changed
self._304()
return
with open(mediaFilename, 'rb') as avFile:
mediaBinary = avFile.read()
if mediaFilename.endswith('.png'):
self._set_headers_etag(mediaFilename,
'image/png',
mediaBinary, None,
callingDomain)
elif mediaFilename.endswith('.jpg'):
self._set_headers_etag(mediaFilename,
'image/jpeg',
mediaBinary, None,
callingDomain)
elif mediaFilename.endswith('.gif'):
self._set_headers_etag(mediaFilename,
'image/gif',
mediaBinary, None,
callingDomain)
elif mediaFilename.endswith('.webp'):
self._set_headers_etag(mediaFilename,
'image/webp',
mediaBinary, None,
callingDomain)
elif mediaFilename.endswith('.avif'):
self._set_headers_etag(mediaFilename,
'image/avif',
mediaBinary, None,
callingDomain)
else:
# default to jpeg
self._set_headers_etag(mediaFilename,
'image/jpeg',
mediaBinary, None,
callingDomain)
self._write(mediaBinary)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'icon shown done',
'avatar shown')
return
self._404()
def _hashtagSearch(self, callingDomain: str,
path: str, cookie: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {}) -> None:
"""Return the result of a hashtag search
"""
pageNumber = 1
if '?page=' in path:
pageNumberStr = path.split('?page=')[1]
if '#' in pageNumberStr:
pageNumberStr = pageNumberStr.split('#')[0]
if pageNumberStr.isdigit():
pageNumber = int(pageNumberStr)
hashtag = path.split('/tags/')[1]
if '?page=' in hashtag:
hashtag = hashtag.split('?page=')[0]
if isBlockedHashtag(baseDir, hashtag):
msg = htmlHashtagBlocked(self.server.cssCache, baseDir,
self.server.translate).encode('utf-8')
self._login_headers('text/html', len(msg), callingDomain)
self._write(msg)
self.server.GETbusy = False
return
nickname = None
if '/users/' in path:
actor = \
httpPrefix + '://' + domainFull + path
nickname = \
getNicknameFromActor(actor)
hashtagStr = \
htmlHashtagSearch(self.server.cssCache,
nickname, domain, port,
self.server.recentPostsCache,
self.server.maxRecentPosts,
self.server.translate,
baseDir, hashtag, pageNumber,
maxPostsInFeed, self.server.session,
self.server.cachedWebfingers,
self.server.personCache,
httpPrefix,
self.server.projectVersion,
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly)
if hashtagStr:
msg = hashtagStr.encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
else:
originPathStr = path.split('/tags/')[0]
originPathStrAbsolute = \
httpPrefix + '://' + domainFull + originPathStr
if callingDomain.endswith('.onion') and onionDomain:
originPathStrAbsolute = \
'http://' + onionDomain + originPathStr
elif (callingDomain.endswith('.i2p') and i2pDomain):
originPathStrAbsolute = \
'http://' + i2pDomain + originPathStr
self._redirect_headers(originPathStrAbsolute + '/search',
cookie, callingDomain)
self.server.GETbusy = False
self._benchmarkGETtimings(GETstartTime, GETtimings,
'login shown done',
'hashtag search')
def _hashtagSearchRSS2(self, callingDomain: str,
path: str, cookie: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {}) -> None:
"""Return an RSS 2 feed for a hashtag
"""
hashtag = path.split('/tags/rss2/')[1]
if isBlockedHashtag(baseDir, hashtag):
self._400()
self.server.GETbusy = False
return
nickname = None
if '/users/' in path:
actor = \
httpPrefix + '://' + domainFull + path
nickname = \
getNicknameFromActor(actor)
hashtagStr = \
rssHashtagSearch(nickname,
domain, port,
self.server.recentPostsCache,
self.server.maxRecentPosts,
self.server.translate,
baseDir, hashtag,
maxPostsInFeed, self.server.session,
self.server.cachedWebfingers,
self.server.personCache,
httpPrefix,
self.server.projectVersion,
self.server.YTReplacementDomain)
if hashtagStr:
msg = hashtagStr.encode('utf-8')
self._set_headers('text/xml', len(msg),
cookie, callingDomain)
self._write(msg)
else:
originPathStr = path.split('/tags/rss2/')[0]
originPathStrAbsolute = \
httpPrefix + '://' + domainFull + originPathStr
if callingDomain.endswith('.onion') and onionDomain:
originPathStrAbsolute = \
'http://' + onionDomain + originPathStr
elif (callingDomain.endswith('.i2p') and i2pDomain):
originPathStrAbsolute = \
'http://' + i2pDomain + originPathStr
self._redirect_headers(originPathStrAbsolute + '/search',
cookie, callingDomain)
self.server.GETbusy = False
self._benchmarkGETtimings(GETstartTime, GETtimings,
'login shown done',
'hashtag rss feed')
def _announceButton(self, callingDomain: str, path: str,
baseDir: str,
cookie: str, proxyType: str,
httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
repeatPrivate: bool,
debug: bool) -> None:
"""The announce/repeat button was pressed on a post
"""
pageNumber = 1
repeatUrl = path.split('?repeat=')[1]
if '?' in repeatUrl:
repeatUrl = repeatUrl.split('?')[0]
timelineBookmark = ''
if '?bm=' in path:
timelineBookmark = path.split('?bm=')[1]
if '?' in timelineBookmark:
timelineBookmark = timelineBookmark.split('?')[0]
timelineBookmark = '#' + timelineBookmark
if '?page=' in path:
pageNumberStr = path.split('?page=')[1]
if '?' in pageNumberStr:
pageNumberStr = pageNumberStr.split('?')[0]
if '#' in pageNumberStr:
pageNumberStr = pageNumberStr.split('#')[0]
if pageNumberStr.isdigit():
pageNumber = int(pageNumberStr)
timelineStr = 'inbox'
if '?tl=' in path:
timelineStr = path.split('?tl=')[1]
if '?' in timelineStr:
timelineStr = timelineStr.split('?')[0]
actor = path.split('?repeat=')[0]
self.postToNickname = getNicknameFromActor(actor)
if not self.postToNickname:
print('WARN: unable to find nickname in ' + actor)
self.server.GETbusy = False
actorAbsolute = \
httpPrefix + '://' + domainFull + actor
if callingDomain.endswith('.onion') and onionDomain:
actorAbsolute = 'http://' + onionDomain + actor
elif (callingDomain.endswith('.i2p') and i2pDomain):
actorAbsolute = 'http://' + i2pDomain + actor
self._redirect_headers(actorAbsolute + '/' + timelineStr +
'?page=' + str(pageNumber), cookie,
callingDomain)
return
if not self.server.session:
print('Starting new session during repeat button')
self.server.session = createSession(proxyType)
if not self.server.session:
print('ERROR: GET failed to create session ' +
'during repeat button')
self._404()
self.server.GETbusy = False
return
self.server.actorRepeat = path.split('?actor=')[1]
announceToStr = \
httpPrefix + '://' + domainFull + '/users/' + \
self.postToNickname + '/followers'
if not repeatPrivate:
announceToStr = 'https://www.w3.org/ns/activitystreams#Public'
announceJson = \
createAnnounce(self.server.session,
baseDir,
self.server.federationList,
self.postToNickname,
domain, port,
announceToStr,
None, httpPrefix,
repeatUrl, False, False,
self.server.sendThreads,
self.server.postLog,
self.server.personCache,
self.server.cachedWebfingers,
debug,
self.server.projectVersion)
if announceJson:
self._postToOutboxThread(announceJson)
self.server.GETbusy = False
actorAbsolute = httpPrefix + '://' + domainFull + actor
if callingDomain.endswith('.onion') and onionDomain:
actorAbsolute = 'http://' + onionDomain + actor
elif callingDomain.endswith('.i2p') and i2pDomain:
actorAbsolute = 'http://' + i2pDomain + actor
self._redirect_headers(actorAbsolute + '/' +
timelineStr + '?page=' +
str(pageNumber) +
timelineBookmark, cookie, callingDomain)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'emoji search shown done',
'show announce')
def _undoAnnounceButton(self, callingDomain: str, path: str,
baseDir: str,
cookie: str, proxyType: str,
httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
repeatPrivate: bool, debug: bool):
"""Undo announce/repeat button was pressed
"""
pageNumber = 1
repeatUrl = path.split('?unrepeat=')[1]
if '?' in repeatUrl:
repeatUrl = repeatUrl.split('?')[0]
timelineBookmark = ''
if '?bm=' in path:
timelineBookmark = path.split('?bm=')[1]
if '?' in timelineBookmark:
timelineBookmark = timelineBookmark.split('?')[0]
timelineBookmark = '#' + timelineBookmark
if '?page=' in path:
pageNumberStr = path.split('?page=')[1]
if '?' in pageNumberStr:
pageNumberStr = pageNumberStr.split('?')[0]
if '#' in pageNumberStr:
pageNumberStr = pageNumberStr.split('#')[0]
if pageNumberStr.isdigit():
pageNumber = int(pageNumberStr)
timelineStr = 'inbox'
if '?tl=' in path:
timelineStr = path.split('?tl=')[1]
if '?' in timelineStr:
timelineStr = timelineStr.split('?')[0]
actor = path.split('?unrepeat=')[0]
self.postToNickname = getNicknameFromActor(actor)
if not self.postToNickname:
print('WARN: unable to find nickname in ' + actor)
self.server.GETbusy = False
actorAbsolute = httpPrefix + '://' + domainFull + actor
if callingDomain.endswith('.onion') and onionDomain:
actorAbsolute = 'http://' + onionDomain + actor
elif (callingDomain.endswith('.i2p') and i2pDomain):
actorAbsolute = 'http://' + i2pDomain + actor
self._redirect_headers(actorAbsolute + '/' +
timelineStr + '?page=' +
str(pageNumber), cookie,
callingDomain)
return
if not self.server.session:
print('Starting new session during undo repeat')
self.server.session = createSession(proxyType)
if not self.server.session:
print('ERROR: GET failed to create session ' +
'during undo repeat')
self._404()
self.server.GETbusy = False
return
undoAnnounceActor = \
httpPrefix + '://' + domainFull + \
'/users/' + self.postToNickname
unRepeatToStr = 'https://www.w3.org/ns/activitystreams#Public'
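        # build a minimal ActivityPub Undo activity wrapping the original
        # Announce, addressed to the public collection with followers cc'd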
newUndoAnnounce = {
"@context": "https://www.w3.org/ns/activitystreams",
'actor': undoAnnounceActor,
'type': 'Undo',
'cc': [undoAnnounceActor+'/followers'],
'to': [unRepeatToStr],
'object': {
'actor': undoAnnounceActor,
'cc': [undoAnnounceActor+'/followers'],
'object': repeatUrl,
'to': [unRepeatToStr],
'type': 'Announce'
}
}
self._postToOutboxThread(newUndoAnnounce)
self.server.GETbusy = False
actorAbsolute = httpPrefix + '://' + domainFull + actor
if callingDomain.endswith('.onion') and onionDomain:
actorAbsolute = 'http://' + onionDomain + actor
elif (callingDomain.endswith('.i2p') and i2pDomain):
actorAbsolute = 'http://' + i2pDomain + actor
self._redirect_headers(actorAbsolute + '/' +
timelineStr + '?page=' +
str(pageNumber) +
timelineBookmark, cookie, callingDomain)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show announce done',
'unannounce')
def _followApproveButton(self, callingDomain: str, path: str,
cookie: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, debug: bool):
"""Follow approve button was pressed
"""
originPathStr = path.split('/followapprove=')[0]
followerNickname = originPathStr.replace('/users/', '')
followingHandle = path.split('/followapprove=')[1]
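        # the handle being approved is expected in nickname@domain form,
        # hence the '@' check below before the approval is actioned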
if '@' in followingHandle:
if not self.server.session:
print('Starting new session during follow approval')
self.server.session = createSession(proxyType)
if not self.server.session:
print('ERROR: GET failed to create session ' +
'during follow approval')
self._404()
self.server.GETbusy = False
return
manualApproveFollowRequest(self.server.session,
baseDir, httpPrefix,
followerNickname,
domain, port,
followingHandle,
self.server.federationList,
self.server.sendThreads,
self.server.postLog,
self.server.cachedWebfingers,
self.server.personCache,
debug,
self.server.projectVersion)
originPathStrAbsolute = \
httpPrefix + '://' + domainFull + originPathStr
if callingDomain.endswith('.onion') and onionDomain:
originPathStrAbsolute = \
'http://' + onionDomain + originPathStr
elif (callingDomain.endswith('.i2p') and i2pDomain):
originPathStrAbsolute = \
'http://' + i2pDomain + originPathStr
self._redirect_headers(originPathStrAbsolute,
cookie, callingDomain)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'unannounce done',
'follow approve shown')
self.server.GETbusy = False
def _newswireVote(self, callingDomain: str, path: str,
cookie: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, debug: bool,
newswire: {}):
"""Vote for a newswire item
"""
originPathStr = path.split('/newswirevote=')[0]
dateStr = \
path.split('/newswirevote=')[1].replace('T', ' ') + '+00:00'
nickname = originPathStr.split('/users/')[1]
if '/' in nickname:
nickname = nickname.split('/')[0]
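        # each newswire entry is indexed by its date string; within an
        # entry, index 2 holds a list of 'vote:<nickname>' strings and
        # index 3 holds the filename of the associated item (if any)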
if newswire.get(dateStr):
if isModerator(baseDir, nickname):
if 'vote:' + nickname not in newswire[dateStr][2]:
newswire[dateStr][2].append('vote:' + nickname)
filename = newswire[dateStr][3]
newswireStateFilename = \
baseDir + '/accounts/.newswirestate.json'
try:
saveJson(newswire, newswireStateFilename)
except Exception as e:
print('ERROR saving newswire state, ' + str(e))
if filename:
saveJson(newswire[dateStr][2],
filename + '.votes')
originPathStrAbsolute = \
httpPrefix + '://' + domainFull + originPathStr + '/' + \
self.server.defaultTimeline
if callingDomain.endswith('.onion') and onionDomain:
originPathStrAbsolute = \
'http://' + onionDomain + originPathStr
elif (callingDomain.endswith('.i2p') and i2pDomain):
originPathStrAbsolute = \
'http://' + i2pDomain + originPathStr
self._redirect_headers(originPathStrAbsolute,
cookie, callingDomain)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'unannounce done',
                                  'vote for newswire item')
self.server.GETbusy = False
def _newswireUnvote(self, callingDomain: str, path: str,
cookie: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, debug: bool,
newswire: {}):
"""Remove vote for a newswire item
"""
originPathStr = path.split('/newswireunvote=')[0]
dateStr = \
path.split('/newswireunvote=')[1].replace('T', ' ') + '+00:00'
nickname = originPathStr.split('/users/')[1]
if '/' in nickname:
nickname = nickname.split('/')[0]
if newswire.get(dateStr):
if isModerator(baseDir, nickname):
if 'vote:' + nickname in newswire[dateStr][2]:
newswire[dateStr][2].remove('vote:' + nickname)
filename = newswire[dateStr][3]
newswireStateFilename = \
baseDir + '/accounts/.newswirestate.json'
try:
saveJson(newswire, newswireStateFilename)
except Exception as e:
print('ERROR saving newswire state, ' + str(e))
if filename:
saveJson(newswire[dateStr][2],
filename + '.votes')
originPathStrAbsolute = \
httpPrefix + '://' + domainFull + originPathStr + '/' + \
self.server.defaultTimeline
if callingDomain.endswith('.onion') and onionDomain:
originPathStrAbsolute = \
'http://' + onionDomain + originPathStr
elif (callingDomain.endswith('.i2p') and i2pDomain):
originPathStrAbsolute = \
'http://' + i2pDomain + originPathStr
self._redirect_headers(originPathStrAbsolute,
cookie, callingDomain)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'unannounce done',
                                  'unvote for newswire item')
self.server.GETbusy = False
def _followDenyButton(self, callingDomain: str, path: str,
cookie: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, debug: bool):
"""Follow deny button was pressed
"""
originPathStr = path.split('/followdeny=')[0]
followerNickname = originPathStr.replace('/users/', '')
followingHandle = path.split('/followdeny=')[1]
if '@' in followingHandle:
manualDenyFollowRequest(self.server.session,
baseDir, httpPrefix,
followerNickname,
domain, port,
followingHandle,
self.server.federationList,
self.server.sendThreads,
self.server.postLog,
self.server.cachedWebfingers,
self.server.personCache,
debug,
self.server.projectVersion)
originPathStrAbsolute = \
httpPrefix + '://' + domainFull + originPathStr
if callingDomain.endswith('.onion') and onionDomain:
originPathStrAbsolute = \
'http://' + onionDomain + originPathStr
elif callingDomain.endswith('.i2p') and i2pDomain:
originPathStrAbsolute = \
'http://' + i2pDomain + originPathStr
self._redirect_headers(originPathStrAbsolute,
cookie, callingDomain)
self.server.GETbusy = False
self._benchmarkGETtimings(GETstartTime, GETtimings,
'follow approve done',
'follow deny shown')
def _likeButton(self, callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                    debug: bool):
"""Press the like button
"""
pageNumber = 1
likeUrl = path.split('?like=')[1]
if '?' in likeUrl:
likeUrl = likeUrl.split('?')[0]
timelineBookmark = ''
if '?bm=' in path:
timelineBookmark = path.split('?bm=')[1]
if '?' in timelineBookmark:
timelineBookmark = timelineBookmark.split('?')[0]
timelineBookmark = '#' + timelineBookmark
actor = path.split('?like=')[0]
if '?page=' in path:
pageNumberStr = path.split('?page=')[1]
if '?' in pageNumberStr:
pageNumberStr = pageNumberStr.split('?')[0]
if '#' in pageNumberStr:
pageNumberStr = pageNumberStr.split('#')[0]
if pageNumberStr.isdigit():
pageNumber = int(pageNumberStr)
timelineStr = 'inbox'
if '?tl=' in path:
timelineStr = path.split('?tl=')[1]
if '?' in timelineStr:
timelineStr = timelineStr.split('?')[0]
self.postToNickname = getNicknameFromActor(actor)
if not self.postToNickname:
print('WARN: unable to find nickname in ' + actor)
self.server.GETbusy = False
actorAbsolute = \
httpPrefix + '://' + domainFull + actor
if callingDomain.endswith('.onion') and onionDomain:
actorAbsolute = 'http://' + onionDomain + actor
elif (callingDomain.endswith('.i2p') and i2pDomain):
actorAbsolute = 'http://' + i2pDomain + actor
self._redirect_headers(actorAbsolute + '/' + timelineStr +
'?page=' + str(pageNumber) +
timelineBookmark, cookie,
callingDomain)
return
if not self.server.session:
print('Starting new session during like')
self.server.session = createSession(proxyType)
if not self.server.session:
print('ERROR: GET failed to create session during like')
self._404()
self.server.GETbusy = False
return
likeActor = \
httpPrefix + '://' + \
domainFull + '/users/' + self.postToNickname
actorLiked = path.split('?actor=')[1]
if '?' in actorLiked:
actorLiked = actorLiked.split('?')[0]
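        # construct a minimal ActivityPub Like activity addressed to the
        # author of the liked post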
likeJson = {
"@context": "https://www.w3.org/ns/activitystreams",
'type': 'Like',
'actor': likeActor,
'to': [actorLiked],
'object': likeUrl
}
# directly like the post file
likedPostFilename = locatePost(baseDir,
self.postToNickname,
domain,
likeUrl)
if likedPostFilename:
if debug:
print('Updating likes for ' + likedPostFilename)
updateLikesCollection(self.server.recentPostsCache,
baseDir,
likedPostFilename, likeUrl,
likeActor, domain,
debug)
else:
print('WARN: unable to locate file for liked post ' +
likeUrl)
# send out the like to followers
self._postToOutbox(likeJson, self.server.projectVersion)
self.server.GETbusy = False
actorAbsolute = \
httpPrefix + '://' + domainFull + actor
if callingDomain.endswith('.onion') and onionDomain:
actorAbsolute = 'http://' + onionDomain + actor
elif (callingDomain.endswith('.i2p') and i2pDomain):
actorAbsolute = 'http://' + i2pDomain + actor
self._redirect_headers(actorAbsolute + '/' + timelineStr +
'?page=' + str(pageNumber) +
timelineBookmark, cookie,
callingDomain)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'follow deny done',
'like shown')
def _undoLikeButton(self, callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                        debug: bool):
        """Undo like button was pressed
"""
pageNumber = 1
likeUrl = path.split('?unlike=')[1]
if '?' in likeUrl:
likeUrl = likeUrl.split('?')[0]
timelineBookmark = ''
if '?bm=' in path:
timelineBookmark = path.split('?bm=')[1]
if '?' in timelineBookmark:
timelineBookmark = timelineBookmark.split('?')[0]
timelineBookmark = '#' + timelineBookmark
if '?page=' in path:
pageNumberStr = path.split('?page=')[1]
if '?' in pageNumberStr:
pageNumberStr = pageNumberStr.split('?')[0]
if '#' in pageNumberStr:
pageNumberStr = pageNumberStr.split('#')[0]
if pageNumberStr.isdigit():
pageNumber = int(pageNumberStr)
timelineStr = 'inbox'
if '?tl=' in path:
timelineStr = path.split('?tl=')[1]
if '?' in timelineStr:
timelineStr = timelineStr.split('?')[0]
actor = path.split('?unlike=')[0]
self.postToNickname = getNicknameFromActor(actor)
if not self.postToNickname:
print('WARN: unable to find nickname in ' + actor)
self.server.GETbusy = False
actorAbsolute = \
httpPrefix + '://' + domainFull + actor
if callingDomain.endswith('.onion') and onionDomain:
actorAbsolute = 'http://' + onionDomain + actor
            elif (callingDomain.endswith('.i2p') and i2pDomain):
actorAbsolute = 'http://' + i2pDomain + actor
self._redirect_headers(actorAbsolute + '/' + timelineStr +
'?page=' + str(pageNumber), cookie,
callingDomain)
return
if not self.server.session:
print('Starting new session during undo like')
self.server.session = createSession(proxyType)
if not self.server.session:
print('ERROR: GET failed to create session ' +
'during undo like')
self._404()
self.server.GETbusy = False
return
undoActor = \
httpPrefix + '://' + domainFull + '/users/' + self.postToNickname
actorLiked = path.split('?actor=')[1]
if '?' in actorLiked:
actorLiked = actorLiked.split('?')[0]
undoLikeJson = {
"@context": "https://www.w3.org/ns/activitystreams",
'type': 'Undo',
'actor': undoActor,
'to': [actorLiked],
'object': {
'type': 'Like',
'actor': undoActor,
'to': [actorLiked],
'object': likeUrl
}
}
# directly undo the like within the post file
likedPostFilename = locatePost(baseDir,
self.postToNickname,
domain, likeUrl)
if likedPostFilename:
if debug:
print('Removing likes for ' + likedPostFilename)
undoLikesCollectionEntry(self.server.recentPostsCache,
baseDir,
likedPostFilename, likeUrl,
undoActor, domain, debug)
# send out the undo like to followers
self._postToOutbox(undoLikeJson, self.server.projectVersion)
self.server.GETbusy = False
actorAbsolute = httpPrefix + '://' + domainFull + actor
if callingDomain.endswith('.onion') and onionDomain:
actorAbsolute = 'http://' + onionDomain + actor
elif callingDomain.endswith('.i2p') and i2pDomain:
actorAbsolute = 'http://' + i2pDomain + actor
self._redirect_headers(actorAbsolute + '/' + timelineStr +
'?page=' + str(pageNumber) +
timelineBookmark, cookie,
callingDomain)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'like shown done',
'unlike shown')
def _bookmarkButton(self, callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                        debug: bool):
"""Bookmark button was pressed
"""
pageNumber = 1
bookmarkUrl = path.split('?bookmark=')[1]
if '?' in bookmarkUrl:
bookmarkUrl = bookmarkUrl.split('?')[0]
timelineBookmark = ''
if '?bm=' in path:
timelineBookmark = path.split('?bm=')[1]
if '?' in timelineBookmark:
timelineBookmark = timelineBookmark.split('?')[0]
timelineBookmark = '#' + timelineBookmark
actor = path.split('?bookmark=')[0]
if '?page=' in path:
pageNumberStr = path.split('?page=')[1]
if '?' in pageNumberStr:
pageNumberStr = pageNumberStr.split('?')[0]
if '#' in pageNumberStr:
pageNumberStr = pageNumberStr.split('#')[0]
if pageNumberStr.isdigit():
pageNumber = int(pageNumberStr)
timelineStr = 'inbox'
if '?tl=' in path:
timelineStr = path.split('?tl=')[1]
if '?' in timelineStr:
timelineStr = timelineStr.split('?')[0]
self.postToNickname = getNicknameFromActor(actor)
if not self.postToNickname:
print('WARN: unable to find nickname in ' + actor)
self.server.GETbusy = False
actorAbsolute = \
httpPrefix + '://' + domainFull + actor
if callingDomain.endswith('.onion') and onionDomain:
actorAbsolute = 'http://' + onionDomain + actor
elif callingDomain.endswith('.i2p') and i2pDomain:
actorAbsolute = 'http://' + i2pDomain + actor
self._redirect_headers(actorAbsolute + '/' + timelineStr +
'?page=' + str(pageNumber), cookie,
callingDomain)
return
if not self.server.session:
print('Starting new session during bookmark')
self.server.session = createSession(proxyType)
if not self.server.session:
print('ERROR: GET failed to create session ' +
'during bookmark')
self._404()
self.server.GETbusy = False
return
bookmarkActor = \
httpPrefix + '://' + domainFull + '/users/' + self.postToNickname
ccList = []
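        # bookmarks are stored locally against the post; the outbox send
        # below is commented out, so they are not federated to followers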
bookmark(self.server.recentPostsCache,
self.server.session,
baseDir,
self.server.federationList,
self.postToNickname,
domain, port,
ccList,
httpPrefix,
bookmarkUrl, bookmarkActor, False,
self.server.sendThreads,
self.server.postLog,
self.server.personCache,
self.server.cachedWebfingers,
self.server.debug,
self.server.projectVersion)
# self._postToOutbox(bookmarkJson, self.server.projectVersion)
self.server.GETbusy = False
actorAbsolute = \
httpPrefix + '://' + domainFull + actor
if callingDomain.endswith('.onion') and onionDomain:
actorAbsolute = 'http://' + onionDomain + actor
elif callingDomain.endswith('.i2p') and i2pDomain:
actorAbsolute = 'http://' + i2pDomain + actor
self._redirect_headers(actorAbsolute + '/' + timelineStr +
'?page=' + str(pageNumber) +
timelineBookmark, cookie,
callingDomain)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'unlike shown done',
'bookmark shown')
def _undoBookmarkButton(self, callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                            debug: bool):
"""Button pressed to undo a bookmark
"""
pageNumber = 1
bookmarkUrl = path.split('?unbookmark=')[1]
if '?' in bookmarkUrl:
bookmarkUrl = bookmarkUrl.split('?')[0]
timelineBookmark = ''
if '?bm=' in path:
timelineBookmark = path.split('?bm=')[1]
if '?' in timelineBookmark:
timelineBookmark = timelineBookmark.split('?')[0]
timelineBookmark = '#' + timelineBookmark
if '?page=' in path:
pageNumberStr = path.split('?page=')[1]
if '?' in pageNumberStr:
pageNumberStr = pageNumberStr.split('?')[0]
if '#' in pageNumberStr:
pageNumberStr = pageNumberStr.split('#')[0]
if pageNumberStr.isdigit():
pageNumber = int(pageNumberStr)
timelineStr = 'inbox'
if '?tl=' in path:
timelineStr = path.split('?tl=')[1]
if '?' in timelineStr:
timelineStr = timelineStr.split('?')[0]
actor = path.split('?unbookmark=')[0]
self.postToNickname = getNicknameFromActor(actor)
if not self.postToNickname:
print('WARN: unable to find nickname in ' + actor)
self.server.GETbusy = False
actorAbsolute = \
httpPrefix + '://' + domainFull + actor
if callingDomain.endswith('.onion') and onionDomain:
actorAbsolute = 'http://' + onionDomain + actor
elif callingDomain.endswith('.i2p') and i2pDomain:
actorAbsolute = 'http://' + i2pDomain + actor
self._redirect_headers(actorAbsolute + '/' + timelineStr +
'?page=' + str(pageNumber), cookie,
callingDomain)
return
if not self.server.session:
print('Starting new session during undo bookmark')
self.server.session = createSession(proxyType)
if not self.server.session:
print('ERROR: GET failed to create session ' +
'during undo bookmark')
self._404()
self.server.GETbusy = False
return
undoActor = \
httpPrefix + '://' + domainFull + '/users/' + self.postToNickname
ccList = []
undoBookmark(self.server.recentPostsCache,
self.server.session,
baseDir,
self.server.federationList,
self.postToNickname,
domain, port,
ccList,
httpPrefix,
bookmarkUrl, undoActor, False,
self.server.sendThreads,
self.server.postLog,
self.server.personCache,
self.server.cachedWebfingers,
debug,
self.server.projectVersion)
# self._postToOutbox(undoBookmarkJson, self.server.projectVersion)
self.server.GETbusy = False
actorAbsolute = \
httpPrefix + '://' + domainFull + actor
if callingDomain.endswith('.onion') and onionDomain:
actorAbsolute = 'http://' + onionDomain + actor
elif callingDomain.endswith('.i2p') and i2pDomain:
actorAbsolute = 'http://' + i2pDomain + actor
self._redirect_headers(actorAbsolute + '/' + timelineStr +
'?page=' + str(pageNumber) +
timelineBookmark, cookie,
callingDomain)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'bookmark shown done',
'unbookmark shown')
def _deleteButton(self, callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                      debug: bool):
"""Delete button is pressed
"""
if not cookie:
print('ERROR: no cookie given when deleting')
self._400()
self.server.GETbusy = False
return
pageNumber = 1
if '?page=' in path:
pageNumberStr = path.split('?page=')[1]
if '?' in pageNumberStr:
pageNumberStr = pageNumberStr.split('?')[0]
if '#' in pageNumberStr:
pageNumberStr = pageNumberStr.split('#')[0]
if pageNumberStr.isdigit():
pageNumber = int(pageNumberStr)
deleteUrl = path.split('?delete=')[1]
if '?' in deleteUrl:
deleteUrl = deleteUrl.split('?')[0]
timelineStr = self.server.defaultTimeline
if '?tl=' in path:
timelineStr = path.split('?tl=')[1]
if '?' in timelineStr:
timelineStr = timelineStr.split('?')[0]
usersPath = path.split('?delete=')[0]
actor = \
httpPrefix + '://' + domainFull + usersPath
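        # a valid login cookie is required, and only posts belonging to
        # the requesting actor can be deleted from this screen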
if self.server.allowDeletion or \
deleteUrl.startswith(actor):
if self.server.debug:
print('DEBUG: deleteUrl=' + deleteUrl)
print('DEBUG: actor=' + actor)
if actor not in deleteUrl:
# You can only delete your own posts
self.server.GETbusy = False
if callingDomain.endswith('.onion') and onionDomain:
actor = 'http://' + onionDomain + usersPath
elif callingDomain.endswith('.i2p') and i2pDomain:
actor = 'http://' + i2pDomain + usersPath
self._redirect_headers(actor + '/' + timelineStr,
cookie, callingDomain)
return
self.postToNickname = getNicknameFromActor(actor)
if not self.postToNickname:
print('WARN: unable to find nickname in ' + actor)
self.server.GETbusy = False
if callingDomain.endswith('.onion') and onionDomain:
actor = 'http://' + onionDomain + usersPath
elif callingDomain.endswith('.i2p') and i2pDomain:
actor = 'http://' + i2pDomain + usersPath
self._redirect_headers(actor + '/' + timelineStr,
cookie, callingDomain)
return
if not self.server.session:
print('Starting new session during delete')
self.server.session = createSession(proxyType)
if not self.server.session:
print('ERROR: GET failed to create session ' +
'during delete')
self._404()
self.server.GETbusy = False
return
deleteStr = \
htmlDeletePost(self.server.cssCache,
self.server.recentPostsCache,
self.server.maxRecentPosts,
self.server.translate, pageNumber,
self.server.session, baseDir,
deleteUrl, httpPrefix,
__version__, self.server.cachedWebfingers,
self.server.personCache, callingDomain,
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly)
if deleteStr:
self._set_headers('text/html', len(deleteStr),
cookie, callingDomain)
self._write(deleteStr.encode('utf-8'))
self.server.GETbusy = False
return
self.server.GETbusy = False
if callingDomain.endswith('.onion') and onionDomain:
actor = 'http://' + onionDomain + usersPath
elif (callingDomain.endswith('.i2p') and i2pDomain):
actor = 'http://' + i2pDomain + usersPath
self._redirect_headers(actor + '/' + timelineStr,
cookie, callingDomain)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'unbookmark shown done',
'delete shown')
def _muteButton(self, callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                    debug: bool):
"""Mute button is pressed
"""
muteUrl = path.split('?mute=')[1]
if '?' in muteUrl:
muteUrl = muteUrl.split('?')[0]
timelineBookmark = ''
if '?bm=' in path:
timelineBookmark = path.split('?bm=')[1]
if '?' in timelineBookmark:
timelineBookmark = timelineBookmark.split('?')[0]
timelineBookmark = '#' + timelineBookmark
timelineStr = self.server.defaultTimeline
if '?tl=' in path:
timelineStr = path.split('?tl=')[1]
if '?' in timelineStr:
timelineStr = timelineStr.split('?')[0]
actor = \
httpPrefix + '://' + domainFull + path.split('?mute=')[0]
nickname = getNicknameFromActor(actor)
mutePost(baseDir, nickname, domain,
muteUrl, self.server.recentPostsCache)
self.server.GETbusy = False
if callingDomain.endswith('.onion') and onionDomain:
actor = \
'http://' + onionDomain + \
path.split('?mute=')[0]
elif (callingDomain.endswith('.i2p') and i2pDomain):
actor = \
'http://' + i2pDomain + \
path.split('?mute=')[0]
self._redirect_headers(actor + '/' +
timelineStr + timelineBookmark,
cookie, callingDomain)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'delete shown done',
'post muted')
def _undoMuteButton(self, callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                        debug: bool):
"""Undo mute button is pressed
"""
muteUrl = path.split('?unmute=')[1]
if '?' in muteUrl:
muteUrl = muteUrl.split('?')[0]
timelineBookmark = ''
if '?bm=' in path:
timelineBookmark = path.split('?bm=')[1]
if '?' in timelineBookmark:
timelineBookmark = timelineBookmark.split('?')[0]
timelineBookmark = '#' + timelineBookmark
timelineStr = self.server.defaultTimeline
if '?tl=' in path:
timelineStr = path.split('?tl=')[1]
if '?' in timelineStr:
timelineStr = timelineStr.split('?')[0]
actor = \
httpPrefix + '://' + domainFull + path.split('?unmute=')[0]
nickname = getNicknameFromActor(actor)
unmutePost(baseDir, nickname, domain,
muteUrl, self.server.recentPostsCache)
self.server.GETbusy = False
if callingDomain.endswith('.onion') and onionDomain:
actor = \
'http://' + onionDomain + path.split('?unmute=')[0]
elif callingDomain.endswith('.i2p') and i2pDomain:
actor = \
'http://' + i2pDomain + path.split('?unmute=')[0]
self._redirect_headers(actor + '/' + timelineStr +
timelineBookmark,
cookie, callingDomain)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'post muted done',
'unmute activated')
def _showRepliesToPost(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                           debug: bool) -> bool:
"""Shows the replies to a post
"""
if not ('/statuses/' in path and '/users/' in path):
return False
namedStatus = path.split('/users/')[1]
if '/' not in namedStatus:
return False
postSections = namedStatus.split('/')
if len(postSections) < 4:
return False
if not postSections[3].startswith('replies'):
return False
nickname = postSections[0]
statusNumber = postSections[2]
if not (len(statusNumber) > 10 and statusNumber.isdigit()):
return False
boxname = 'outbox'
# get the replies file
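        # replies are stored as a flat file of message ids alongside the
        # post in the account's outbox, named
        # <prefix>:##<domain>#users#<nick>#statuses#<statusNumber>.replies;
        # if the file is absent an empty OrderedCollection is returned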
postDir = \
baseDir + '/accounts/' + nickname + '@' + domain + '/' + boxname
postRepliesFilename = \
postDir + '/' + \
httpPrefix + ':##' + domainFull + '#users#' + \
nickname + '#statuses#' + statusNumber + '.replies'
if not os.path.isfile(postRepliesFilename):
# There are no replies,
# so show empty collection
contextStr = \
'https://www.w3.org/ns/activitystreams'
firstStr = \
httpPrefix + '://' + domainFull + '/users/' + nickname + \
'/statuses/' + statusNumber + '/replies?page=true'
idStr = \
httpPrefix + '://' + domainFull + '/users/' + nickname + \
'/statuses/' + statusNumber + '/replies'
lastStr = \
httpPrefix + '://' + domainFull + '/users/' + nickname + \
'/statuses/' + statusNumber + '/replies?page=true'
repliesJson = {
'@context': contextStr,
'first': firstStr,
'id': idStr,
'last': lastStr,
'totalItems': 0,
'type': 'OrderedCollection'
}
if self._requestHTTP():
if not self.server.session:
print('DEBUG: creating new session during get replies')
self.server.session = createSession(proxyType)
if not self.server.session:
print('ERROR: GET failed to create session ' +
'during get replies')
self._404()
self.server.GETbusy = False
return
recentPostsCache = self.server.recentPostsCache
maxRecentPosts = self.server.maxRecentPosts
translate = self.server.translate
session = self.server.session
cachedWebfingers = self.server.cachedWebfingers
personCache = self.server.personCache
projectVersion = self.server.projectVersion
ytDomain = self.server.YTReplacementDomain
msg = \
htmlPostReplies(self.server.cssCache,
recentPostsCache,
maxRecentPosts,
translate,
baseDir,
session,
cachedWebfingers,
personCache,
nickname,
domain,
port,
repliesJson,
httpPrefix,
projectVersion,
ytDomain,
self.server.showPublishedDateOnly)
msg = msg.encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
else:
if self._fetchAuthenticated():
msg = json.dumps(repliesJson, ensure_ascii=False)
msg = msg.encode('utf-8')
protocolStr = 'application/json'
self._set_headers(protocolStr, len(msg), None,
callingDomain)
self._write(msg)
else:
self._404()
self.server.GETbusy = False
return True
else:
            # replies exist. Iterate through the
# text file containing message ids
contextStr = 'https://www.w3.org/ns/activitystreams'
idStr = \
httpPrefix + '://' + domainFull + \
'/users/' + nickname + '/statuses/' + \
statusNumber + '?page=true'
partOfStr = \
httpPrefix + '://' + domainFull + \
'/users/' + nickname + '/statuses/' + statusNumber
repliesJson = {
'@context': contextStr,
'id': idStr,
'orderedItems': [
],
'partOf': partOfStr,
'type': 'OrderedCollectionPage'
}
# populate the items list with replies
populateRepliesJson(baseDir, nickname, domain,
postRepliesFilename,
authorized, repliesJson)
# send the replies json
if self._requestHTTP():
if not self.server.session:
print('DEBUG: creating new session ' +
'during get replies 2')
self.server.session = createSession(proxyType)
if not self.server.session:
print('ERROR: GET failed to ' +
'create session ' +
'during get replies 2')
self._404()
self.server.GETbusy = False
return
recentPostsCache = self.server.recentPostsCache
maxRecentPosts = self.server.maxRecentPosts
translate = self.server.translate
session = self.server.session
cachedWebfingers = self.server.cachedWebfingers
personCache = self.server.personCache
projectVersion = self.server.projectVersion
ytDomain = self.server.YTReplacementDomain
msg = \
htmlPostReplies(self.server.cssCache,
recentPostsCache,
maxRecentPosts,
translate,
baseDir,
session,
cachedWebfingers,
personCache,
nickname,
domain,
port,
repliesJson,
httpPrefix,
projectVersion,
ytDomain,
self.server.showPublishedDateOnly)
msg = msg.encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self._benchmarkGETtimings(GETstartTime,
GETtimings,
'individual post done',
'post replies done')
else:
if self._fetchAuthenticated():
msg = json.dumps(repliesJson,
ensure_ascii=False)
msg = msg.encode('utf-8')
protocolStr = 'application/json'
self._set_headers(protocolStr, len(msg),
None, callingDomain)
self._write(msg)
else:
self._404()
self.server.GETbusy = False
return True
return False
def _showRoles(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                   debug: bool) -> bool:
"""Show roles within profile screen
"""
namedStatus = path.split('/users/')[1]
if '/' not in namedStatus:
return False
postSections = namedStatus.split('/')
nickname = postSections[0]
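        # roles are read from the account's actor file at
        # accounts/<nickname>@<domain>.json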
actorFilename = \
baseDir + '/accounts/' + nickname + '@' + domain + '.json'
if not os.path.isfile(actorFilename):
return False
actorJson = loadJson(actorFilename)
if not actorJson:
return False
if actorJson.get('roles'):
if self._requestHTTP():
getPerson = \
personLookup(domain, path.replace('/roles', ''),
baseDir)
if getPerson:
defaultTimeline = \
self.server.defaultTimeline
recentPostsCache = \
self.server.recentPostsCache
cachedWebfingers = \
self.server.cachedWebfingers
YTReplacementDomain = \
self.server.YTReplacementDomain
iconsAsButtons = \
self.server.iconsAsButtons
msg = \
htmlProfile(self.server.rssIconAtTop,
self.server.cssCache,
iconsAsButtons,
defaultTimeline,
recentPostsCache,
self.server.maxRecentPosts,
self.server.translate,
self.server.projectVersion,
baseDir, httpPrefix, True,
getPerson, 'roles',
self.server.session,
cachedWebfingers,
self.server.personCache,
YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
actorJson['roles'],
None, None)
msg = msg.encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'post replies done',
'show roles')
else:
if self._fetchAuthenticated():
msg = json.dumps(actorJson['roles'],
ensure_ascii=False)
msg = msg.encode('utf-8')
self._set_headers('application/json', len(msg),
None, callingDomain)
self._write(msg)
else:
self._404()
self.server.GETbusy = False
return True
return False
def _showSkills(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                    debug: bool) -> bool:
"""Show skills on the profile screen
"""
namedStatus = path.split('/users/')[1]
if '/' in namedStatus:
postSections = namedStatus.split('/')
nickname = postSections[0]
actorFilename = \
baseDir + '/accounts/' + \
nickname + '@' + domain + '.json'
if os.path.isfile(actorFilename):
actorJson = loadJson(actorFilename)
if actorJson:
if actorJson.get('skills'):
if self._requestHTTP():
getPerson = \
personLookup(domain,
path.replace('/skills', ''),
baseDir)
if getPerson:
defaultTimeline = \
self.server.defaultTimeline
recentPostsCache = \
self.server.recentPostsCache
cachedWebfingers = \
self.server.cachedWebfingers
YTReplacementDomain = \
self.server.YTReplacementDomain
showPublishedDateOnly = \
self.server.showPublishedDateOnly
iconsAsButtons = \
self.server.iconsAsButtons
msg = \
htmlProfile(self.server.rssIconAtTop,
self.server.cssCache,
iconsAsButtons,
defaultTimeline,
recentPostsCache,
self.server.maxRecentPosts,
self.server.translate,
self.server.projectVersion,
baseDir, httpPrefix, True,
getPerson, 'skills',
self.server.session,
cachedWebfingers,
self.server.personCache,
YTReplacementDomain,
showPublishedDateOnly,
self.server.newswire,
actorJson['skills'],
None, None)
msg = msg.encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self._benchmarkGETtimings(GETstartTime,
GETtimings,
'post roles done',
'show skills')
else:
if self._fetchAuthenticated():
msg = json.dumps(actorJson['skills'],
ensure_ascii=False)
msg = msg.encode('utf-8')
self._set_headers('application/json',
len(msg), None,
callingDomain)
self._write(msg)
else:
self._404()
self.server.GETbusy = False
return True
actor = path.replace('/skills', '')
actorAbsolute = httpPrefix + '://' + domainFull + actor
if callingDomain.endswith('.onion') and onionDomain:
actorAbsolute = 'http://' + onionDomain + actor
elif callingDomain.endswith('.i2p') and i2pDomain:
actorAbsolute = 'http://' + i2pDomain + actor
self._redirect_headers(actorAbsolute, cookie, callingDomain)
self.server.GETbusy = False
return True
def _showIndividualAtPost(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                              debug: bool) -> bool:
        """Get an individual post from the path /@nickname/statusnumber
"""
if '/@' not in path:
return False
likedBy = None
if '?likedBy=' in path:
likedBy = path.split('?likedBy=')[1].strip()
if '?' in likedBy:
likedBy = likedBy.split('?')[0]
path = path.split('?likedBy=')[0]
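        # paths have the form /@nickname/<status number>; the post is
        # loaded from the account's outbox, where the post id is stored
        # as a filename with '/' replaced by '#'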
namedStatus = path.split('/@')[1]
if '/' not in namedStatus:
# show actor
nickname = namedStatus
else:
postSections = namedStatus.split('/')
if len(postSections) == 2:
nickname = postSections[0]
statusNumber = postSections[1]
if len(statusNumber) > 10 and statusNumber.isdigit():
postFilename = \
baseDir + '/accounts/' + \
nickname + '@' + \
domain + '/outbox/' + \
httpPrefix + ':##' + \
domainFull + '#users#' + \
nickname + '#statuses#' + \
statusNumber + '.json'
if os.path.isfile(postFilename):
postJsonObject = loadJson(postFilename)
loadedPost = False
if postJsonObject:
loadedPost = True
else:
postJsonObject = {}
if loadedPost:
# Only authorized viewers get to see likes
                            # on posts. Otherwise marketers could gain
# more social graph info
if not authorized:
pjo = postJsonObject
self._removePostInteractions(pjo)
if self._requestHTTP():
recentPostsCache = \
self.server.recentPostsCache
maxRecentPosts = \
self.server.maxRecentPosts
translate = \
self.server.translate
cachedWebfingers = \
self.server.cachedWebfingers
personCache = \
self.server.personCache
projectVersion = \
self.server.projectVersion
ytDomain = \
self.server.YTReplacementDomain
showPublishedDateOnly = \
self.server.showPublishedDateOnly
cssCache = self.server.cssCache
msg = \
htmlIndividualPost(cssCache,
recentPostsCache,
maxRecentPosts,
translate,
                                                       baseDir,
                                                       self.server.session,
cachedWebfingers,
personCache,
nickname,
domain,
port,
authorized,
postJsonObject,
httpPrefix,
projectVersion,
likedBy,
ytDomain,
showPublishedDateOnly)
msg = msg.encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
else:
if self._fetchAuthenticated():
msg = json.dumps(postJsonObject,
ensure_ascii=False)
msg = msg.encode('utf-8')
self._set_headers('application/json',
len(msg),
None, callingDomain)
self._write(msg)
else:
self._404()
self.server.GETbusy = False
self._benchmarkGETtimings(GETstartTime, GETtimings,
'new post done',
'individual post shown')
return True
else:
self._404()
self.server.GETbusy = False
return True
return False
def _showIndividualPost(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                            debug: bool) -> bool:
"""Shows an individual post
"""
likedBy = None
if '?likedBy=' in path:
likedBy = path.split('?likedBy=')[1].strip()
if '?' in likedBy:
likedBy = likedBy.split('?')[0]
path = path.split('?likedBy=')[0]
namedStatus = path.split('/users/')[1]
if '/' not in namedStatus:
return False
postSections = namedStatus.split('/')
if len(postSections) < 3:
return False
nickname = postSections[0]
statusNumber = postSections[2]
if len(statusNumber) <= 10 or (not statusNumber.isdigit()):
return False
postFilename = \
baseDir + '/accounts/' + \
nickname + '@' + \
domain + '/outbox/' + \
httpPrefix + ':##' + \
domainFull + '#users#' + \
nickname + '#statuses#' + \
statusNumber + '.json'
if os.path.isfile(postFilename):
postJsonObject = loadJson(postFilename)
if not postJsonObject:
self.send_response(429)
self.end_headers()
self.server.GETbusy = False
return True
else:
# Only authorized viewers get to see likes
# on posts
                # Otherwise marketers could gain more social
# graph info
if not authorized:
pjo = postJsonObject
self._removePostInteractions(pjo)
if self._requestHTTP():
recentPostsCache = \
self.server.recentPostsCache
maxRecentPosts = \
self.server.maxRecentPosts
translate = \
self.server.translate
cachedWebfingers = \
self.server.cachedWebfingers
personCache = \
self.server.personCache
projectVersion = \
self.server.projectVersion
ytDomain = \
self.server.YTReplacementDomain
showPublishedDateOnly = \
self.server.showPublishedDateOnly
msg = \
htmlIndividualPost(self.server.cssCache,
recentPostsCache,
maxRecentPosts,
translate,
baseDir,
self.server.session,
cachedWebfingers,
personCache,
nickname,
domain,
port,
authorized,
postJsonObject,
httpPrefix,
projectVersion,
likedBy,
ytDomain,
showPublishedDateOnly)
msg = msg.encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self._benchmarkGETtimings(GETstartTime,
GETtimings,
'show skills ' +
'done',
'show status')
else:
if self._fetchAuthenticated():
msg = json.dumps(postJsonObject,
ensure_ascii=False)
msg = msg.encode('utf-8')
self._set_headers('application/json',
len(msg),
None, callingDomain)
self._write(msg)
else:
self._404()
self.server.GETbusy = False
return True
else:
self._404()
self.server.GETbusy = False
return True
return False
def _showInbox(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                   debug: bool,
recentPostsCache: {}, session,
defaultTimeline: str,
maxRecentPosts: int,
translate: {},
cachedWebfingers: {},
personCache: {},
allowDeletion: bool,
projectVersion: str,
YTReplacementDomain: str) -> bool:
"""Shows the inbox timeline
"""
if '/users/' in path:
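            # the inbox is only shown to an authorized (logged in)
            # account; unauthorized requests receive a 405, except for
            # the shared inbox path which is handled elsewhere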
if authorized:
inboxFeed = \
personBoxJson(recentPostsCache,
session,
baseDir,
domain,
port,
path,
httpPrefix,
maxPostsInFeed, 'inbox',
authorized,
0,
self.server.positiveVoting,
self.server.votingTimeMins)
if inboxFeed:
if GETstartTime:
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show status done',
'show inbox json')
if self._requestHTTP():
nickname = path.replace('/users/', '')
nickname = nickname.replace('/inbox', '')
pageNumber = 1
if '?page=' in nickname:
pageNumber = nickname.split('?page=')[1]
nickname = nickname.split('?page=')[0]
if pageNumber.isdigit():
pageNumber = int(pageNumber)
else:
pageNumber = 1
if 'page=' not in path:
# if no page was specified then show the first
inboxFeed = \
personBoxJson(recentPostsCache,
session,
baseDir,
domain,
port,
path + '?page=1',
httpPrefix,
maxPostsInFeed, 'inbox',
authorized,
0,
self.server.positiveVoting,
self.server.votingTimeMins)
if GETstartTime:
self._benchmarkGETtimings(GETstartTime,
GETtimings,
'show status done',
'show inbox page')
fullWidthTimelineButtonHeader = \
self.server.fullWidthTimelineButtonHeader
msg = htmlInbox(self.server.cssCache,
defaultTimeline,
recentPostsCache,
maxRecentPosts,
translate,
pageNumber, maxPostsInFeed,
session,
baseDir,
cachedWebfingers,
personCache,
nickname,
domain,
port,
inboxFeed,
allowDeletion,
httpPrefix,
projectVersion,
self._isMinimal(nickname),
YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
self.server.positiveVoting,
self.server.showPublishAsIcon,
fullWidthTimelineButtonHeader,
self.server.iconsAsButtons,
self.server.rssIconAtTop,
self.server.publishButtonAtTop,
authorized)
if GETstartTime:
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show status done',
'show inbox html')
if msg:
msg = msg.encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
if GETstartTime:
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show status done',
'show inbox')
else:
# don't need authenticated fetch here because
# there is already the authorization check
msg = json.dumps(inboxFeed, ensure_ascii=False)
msg = msg.encode('utf-8')
self._set_headers('application/json', len(msg),
None, callingDomain)
self._write(msg)
self.server.GETbusy = False
return True
else:
if debug:
nickname = path.replace('/users/', '')
nickname = nickname.replace('/inbox', '')
print('DEBUG: ' + nickname +
' was not authorized to access ' + path)
if path != '/inbox':
# not the shared inbox
if debug:
print('DEBUG: GET access to inbox is unauthorized')
self.send_response(405)
self.end_headers()
self.server.GETbusy = False
return True
return False
def _showDMs(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                 debug: bool) -> bool:
"""Shows the DMs timeline
"""
if '/users/' in path:
if authorized:
inboxDMFeed = \
personBoxJson(self.server.recentPostsCache,
self.server.session,
baseDir,
domain,
port,
path,
httpPrefix,
maxPostsInFeed, 'dm',
authorized,
0, self.server.positiveVoting,
self.server.votingTimeMins)
if inboxDMFeed:
if self._requestHTTP():
nickname = path.replace('/users/', '')
nickname = nickname.replace('/dm', '')
pageNumber = 1
if '?page=' in nickname:
pageNumber = nickname.split('?page=')[1]
nickname = nickname.split('?page=')[0]
if pageNumber.isdigit():
pageNumber = int(pageNumber)
else:
pageNumber = 1
if 'page=' not in path:
# if no page was specified then show the first
inboxDMFeed = \
personBoxJson(self.server.recentPostsCache,
self.server.session,
baseDir,
domain,
port,
path + '?page=1',
httpPrefix,
maxPostsInFeed, 'dm',
authorized,
0,
self.server.positiveVoting,
self.server.votingTimeMins)
fullWidthTimelineButtonHeader = \
self.server.fullWidthTimelineButtonHeader
msg = \
htmlInboxDMs(self.server.cssCache,
self.server.defaultTimeline,
self.server.recentPostsCache,
self.server.maxRecentPosts,
self.server.translate,
pageNumber, maxPostsInFeed,
self.server.session,
baseDir,
self.server.cachedWebfingers,
self.server.personCache,
nickname,
domain,
port,
inboxDMFeed,
self.server.allowDeletion,
httpPrefix,
self.server.projectVersion,
self._isMinimal(nickname),
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
self.server.positiveVoting,
self.server.showPublishAsIcon,
fullWidthTimelineButtonHeader,
self.server.iconsAsButtons,
self.server.rssIconAtTop,
self.server.publishButtonAtTop,
authorized)
msg = msg.encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show inbox done',
'show dms')
else:
# don't need authenticated fetch here because
# there is already the authorization check
msg = json.dumps(inboxDMFeed, ensure_ascii=False)
msg = msg.encode('utf-8')
self._set_headers('application/json',
len(msg),
None, callingDomain)
self._write(msg)
self.server.GETbusy = False
return True
else:
if debug:
nickname = path.replace('/users/', '')
nickname = nickname.replace('/dm', '')
print('DEBUG: ' + nickname +
' was not authorized to access ' + path)
if path != '/dm':
# not the DM inbox
if debug:
print('DEBUG: GET access to DM timeline is unauthorized')
self.send_response(405)
self.end_headers()
self.server.GETbusy = False
return True
return False
def _showReplies(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                     debug: bool) -> bool:
"""Shows the replies timeline
"""
if '/users/' in path:
if authorized:
inboxRepliesFeed = \
personBoxJson(self.server.recentPostsCache,
self.server.session,
baseDir,
domain,
port,
path,
httpPrefix,
maxPostsInFeed, 'tlreplies',
True,
0, self.server.positiveVoting,
self.server.votingTimeMins)
if not inboxRepliesFeed:
inboxRepliesFeed = []
if self._requestHTTP():
nickname = path.replace('/users/', '')
nickname = nickname.replace('/tlreplies', '')
pageNumber = 1
if '?page=' in nickname:
pageNumber = nickname.split('?page=')[1]
nickname = nickname.split('?page=')[0]
if pageNumber.isdigit():
pageNumber = int(pageNumber)
else:
pageNumber = 1
if 'page=' not in path:
# if no page was specified then show the first
inboxRepliesFeed = \
personBoxJson(self.server.recentPostsCache,
self.server.session,
baseDir,
domain,
port,
path + '?page=1',
httpPrefix,
maxPostsInFeed, 'tlreplies',
True,
0, self.server.positiveVoting,
self.server.votingTimeMins)
fullWidthTimelineButtonHeader = \
self.server.fullWidthTimelineButtonHeader
msg = \
htmlInboxReplies(self.server.cssCache,
self.server.defaultTimeline,
self.server.recentPostsCache,
self.server.maxRecentPosts,
self.server.translate,
pageNumber, maxPostsInFeed,
self.server.session,
baseDir,
self.server.cachedWebfingers,
self.server.personCache,
nickname,
domain,
port,
inboxRepliesFeed,
self.server.allowDeletion,
httpPrefix,
self.server.projectVersion,
self._isMinimal(nickname),
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
self.server.positiveVoting,
self.server.showPublishAsIcon,
fullWidthTimelineButtonHeader,
self.server.iconsAsButtons,
self.server.rssIconAtTop,
self.server.publishButtonAtTop,
authorized)
msg = msg.encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show dms done',
'show replies 2')
else:
# don't need authenticated fetch here because there is
# already the authorization check
msg = json.dumps(inboxRepliesFeed,
ensure_ascii=False)
msg = msg.encode('utf-8')
self._set_headers('application/json', len(msg),
None, callingDomain)
self._write(msg)
self.server.GETbusy = False
return True
else:
if debug:
nickname = path.replace('/users/', '')
nickname = nickname.replace('/tlreplies', '')
print('DEBUG: ' + nickname +
' was not authorized to access ' + path)
if path != '/tlreplies':
# not the replies inbox
if debug:
print('DEBUG: GET access to inbox is unauthorized')
self.send_response(405)
self.end_headers()
self.server.GETbusy = False
return True
return False
def _showMediaTimeline(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                           debug: bool) -> bool:
"""Shows the media timeline
"""
if '/users/' in path:
if authorized:
inboxMediaFeed = \
personBoxJson(self.server.recentPostsCache,
self.server.session,
baseDir,
domain,
port,
path,
httpPrefix,
maxPostsInMediaFeed, 'tlmedia',
True,
0, self.server.positiveVoting,
self.server.votingTimeMins)
if not inboxMediaFeed:
inboxMediaFeed = []
if self._requestHTTP():
nickname = path.replace('/users/', '')
nickname = nickname.replace('/tlmedia', '')
pageNumber = 1
if '?page=' in nickname:
pageNumber = nickname.split('?page=')[1]
nickname = nickname.split('?page=')[0]
if pageNumber.isdigit():
pageNumber = int(pageNumber)
else:
pageNumber = 1
if 'page=' not in path:
# if no page was specified then show the first
inboxMediaFeed = \
personBoxJson(self.server.recentPostsCache,
self.server.session,
baseDir,
domain,
port,
path + '?page=1',
httpPrefix,
maxPostsInMediaFeed, 'tlmedia',
True,
0, self.server.positiveVoting,
self.server.votingTimeMins)
fullWidthTimelineButtonHeader = \
self.server.fullWidthTimelineButtonHeader
msg = \
htmlInboxMedia(self.server.cssCache,
self.server.defaultTimeline,
self.server.recentPostsCache,
self.server.maxRecentPosts,
self.server.translate,
pageNumber, maxPostsInMediaFeed,
self.server.session,
baseDir,
self.server.cachedWebfingers,
self.server.personCache,
nickname,
domain,
port,
inboxMediaFeed,
self.server.allowDeletion,
httpPrefix,
self.server.projectVersion,
self._isMinimal(nickname),
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
self.server.positiveVoting,
self.server.showPublishAsIcon,
fullWidthTimelineButtonHeader,
self.server.iconsAsButtons,
self.server.rssIconAtTop,
self.server.publishButtonAtTop,
authorized)
msg = msg.encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show replies 2 done',
'show media 2')
else:
# don't need authenticated fetch here because there is
# already the authorization check
msg = json.dumps(inboxMediaFeed,
ensure_ascii=False)
msg = msg.encode('utf-8')
self._set_headers('application/json', len(msg),
None, callingDomain)
self._write(msg)
self.server.GETbusy = False
return True
else:
if debug:
nickname = path.replace('/users/', '')
nickname = nickname.replace('/tlmedia', '')
print('DEBUG: ' + nickname +
' was not authorized to access ' + path)
if path != '/tlmedia':
# not the media inbox
if debug:
print('DEBUG: GET access to inbox is unauthorized')
self.send_response(405)
self.end_headers()
self.server.GETbusy = False
return True
return False
def _showBlogsTimeline(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                           debug: bool) -> bool:
"""Shows the blogs timeline
"""
if '/users/' in path:
if authorized:
inboxBlogsFeed = \
personBoxJson(self.server.recentPostsCache,
self.server.session,
baseDir,
domain,
port,
path,
httpPrefix,
maxPostsInBlogsFeed, 'tlblogs',
True,
0, self.server.positiveVoting,
self.server.votingTimeMins)
if not inboxBlogsFeed:
inboxBlogsFeed = []
if self._requestHTTP():
nickname = path.replace('/users/', '')
nickname = nickname.replace('/tlblogs', '')
pageNumber = 1
if '?page=' in nickname:
pageNumber = nickname.split('?page=')[1]
nickname = nickname.split('?page=')[0]
if pageNumber.isdigit():
pageNumber = int(pageNumber)
else:
pageNumber = 1
if 'page=' not in path:
# if no page was specified then show the first
inboxBlogsFeed = \
personBoxJson(self.server.recentPostsCache,
self.server.session,
baseDir,
domain,
port,
path + '?page=1',
httpPrefix,
maxPostsInBlogsFeed, 'tlblogs',
True,
0, self.server.positiveVoting,
self.server.votingTimeMins)
fullWidthTimelineButtonHeader = \
self.server.fullWidthTimelineButtonHeader
msg = \
htmlInboxBlogs(self.server.cssCache,
self.server.defaultTimeline,
self.server.recentPostsCache,
self.server.maxRecentPosts,
self.server.translate,
pageNumber, maxPostsInBlogsFeed,
self.server.session,
baseDir,
self.server.cachedWebfingers,
self.server.personCache,
nickname,
domain,
port,
inboxBlogsFeed,
self.server.allowDeletion,
httpPrefix,
self.server.projectVersion,
self._isMinimal(nickname),
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
self.server.positiveVoting,
self.server.showPublishAsIcon,
fullWidthTimelineButtonHeader,
self.server.iconsAsButtons,
self.server.rssIconAtTop,
self.server.publishButtonAtTop,
authorized)
msg = msg.encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show media 2 done',
'show blogs 2')
else:
# don't need authenticated fetch here because there is
# already the authorization check
msg = json.dumps(inboxBlogsFeed,
ensure_ascii=False)
msg = msg.encode('utf-8')
self._set_headers('application/json',
len(msg),
None, callingDomain)
self._write(msg)
self.server.GETbusy = False
return True
else:
if debug:
nickname = path.replace('/users/', '')
nickname = nickname.replace('/tlblogs', '')
print('DEBUG: ' + nickname +
' was not authorized to access ' + path)
if path != '/tlblogs':
# not the blogs inbox
if debug:
print('DEBUG: GET access to blogs is unauthorized')
self.send_response(405)
self.end_headers()
self.server.GETbusy = False
return True
return False
def _showNewsTimeline(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                          debug: bool) -> bool:
"""Shows the news timeline
"""
if '/users/' in path:
if authorized:
inboxNewsFeed = \
personBoxJson(self.server.recentPostsCache,
self.server.session,
baseDir,
domain,
port,
path,
httpPrefix,
maxPostsInNewsFeed, 'tlnews',
True,
self.server.newswireVotesThreshold,
self.server.positiveVoting,
self.server.votingTimeMins)
if not inboxNewsFeed:
inboxNewsFeed = []
if self._requestHTTP():
nickname = path.replace('/users/', '')
nickname = nickname.replace('/tlnews', '')
pageNumber = 1
if '?page=' in nickname:
pageNumber = nickname.split('?page=')[1]
nickname = nickname.split('?page=')[0]
if pageNumber.isdigit():
pageNumber = int(pageNumber)
else:
pageNumber = 1
if 'page=' not in path:
# if no page was specified then show the first
inboxNewsFeed = \
personBoxJson(self.server.recentPostsCache,
self.server.session,
baseDir,
domain,
port,
path + '?page=1',
httpPrefix,
                                          maxPostsInNewsFeed, 'tlnews',
True,
self.server.newswireVotesThreshold,
self.server.positiveVoting,
self.server.votingTimeMins)
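                    # determine whether the viewing account is a moderator
                    # or an editor; both flags are passed to htmlInboxNews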
currNickname = path.split('/users/')[1]
if '/' in currNickname:
currNickname = currNickname.split('/')[0]
moderator = isModerator(baseDir, currNickname)
editor = isEditor(baseDir, currNickname)
fullWidthTimelineButtonHeader = \
self.server.fullWidthTimelineButtonHeader
msg = \
htmlInboxNews(self.server.cssCache,
self.server.defaultTimeline,
self.server.recentPostsCache,
self.server.maxRecentPosts,
self.server.translate,
pageNumber, maxPostsInNewsFeed,
self.server.session,
baseDir,
self.server.cachedWebfingers,
self.server.personCache,
nickname,
domain,
port,
inboxNewsFeed,
self.server.allowDeletion,
httpPrefix,
self.server.projectVersion,
self._isMinimal(nickname),
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
moderator, editor,
self.server.positiveVoting,
self.server.showPublishAsIcon,
fullWidthTimelineButtonHeader,
self.server.iconsAsButtons,
self.server.rssIconAtTop,
self.server.publishButtonAtTop,
authorized)
msg = msg.encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show blogs 2 done',
'show news 2')
else:
# don't need authenticated fetch here because there is
# already the authorization check
msg = json.dumps(inboxNewsFeed,
ensure_ascii=False)
msg = msg.encode('utf-8')
self._set_headers('application/json',
len(msg),
None, callingDomain)
self._write(msg)
self.server.GETbusy = False
return True
else:
if debug:
nickname = 'news'
print('DEBUG: ' + nickname +
' was not authorized to access ' + path)
if path != '/tlnews':
# not the news inbox
if debug:
print('DEBUG: GET access to news is unauthorized')
self.send_response(405)
self.end_headers()
self.server.GETbusy = False
return True
return False
def _showSharesTimeline(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                            debug: bool) -> bool:
"""Shows the shares timeline
"""
if '/users/' in path:
if authorized:
if self._requestHTTP():
nickname = path.replace('/users/', '')
nickname = nickname.replace('/tlshares', '')
pageNumber = 1
if '?page=' in nickname:
pageNumber = nickname.split('?page=')[1]
nickname = nickname.split('?page=')[0]
if pageNumber.isdigit():
pageNumber = int(pageNumber)
else:
pageNumber = 1
msg = \
htmlShares(self.server.cssCache,
self.server.defaultTimeline,
self.server.recentPostsCache,
self.server.maxRecentPosts,
self.server.translate,
pageNumber, maxPostsInFeed,
self.server.session,
baseDir,
self.server.cachedWebfingers,
self.server.personCache,
nickname,
domain,
port,
self.server.allowDeletion,
httpPrefix,
self.server.projectVersion,
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
self.server.positiveVoting,
self.server.showPublishAsIcon,
self.server.fullWidthTimelineButtonHeader,
self.server.iconsAsButtons,
self.server.rssIconAtTop,
self.server.publishButtonAtTop,
authorized)
msg = msg.encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show blogs 2 done',
'show shares 2')
self.server.GETbusy = False
return True
# not the shares timeline
if debug:
print('DEBUG: GET access to shares timeline is unauthorized')
self.send_response(405)
self.end_headers()
self.server.GETbusy = False
return True
def _showBookmarksTimeline(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                               debug: bool) -> bool:
"""Shows the bookmarks timeline
"""
if '/users/' in path:
if authorized:
bookmarksFeed = \
personBoxJson(self.server.recentPostsCache,
self.server.session,
baseDir,
domain,
port,
path,
httpPrefix,
maxPostsInFeed, 'tlbookmarks',
authorized,
0, self.server.positiveVoting,
self.server.votingTimeMins)
if bookmarksFeed:
if self._requestHTTP():
nickname = path.replace('/users/', '')
nickname = nickname.replace('/tlbookmarks', '')
nickname = nickname.replace('/bookmarks', '')
pageNumber = 1
if '?page=' in nickname:
pageNumber = nickname.split('?page=')[1]
nickname = nickname.split('?page=')[0]
if pageNumber.isdigit():
pageNumber = int(pageNumber)
else:
pageNumber = 1
if 'page=' not in path:
# if no page was specified then show the first
bookmarksFeed = \
personBoxJson(self.server.recentPostsCache,
self.server.session,
baseDir,
domain,
port,
path + '?page=1',
httpPrefix,
maxPostsInFeed,
'tlbookmarks',
authorized,
0, self.server.positiveVoting,
self.server.votingTimeMins)
fullWidthTimelineButtonHeader = \
self.server.fullWidthTimelineButtonHeader
msg = \
htmlBookmarks(self.server.cssCache,
self.server.defaultTimeline,
self.server.recentPostsCache,
self.server.maxRecentPosts,
self.server.translate,
pageNumber, maxPostsInFeed,
self.server.session,
baseDir,
self.server.cachedWebfingers,
self.server.personCache,
nickname,
domain,
port,
bookmarksFeed,
self.server.allowDeletion,
httpPrefix,
self.server.projectVersion,
self._isMinimal(nickname),
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
self.server.positiveVoting,
self.server.showPublishAsIcon,
fullWidthTimelineButtonHeader,
self.server.iconsAsButtons,
self.server.rssIconAtTop,
self.server.publishButtonAtTop,
authorized)
msg = msg.encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show shares 2 done',
'show bookmarks 2')
else:
# don't need authenticated fetch here because
# there is already the authorization check
msg = json.dumps(bookmarksFeed,
ensure_ascii=False)
msg = msg.encode('utf-8')
self._set_headers('application/json', len(msg),
None, callingDomain)
self._write(msg)
self.server.GETbusy = False
return True
else:
if debug:
nickname = path.replace('/users/', '')
nickname = nickname.replace('/tlbookmarks', '')
nickname = nickname.replace('/bookmarks', '')
print('DEBUG: ' + nickname +
' was not authorized to access ' + path)
if debug:
print('DEBUG: GET access to bookmarks is unauthorized')
self.send_response(405)
self.end_headers()
self.server.GETbusy = False
return True
def _showEventsTimeline(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                            debug: bool) -> bool:
"""Shows the events timeline
"""
if '/users/' in path:
if authorized:
# convert /events to /tlevents
if path.endswith('/events') or \
'/events?page=' in path:
path = path.replace('/events', '/tlevents')
eventsFeed = \
personBoxJson(self.server.recentPostsCache,
self.server.session,
baseDir,
domain,
port,
path,
httpPrefix,
maxPostsInFeed, 'tlevents',
authorized,
0, self.server.positiveVoting,
self.server.votingTimeMins)
print('eventsFeed: ' + str(eventsFeed))
if eventsFeed:
if self._requestHTTP():
nickname = path.replace('/users/', '')
nickname = nickname.replace('/tlevents', '')
pageNumber = 1
if '?page=' in nickname:
pageNumber = nickname.split('?page=')[1]
nickname = nickname.split('?page=')[0]
if pageNumber.isdigit():
pageNumber = int(pageNumber)
else:
pageNumber = 1
if 'page=' not in path:
# if no page was specified then show the first
eventsFeed = \
personBoxJson(self.server.recentPostsCache,
self.server.session,
baseDir,
domain,
port,
path + '?page=1',
httpPrefix,
maxPostsInFeed,
'tlevents',
authorized,
0, self.server.positiveVoting,
self.server.votingTimeMins)
fullWidthTimelineButtonHeader = \
self.server.fullWidthTimelineButtonHeader
msg = \
htmlEvents(self.server.cssCache,
self.server.defaultTimeline,
self.server.recentPostsCache,
self.server.maxRecentPosts,
self.server.translate,
pageNumber, maxPostsInFeed,
self.server.session,
baseDir,
self.server.cachedWebfingers,
self.server.personCache,
nickname,
domain,
port,
eventsFeed,
self.server.allowDeletion,
httpPrefix,
self.server.projectVersion,
self._isMinimal(nickname),
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
self.server.positiveVoting,
self.server.showPublishAsIcon,
fullWidthTimelineButtonHeader,
self.server.iconsAsButtons,
self.server.rssIconAtTop,
self.server.publishButtonAtTop,
authorized)
msg = msg.encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show bookmarks 2 done',
'show events')
else:
# don't need authenticated fetch here because
# there is already the authorization check
msg = json.dumps(eventsFeed,
ensure_ascii=False)
msg = msg.encode('utf-8')
self._set_headers('application/json', len(msg),
None, callingDomain)
self._write(msg)
self.server.GETbusy = False
return True
else:
if debug:
nickname = path.replace('/users/', '')
nickname = nickname.replace('/tlevents', '')
print('DEBUG: ' + nickname +
' was not authorized to access ' + path)
if debug:
print('DEBUG: GET access to events is unauthorized')
self.send_response(405)
self.end_headers()
self.server.GETbusy = False
return True
def _showOutboxTimeline(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                            debug: bool) -> bool:
"""Shows the outbox timeline
"""
# get outbox feed for a person
outboxFeed = \
personBoxJson(self.server.recentPostsCache,
self.server.session,
baseDir, domain,
port, path,
httpPrefix,
maxPostsInFeed, 'outbox',
authorized,
self.server.newswireVotesThreshold,
self.server.positiveVoting,
self.server.votingTimeMins)
if outboxFeed:
if self._requestHTTP():
nickname = \
path.replace('/users/', '').replace('/outbox', '')
pageNumber = 1
if '?page=' in nickname:
pageNumber = nickname.split('?page=')[1]
nickname = nickname.split('?page=')[0]
if pageNumber.isdigit():
pageNumber = int(pageNumber)
else:
pageNumber = 1
if 'page=' not in path:
# if a page wasn't specified then show the first one
outboxFeed = \
personBoxJson(self.server.recentPostsCache,
self.server.session,
baseDir,
domain,
port,
path + '?page=1',
httpPrefix,
maxPostsInFeed, 'outbox',
authorized,
self.server.newswireVotesThreshold,
self.server.positiveVoting,
self.server.votingTimeMins)
fullWidthTimelineButtonHeader = \
self.server.fullWidthTimelineButtonHeader
msg = \
htmlOutbox(self.server.cssCache,
self.server.defaultTimeline,
self.server.recentPostsCache,
self.server.maxRecentPosts,
self.server.translate,
pageNumber, maxPostsInFeed,
self.server.session,
baseDir,
self.server.cachedWebfingers,
self.server.personCache,
nickname,
domain,
port,
outboxFeed,
self.server.allowDeletion,
httpPrefix,
self.server.projectVersion,
self._isMinimal(nickname),
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
self.server.positiveVoting,
self.server.showPublishAsIcon,
fullWidthTimelineButtonHeader,
self.server.iconsAsButtons,
self.server.rssIconAtTop,
self.server.publishButtonAtTop,
authorized)
msg = msg.encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show events done',
'show outbox')
else:
if self._fetchAuthenticated():
msg = json.dumps(outboxFeed,
ensure_ascii=False)
msg = msg.encode('utf-8')
self._set_headers('application/json', len(msg),
None, callingDomain)
self._write(msg)
else:
self._404()
self.server.GETbusy = False
return True
return False
def _showModTimeline(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                         debug: bool) -> bool:
"""Shows the moderation timeline
"""
if '/users/' in path:
if authorized:
moderationFeed = \
personBoxJson(self.server.recentPostsCache,
self.server.session,
baseDir,
domain,
port,
path,
httpPrefix,
maxPostsInFeed, 'moderation',
True,
0, self.server.positiveVoting,
self.server.votingTimeMins)
if moderationFeed:
if self._requestHTTP():
nickname = path.replace('/users/', '')
nickname = nickname.replace('/moderation', '')
pageNumber = 1
if '?page=' in nickname:
pageNumber = nickname.split('?page=')[1]
nickname = nickname.split('?page=')[0]
if pageNumber.isdigit():
pageNumber = int(pageNumber)
else:
pageNumber = 1
if 'page=' not in path:
# if no page was specified then show the first
moderationFeed = \
personBoxJson(self.server.recentPostsCache,
self.server.session,
baseDir,
domain,
port,
path + '?page=1',
httpPrefix,
maxPostsInFeed, 'moderation',
True,
0, self.server.positiveVoting,
self.server.votingTimeMins)
fullWidthTimelineButtonHeader = \
self.server.fullWidthTimelineButtonHeader
msg = \
htmlModeration(self.server.cssCache,
self.server.defaultTimeline,
self.server.recentPostsCache,
self.server.maxRecentPosts,
self.server.translate,
pageNumber, maxPostsInFeed,
self.server.session,
baseDir,
self.server.cachedWebfingers,
self.server.personCache,
nickname,
domain,
port,
moderationFeed,
True,
httpPrefix,
self.server.projectVersion,
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
self.server.positiveVoting,
self.server.showPublishAsIcon,
fullWidthTimelineButtonHeader,
self.server.iconsAsButtons,
self.server.rssIconAtTop,
self.server.publishButtonAtTop,
authorized)
msg = msg.encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show outbox done',
'show moderation')
else:
# don't need authenticated fetch here because
# there is already the authorization check
msg = json.dumps(moderationFeed,
ensure_ascii=False)
msg = msg.encode('utf-8')
self._set_headers('application/json', len(msg),
None, callingDomain)
self._write(msg)
self.server.GETbusy = False
return True
else:
if debug:
nickname = path.replace('/users/', '')
nickname = nickname.replace('/moderation', '')
print('DEBUG: ' + nickname +
' was not authorized to access ' + path)
if debug:
print('DEBUG: GET access to moderation feed is unauthorized')
self.send_response(405)
self.end_headers()
self.server.GETbusy = False
return True
def _showSharesFeed(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                        debug: bool) -> bool:
"""Shows the shares feed
"""
shares = \
getSharesFeedForPerson(baseDir, domain, port, path,
httpPrefix, sharesPerPage)
if shares:
if self._requestHTTP():
pageNumber = 1
if '?page=' not in path:
searchPath = path
# get a page of shares, not the summary
shares = \
getSharesFeedForPerson(baseDir, domain, port,
path + '?page=true',
httpPrefix,
sharesPerPage)
else:
pageNumberStr = path.split('?page=')[1]
if '#' in pageNumberStr:
pageNumberStr = pageNumberStr.split('#')[0]
if pageNumberStr.isdigit():
pageNumber = int(pageNumberStr)
searchPath = path.split('?page=')[0]
getPerson = \
personLookup(domain,
searchPath.replace('/shares', ''),
baseDir)
if getPerson:
if not self.server.session:
print('Starting new session during profile')
self.server.session = createSession(proxyType)
if not self.server.session:
print('ERROR: GET failed to create session ' +
'during profile')
self._404()
self.server.GETbusy = False
return True
msg = \
htmlProfile(self.server.rssIconAtTop,
self.server.cssCache,
self.server.iconsAsButtons,
self.server.defaultTimeline,
self.server.recentPostsCache,
self.server.maxRecentPosts,
self.server.translate,
self.server.projectVersion,
baseDir, httpPrefix,
authorized,
getPerson, 'shares',
self.server.session,
self.server.cachedWebfingers,
self.server.personCache,
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
shares,
pageNumber, sharesPerPage)
msg = msg.encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show moderation done',
'show profile 2')
self.server.GETbusy = False
return True
else:
if self._fetchAuthenticated():
msg = json.dumps(shares,
ensure_ascii=False)
msg = msg.encode('utf-8')
self._set_headers('application/json', len(msg),
None, callingDomain)
self._write(msg)
else:
self._404()
self.server.GETbusy = False
return True
return False
def _showFollowingFeed(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                           debug: bool) -> bool:
"""Shows the following feed
"""
following = \
getFollowingFeed(baseDir, domain, port, path,
httpPrefix, authorized, followsPerPage)
if following:
if self._requestHTTP():
pageNumber = 1
if '?page=' not in path:
searchPath = path
# get a page of following, not the summary
following = \
getFollowingFeed(baseDir,
domain,
port,
path + '?page=true',
httpPrefix,
authorized, followsPerPage)
else:
pageNumberStr = path.split('?page=')[1]
if '#' in pageNumberStr:
pageNumberStr = pageNumberStr.split('#')[0]
if pageNumberStr.isdigit():
pageNumber = int(pageNumberStr)
searchPath = path.split('?page=')[0]
getPerson = \
personLookup(domain,
searchPath.replace('/following', ''),
baseDir)
if getPerson:
if not self.server.session:
print('Starting new session during following')
self.server.session = createSession(proxyType)
if not self.server.session:
print('ERROR: GET failed to create session ' +
'during following')
self._404()
self.server.GETbusy = False
return True
msg = \
htmlProfile(self.server.rssIconAtTop,
self.server.cssCache,
self.server.iconsAsButtons,
self.server.defaultTimeline,
self.server.recentPostsCache,
self.server.maxRecentPosts,
self.server.translate,
self.server.projectVersion,
baseDir, httpPrefix,
authorized,
getPerson, 'following',
self.server.session,
self.server.cachedWebfingers,
self.server.personCache,
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
following,
pageNumber,
followsPerPage).encode('utf-8')
self._set_headers('text/html',
len(msg), cookie, callingDomain)
self._write(msg)
self.server.GETbusy = False
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show profile 2 done',
'show profile 3')
return True
else:
if self._fetchAuthenticated():
msg = json.dumps(following,
ensure_ascii=False).encode('utf-8')
self._set_headers('application/json', len(msg),
None, callingDomain)
self._write(msg)
else:
self._404()
self.server.GETbusy = False
return True
return False
def _showFollowersFeed(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                           debug: bool) -> bool:
"""Shows the followers feed
"""
followers = \
getFollowingFeed(baseDir, domain, port, path, httpPrefix,
authorized, followsPerPage, 'followers')
if followers:
if self._requestHTTP():
pageNumber = 1
if '?page=' not in path:
searchPath = path
# get a page of followers, not the summary
followers = \
getFollowingFeed(baseDir,
domain,
port,
path + '?page=1',
httpPrefix,
authorized, followsPerPage,
'followers')
else:
pageNumberStr = path.split('?page=')[1]
if '#' in pageNumberStr:
pageNumberStr = pageNumberStr.split('#')[0]
if pageNumberStr.isdigit():
pageNumber = int(pageNumberStr)
searchPath = path.split('?page=')[0]
getPerson = \
personLookup(domain,
searchPath.replace('/followers', ''),
baseDir)
if getPerson:
if not self.server.session:
print('Starting new session during following2')
self.server.session = createSession(proxyType)
if not self.server.session:
print('ERROR: GET failed to create session ' +
'during following2')
self._404()
self.server.GETbusy = False
return True
msg = \
htmlProfile(self.server.rssIconAtTop,
self.server.cssCache,
self.server.iconsAsButtons,
self.server.defaultTimeline,
self.server.recentPostsCache,
self.server.maxRecentPosts,
self.server.translate,
self.server.projectVersion,
baseDir,
httpPrefix,
authorized,
getPerson, 'followers',
self.server.session,
self.server.cachedWebfingers,
self.server.personCache,
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
followers,
pageNumber,
followsPerPage).encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self.server.GETbusy = False
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show profile 3 done',
'show profile 4')
return True
else:
if self._fetchAuthenticated():
msg = json.dumps(followers,
ensure_ascii=False).encode('utf-8')
self._set_headers('application/json', len(msg),
None, callingDomain)
self._write(msg)
else:
self._404()
self.server.GETbusy = False
return True
return False
def _showPersonProfile(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                           debug: bool) -> bool:
"""Shows the profile for a person
"""
# look up a person
getPerson = personLookup(domain, path, baseDir)
if getPerson:
if self._requestHTTP():
if not self.server.session:
print('Starting new session during person lookup')
self.server.session = createSession(proxyType)
if not self.server.session:
print('ERROR: GET failed to create session ' +
'during person lookup')
self._404()
self.server.GETbusy = False
return True
msg = \
htmlProfile(self.server.rssIconAtTop,
self.server.cssCache,
self.server.iconsAsButtons,
self.server.defaultTimeline,
self.server.recentPostsCache,
self.server.maxRecentPosts,
self.server.translate,
self.server.projectVersion,
baseDir,
httpPrefix,
authorized,
getPerson, 'posts',
self.server.session,
self.server.cachedWebfingers,
self.server.personCache,
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
None, None).encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show profile 4 done',
'show profile posts')
else:
if self._fetchAuthenticated():
msg = json.dumps(getPerson,
ensure_ascii=False).encode('utf-8')
self._set_headers('application/json', len(msg),
None, callingDomain)
self._write(msg)
else:
self._404()
self.server.GETbusy = False
return True
return False
def _showBlogPage(self, authorized: bool,
callingDomain: str, path: str,
baseDir: str, httpPrefix: str,
domain: str, domainFull: str, port: int,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
proxyType: str, cookie: str,
                      translate: {}, debug: bool) -> bool:
"""Shows a blog page
"""
pageNumber = 1
nickname = path.split('/blog/')[1]
if '/' in nickname:
nickname = nickname.split('/')[0]
if '?' in nickname:
nickname = nickname.split('?')[0]
if '?page=' in path:
pageNumberStr = path.split('?page=')[1]
if '?' in pageNumberStr:
pageNumberStr = pageNumberStr.split('?')[0]
if '#' in pageNumberStr:
pageNumberStr = pageNumberStr.split('#')[0]
if pageNumberStr.isdigit():
pageNumber = int(pageNumberStr)
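                # keep the page number within a sensible range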
if pageNumber < 1:
pageNumber = 1
elif pageNumber > 10:
pageNumber = 10
if not self.server.session:
print('Starting new session during blog page')
self.server.session = createSession(proxyType)
if not self.server.session:
print('ERROR: GET failed to create session ' +
'during blog page')
self._404()
return True
msg = htmlBlogPage(authorized,
self.server.session,
baseDir,
httpPrefix,
translate,
nickname,
domain, port,
maxPostsInBlogsFeed, pageNumber)
if msg is not None:
msg = msg.encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'blog view done', 'blog page')
return True
self._404()
return True
def _redirectToLoginScreen(self, callingDomain: str, path: str,
httpPrefix: str, domainFull: str,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {},
                               authorized: bool, debug: bool) -> bool:
"""Redirects to the login screen if necessary
"""
divertToLoginScreen = False
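        # media and other static asset paths never require a login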
if '/media/' not in path and \
'/sharefiles/' not in path and \
'/statuses/' not in path and \
'/emoji/' not in path and \
'/tags/' not in path and \
'/avatars/' not in path and \
'/fonts/' not in path and \
'/icons/' not in path:
divertToLoginScreen = True
if path.startswith('/users/'):
nickStr = path.split('/users/')[1]
if '/' not in nickStr and '?' not in nickStr:
divertToLoginScreen = False
else:
if path.endswith('/following') or \
'/following?page=' in path or \
path.endswith('/followers') or \
'/followers?page=' in path or \
path.endswith('/skills') or \
path.endswith('/roles') or \
path.endswith('/shares'):
divertToLoginScreen = False
if divertToLoginScreen and not authorized:
divertPath = '/login'
if self.server.newsInstance:
# for news instances if not logged in then show the
# front page
divertPath = '/users/news'
# if debug:
print('DEBUG: divertToLoginScreen=' +
str(divertToLoginScreen))
print('DEBUG: authorized=' + str(authorized))
print('DEBUG: path=' + path)
if callingDomain.endswith('.onion') and onionDomain:
self._redirect_headers('http://' +
onionDomain + divertPath,
None, callingDomain)
elif callingDomain.endswith('.i2p') and i2pDomain:
self._redirect_headers('http://' +
i2pDomain + divertPath,
None, callingDomain)
else:
self._redirect_headers(httpPrefix + '://' +
domainFull +
divertPath, None, callingDomain)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'robots txt',
'show login screen')
return True
return False
def _getStyleSheet(self, callingDomain: str, path: str,
GETstartTime, GETtimings: {}) -> bool:
"""Returns the content of a css file
"""
# get the last part of the path
# eg. /my/path/file.css becomes file.css
if '/' in path:
path = path.split('/')[-1]
if os.path.isfile(path):
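            # try a few times, pausing between attempts if the read fails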
            css = ''
            tries = 0
while tries < 5:
try:
with open(path, 'r') as cssfile:
css = cssfile.read()
break
except Exception as e:
print(e)
time.sleep(1)
tries += 1
msg = css.encode('utf-8')
self._set_headers('text/css', len(msg),
None, callingDomain)
self._write(msg)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show login screen done',
'show profile.css')
return True
self._404()
return True
def _showQRcode(self, callingDomain: str, path: str,
baseDir: str, domain: str, port: int,
GETstartTime, GETtimings: {}) -> bool:
"""Shows a QR code for an account
"""
nickname = getNicknameFromActor(path)
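        # generate the QR code image for this account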
savePersonQrcode(baseDir, nickname, domain, port)
qrFilename = \
baseDir + '/accounts/' + nickname + '@' + domain + '/qrcode.png'
if os.path.isfile(qrFilename):
if self._etag_exists(qrFilename):
# The file has not changed
self._304()
                return True
tries = 0
mediaBinary = None
while tries < 5:
try:
with open(qrFilename, 'rb') as avFile:
mediaBinary = avFile.read()
break
except Exception as e:
print(e)
time.sleep(1)
tries += 1
if mediaBinary:
self._set_headers_etag(qrFilename, 'image/png',
mediaBinary, None,
callingDomain)
self._write(mediaBinary)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'login screen logo done',
'account qrcode')
return True
self._404()
return True
def _searchScreenBanner(self, callingDomain: str, path: str,
baseDir: str, domain: str, port: int,
GETstartTime, GETtimings: {}) -> bool:
"""Shows a banner image on the search screen
"""
nickname = getNicknameFromActor(path)
bannerFilename = \
baseDir + '/accounts/' + \
nickname + '@' + domain + '/search_banner.png'
if os.path.isfile(bannerFilename):
if self._etag_exists(bannerFilename):
# The file has not changed
self._304()
return True
tries = 0
mediaBinary = None
while tries < 5:
try:
with open(bannerFilename, 'rb') as avFile:
mediaBinary = avFile.read()
break
except Exception as e:
print(e)
time.sleep(1)
tries += 1
if mediaBinary:
self._set_headers_etag(bannerFilename, 'image/png',
mediaBinary, None,
callingDomain)
self._write(mediaBinary)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'account qrcode done',
'search screen banner')
return True
self._404()
return True
def _columImage(self, side: str, callingDomain: str, path: str,
baseDir: str, domain: str, port: int,
GETstartTime, GETtimings: {}) -> bool:
"""Shows an image at the top of the left/right column
"""
nickname = getNicknameFromActor(path)
if not nickname:
self._404()
return True
bannerFilename = \
baseDir + '/accounts/' + \
nickname + '@' + domain + '/' + side + '_col_image.png'
if os.path.isfile(bannerFilename):
if self._etag_exists(bannerFilename):
# The file has not changed
self._304()
return True
tries = 0
mediaBinary = None
while tries < 5:
try:
with open(bannerFilename, 'rb') as avFile:
mediaBinary = avFile.read()
break
except Exception as e:
print(e)
time.sleep(1)
tries += 1
if mediaBinary:
self._set_headers_etag(bannerFilename, 'image/png',
mediaBinary, None,
callingDomain)
self._write(mediaBinary)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'account qrcode done',
side + ' col image')
return True
self._404()
return True
def _showBackgroundImage(self, callingDomain: str, path: str,
baseDir: str,
GETstartTime, GETtimings: {}) -> bool:
"""Show a background image
"""
for ext in ('webp', 'gif', 'jpg', 'png', 'avif'):
for bg in ('follow', 'options', 'login'):
                # background image for the follow/options/login screens
if path.endswith('/' + bg + '-background.' + ext):
bgFilename = \
baseDir + '/accounts/' + \
bg + '-background.' + ext
if os.path.isfile(bgFilename):
if self._etag_exists(bgFilename):
# The file has not changed
self._304()
return True
tries = 0
bgBinary = None
while tries < 5:
try:
with open(bgFilename, 'rb') as avFile:
bgBinary = avFile.read()
break
except Exception as e:
print(e)
time.sleep(1)
tries += 1
if bgBinary:
if ext == 'jpg':
ext = 'jpeg'
self._set_headers_etag(bgFilename,
'image/' + ext,
bgBinary, None,
callingDomain)
self._write(bgBinary)
self._benchmarkGETtimings(GETstartTime,
GETtimings,
'search screen ' +
'banner done',
'background shown')
return True
self._404()
return True
def _showShareImage(self, callingDomain: str, path: str,
baseDir: str,
GETstartTime, GETtimings: {}) -> bool:
"""Show a shared item image
"""
if self._pathIsImage(path):
mediaStr = path.split('/sharefiles/')[1]
mediaFilename = \
baseDir + '/sharefiles/' + mediaStr
if os.path.isfile(mediaFilename):
if self._etag_exists(mediaFilename):
# The file has not changed
self._304()
return True
mediaFileType = 'png'
if mediaFilename.endswith('.png'):
mediaFileType = 'png'
elif mediaFilename.endswith('.jpg'):
mediaFileType = 'jpeg'
elif mediaFilename.endswith('.webp'):
mediaFileType = 'webp'
elif mediaFilename.endswith('.avif'):
mediaFileType = 'avif'
else:
mediaFileType = 'gif'
with open(mediaFilename, 'rb') as avFile:
mediaBinary = avFile.read()
self._set_headers_etag(mediaFilename,
'image/' + mediaFileType,
mediaBinary, None,
callingDomain)
self._write(mediaBinary)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show media done',
'share files shown')
return True
self._404()
return True
def _showAvatarOrBackground(self, callingDomain: str, path: str,
baseDir: str, domain: str,
GETstartTime, GETtimings: {}) -> bool:
"""Shows an avatar or profile background image
"""
if '/users/' in path:
if self._pathIsImage(path):
avatarStr = path.split('/users/')[1]
if '/' in avatarStr and '.temp.' not in path:
avatarNickname = avatarStr.split('/')[0]
avatarFile = avatarStr.split('/')[1]
# remove any numbers, eg. avatar123.png becomes avatar.png
if avatarFile.startswith('avatar'):
avatarFile = 'avatar.' + avatarFile.split('.')[1]
elif avatarFile.startswith('image'):
avatarFile = 'image.' + avatarFile.split('.')[1]
avatarFilename = \
baseDir + '/accounts/' + \
avatarNickname + '@' + domain + '/' + avatarFile
if os.path.isfile(avatarFilename):
if self._etag_exists(avatarFilename):
# The file has not changed
self._304()
return True
mediaImageType = 'png'
if avatarFile.endswith('.png'):
mediaImageType = 'png'
elif avatarFile.endswith('.jpg'):
mediaImageType = 'jpeg'
elif avatarFile.endswith('.gif'):
mediaImageType = 'gif'
elif avatarFile.endswith('.avif'):
mediaImageType = 'avif'
else:
mediaImageType = 'webp'
with open(avatarFilename, 'rb') as avFile:
mediaBinary = avFile.read()
self._set_headers_etag(avatarFilename,
'image/' + mediaImageType,
mediaBinary, None,
callingDomain)
self._write(mediaBinary)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'icon shown done',
'avatar background shown')
return True
return False
def _confirmDeleteEvent(self, callingDomain: str, path: str,
baseDir: str, httpPrefix: str, cookie: str,
translate: {}, domainFull: str,
onionDomain: str, i2pDomain: str,
GETstartTime, GETtimings: {}) -> bool:
"""Confirm whether to delete a calendar event
"""
postId = path.split('?id=')[1]
if '?' in postId:
postId = postId.split('?')[0]
postTime = path.split('?time=')[1]
if '?' in postTime:
postTime = postTime.split('?')[0]
postYear = path.split('?year=')[1]
if '?' in postYear:
postYear = postYear.split('?')[0]
postMonth = path.split('?month=')[1]
if '?' in postMonth:
postMonth = postMonth.split('?')[0]
postDay = path.split('?day=')[1]
if '?' in postDay:
postDay = postDay.split('?')[0]
        # show the confirmation screen
msg = htmlCalendarDeleteConfirm(self.server.cssCache,
translate,
baseDir, path,
httpPrefix,
domainFull,
postId, postTime,
postYear, postMonth, postDay,
callingDomain)
if not msg:
actor = \
httpPrefix + '://' + \
domainFull + \
path.split('/eventdelete')[0]
if callingDomain.endswith('.onion') and onionDomain:
actor = \
'http://' + onionDomain + \
path.split('/eventdelete')[0]
elif callingDomain.endswith('.i2p') and i2pDomain:
actor = \
'http://' + i2pDomain + \
path.split('/eventdelete')[0]
self._redirect_headers(actor + '/calendar',
cookie, callingDomain)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'calendar shown done',
'calendar delete shown')
return True
msg = msg.encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self.server.GETbusy = False
return True
def _showNewPost(self, callingDomain: str, path: str,
mediaInstance: bool, translate: {},
baseDir: str, httpPrefix: str,
inReplyToUrl: str, replyToList: [],
shareDescription: str, replyPageNumber: int,
domain: str, domainFull: str,
GETstartTime, GETtimings: {}, cookie) -> bool:
"""Shows the new post screen
"""
isNewPostEndpoint = False
if '/users/' in path and '/new' in path:
# Various types of new post in the web interface
newPostEnd = ('newpost', 'newblog', 'newunlisted',
'newfollowers', 'newdm', 'newreminder',
'newevent', 'newreport', 'newquestion',
'newshare')
for postType in newPostEnd:
if path.endswith('/' + postType):
isNewPostEndpoint = True
break
if isNewPostEndpoint:
nickname = getNicknameFromActor(path)
msg = htmlNewPost(self.server.cssCache,
mediaInstance,
translate,
baseDir,
httpPrefix,
path, inReplyToUrl,
replyToList,
shareDescription,
replyPageNumber,
nickname, domain,
domainFull,
self.server.defaultTimeline,
self.server.newswire).encode('utf-8')
if not msg:
print('Error replying to ' + inReplyToUrl)
self._404()
self.server.GETbusy = False
return True
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self.server.GETbusy = False
self._benchmarkGETtimings(GETstartTime, GETtimings,
'unmute activated done',
'new post made')
return True
return False
def _editProfile(self, callingDomain: str, path: str,
translate: {}, baseDir: str,
httpPrefix: str, domain: str, port: int,
cookie: str) -> bool:
"""Show the edit profile screen
"""
if '/users/' in path and path.endswith('/editprofile'):
msg = htmlEditProfile(self.server.cssCache,
translate,
baseDir,
path, domain,
port,
httpPrefix,
self.server.defaultTimeline).encode('utf-8')
if msg:
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
else:
self._404()
self.server.GETbusy = False
return True
return False
def _editLinks(self, callingDomain: str, path: str,
translate: {}, baseDir: str,
httpPrefix: str, domain: str, port: int,
cookie: str) -> bool:
"""Show the links from the left column
"""
if '/users/' in path and path.endswith('/editlinks'):
msg = htmlEditLinks(self.server.cssCache,
translate,
baseDir,
path, domain,
port,
httpPrefix,
self.server.defaultTimeline).encode('utf-8')
if msg:
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
else:
self._404()
self.server.GETbusy = False
return True
return False
def _editNewswire(self, callingDomain: str, path: str,
translate: {}, baseDir: str,
httpPrefix: str, domain: str, port: int,
cookie: str) -> bool:
"""Show the newswire from the right column
"""
if '/users/' in path and path.endswith('/editnewswire'):
msg = htmlEditNewswire(self.server.cssCache,
translate,
baseDir,
path, domain,
port,
httpPrefix,
self.server.defaultTimeline).encode('utf-8')
if msg:
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
else:
self._404()
self.server.GETbusy = False
return True
return False
def _editNewsPost(self, callingDomain: str, path: str,
translate: {}, baseDir: str,
httpPrefix: str, domain: str, port: int,
domainFull: str,
cookie: str) -> bool:
"""Show the edit screen for a news post
"""
if '/users/' in path and '/editnewspost=' in path:
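            # identify the news post being edited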
postId = path.split('/editnewspost=')[1]
if '?' in postId:
postId = postId.split('?')[0]
postUrl = httpPrefix + '://' + domainFull + \
'/users/news/statuses/' + postId
path = path.split('/editnewspost=')[0]
msg = htmlEditNewsPost(self.server.cssCache,
translate, baseDir,
path, domain, port,
httpPrefix,
postUrl).encode('utf-8')
if msg:
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
else:
self._404()
self.server.GETbusy = False
return True
return False
def _editEvent(self, callingDomain: str, path: str,
httpPrefix: str, domain: str, domainFull: str,
baseDir: str, translate: {},
mediaInstance: bool,
cookie: str) -> bool:
"""Show edit event screen
"""
messageId = path.split('?editeventpost=')[1]
if '?' in messageId:
messageId = messageId.split('?')[0]
actor = path.split('?actor=')[1]
if '?' in actor:
actor = actor.split('?')[0]
nickname = getNicknameFromActor(path)
if nickname == actor:
# postUrl = \
# httpPrefix + '://' + \
# domainFull + '/users/' + nickname + \
# '/statuses/' + messageId
msg = None
# TODO
# htmlEditEvent(mediaInstance,
# translate,
# baseDir,
# httpPrefix,
# path,
# nickname, domain,
# postUrl)
if msg:
msg = msg.encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self.server.GETbusy = False
return True
return False
def do_GET(self):
callingDomain = self.server.domainFull
if self.headers.get('Host'):
callingDomain = self.headers['Host']
if self.server.onionDomain:
if callingDomain != self.server.domain and \
callingDomain != self.server.domainFull and \
callingDomain != self.server.onionDomain:
print('GET domain blocked: ' + callingDomain)
self._400()
return
else:
if callingDomain != self.server.domain and \
callingDomain != self.server.domainFull:
print('GET domain blocked: ' + callingDomain)
self._400()
return
GETstartTime = time.time()
GETtimings = {}
self._benchmarkGETtimings(GETstartTime, GETtimings, None, 'start')
# Since fediverse crawlers are quite active,
# make returning info to them high priority
# get nodeinfo endpoint
if self._nodeinfo(callingDomain):
return
self._benchmarkGETtimings(GETstartTime, GETtimings,
'start', '_nodeinfo(callingDomain)')
# minimal mastodon api
if self._mastoApi(callingDomain):
return
self._benchmarkGETtimings(GETstartTime, GETtimings,
'_nodeinfo(callingDomain)',
'_mastoApi(callingDomain)')
if self.path == '/logout':
if not self.server.newsInstance:
msg = \
htmlLogin(self.server.cssCache,
self.server.translate,
self.server.baseDir, False).encode('utf-8')
self._logout_headers('text/html', len(msg), callingDomain)
self._write(msg)
else:
if callingDomain.endswith('.onion') and \
self.server.onionDomain:
self._logout_redirect('http://' +
self.server.onionDomain +
'/users/news', None,
callingDomain)
elif (callingDomain.endswith('.i2p') and
self.server.i2pDomain):
self._logout_redirect('http://' +
self.server.i2pDomain +
'/users/news', None,
callingDomain)
else:
self._logout_redirect(self.server.httpPrefix +
'://' +
self.server.domainFull +
'/users/news',
None, callingDomain)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'_nodeinfo(callingDomain)',
'logout')
return
self._benchmarkGETtimings(GETstartTime, GETtimings,
'_nodeinfo(callingDomain)',
'show logout')
# replace https://domain/@nick with https://domain/users/nick
if self.path.startswith('/@'):
self.path = self.path.replace('/@', '/users/')
# redirect music to #nowplaying list
if self.path == '/music' or self.path == '/nowplaying':
self.path = '/tags/nowplaying'
if self.server.debug:
print('DEBUG: GET from ' + self.server.baseDir +
' path: ' + self.path + ' busy: ' +
str(self.server.GETbusy))
if self.server.debug:
print(str(self.headers))
cookie = None
if self.headers.get('Cookie'):
cookie = self.headers['Cookie']
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show logout', 'get cookie')
# manifest for progressive web apps
if '/manifest.json' in self.path:
self._progressiveWebAppManifest(callingDomain,
GETstartTime, GETtimings)
return
# favicon image
if 'favicon.ico' in self.path:
self._getFavicon(callingDomain, self.server.baseDir,
self.server.debug)
return
# check authorization
authorized = self._isAuthorized()
if self.server.debug:
if authorized:
print('GET Authorization granted')
else:
print('GET Not authorized')
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show logout', 'isAuthorized')
if not self.server.session:
print('Starting new session during GET')
self.server.session = createSession(self.server.proxyType)
if not self.server.session:
                print('ERROR: GET failed to create session during GET')
self._404()
self._benchmarkGETtimings(GETstartTime, GETtimings,
'isAuthorized', 'session fail')
return
self._benchmarkGETtimings(GETstartTime, GETtimings,
'isAuthorized', 'create session')
# is this a html request?
htmlGET = False
if self._hasAccept(callingDomain):
if self._requestHTTP():
htmlGET = True
else:
if self.headers.get('Connection'):
# https://developer.mozilla.org/en-US/
# docs/Web/HTTP/Protocol_upgrade_mechanism
if self.headers.get('Upgrade'):
print('HTTP Connection request: ' +
self.headers['Upgrade'])
else:
print('HTTP Connection request: ' +
self.headers['Connection'])
self._200()
else:
print('WARN: No Accept header ' + str(self.headers))
self._400()
return
self._benchmarkGETtimings(GETstartTime, GETtimings,
'create session', 'hasAccept')
# get fonts
if '/fonts/' in self.path:
self._getFonts(callingDomain, self.path,
self.server.baseDir, self.server.debug,
GETstartTime, GETtimings)
return
self._benchmarkGETtimings(GETstartTime, GETtimings,
'hasAccept', 'fonts')
# treat shared inbox paths consistently
if self.path == '/sharedInbox' or \
self.path == '/users/inbox' or \
self.path == '/actor/inbox' or \
self.path == '/users/'+self.server.domain:
# if shared inbox is not enabled
if not self.server.enableSharedInbox:
self._503()
return
self.path = '/inbox'
self._benchmarkGETtimings(GETstartTime, GETtimings,
'fonts', 'sharedInbox enabled')
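        # RSS feed of the instance newswire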
if self.path == '/newswire.xml':
self._getNewswireFeed(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.port,
self.server.proxyType,
GETstartTime, GETtimings,
self.server.debug)
return
# RSS 2.0
if self.path.startswith('/blog/') and \
self.path.endswith('/rss.xml'):
            if self.path != '/blog/rss.xml':
self._getRSS2feed(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.port,
self.server.proxyType,
GETstartTime, GETtimings,
self.server.debug)
else:
self._getRSS2site(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domainFull,
self.server.port,
self.server.proxyType,
self.server.translate,
GETstartTime, GETtimings,
self.server.debug)
return
self._benchmarkGETtimings(GETstartTime, GETtimings,
'sharedInbox enabled', 'rss2 done')
# RSS 3.0
if self.path.startswith('/blog/') and \
self.path.endswith('/rss.txt'):
self._getRSS3feed(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.port,
self.server.proxyType,
GETstartTime, GETtimings,
self.server.debug)
return
self._benchmarkGETtimings(GETstartTime, GETtimings,
'sharedInbox enabled', 'rss3 done')
# show the main blog page
if htmlGET and (self.path == '/blog' or
self.path == '/blog/' or
self.path == '/blogs' or
self.path == '/blogs/'):
if '/rss.xml' not in self.path:
if not self.server.session:
print('Starting new session during blog view')
self.server.session = \
createSession(self.server.proxyType)
if not self.server.session:
print('ERROR: GET failed to create session ' +
'during blog view')
self._404()
return
msg = htmlBlogView(authorized,
self.server.session,
self.server.baseDir,
self.server.httpPrefix,
self.server.translate,
self.server.domain,
self.server.port,
maxPostsInBlogsFeed)
if msg is not None:
msg = msg.encode('utf-8')
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'rss3 done', 'blog view')
return
self._404()
return
self._benchmarkGETtimings(GETstartTime, GETtimings,
'rss3 done', 'blog view done')
# show a particular page of blog entries
# for a particular account
if htmlGET and self.path.startswith('/blog/'):
if '/rss.xml' not in self.path:
if self._showBlogPage(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
cookie, self.server.translate,
self.server.debug):
2020-02-27 20:33:49 +00:00
return
2020-02-25 13:35:41 +00:00
2020-08-05 12:05:39 +00:00
# list of registered devices for e2ee
# see https://github.com/tootsuite/mastodon/pull/13820
2020-08-05 12:07:00 +00:00
if authorized and '/users/' in self.path:
2020-08-05 12:05:39 +00:00
if self.path.endswith('/collections/devices'):
                nickname = self.path.split('/users/')[1]
                if '/' in nickname:
                    nickname = nickname.split('/')[0]
2020-08-06 20:16:42 +00:00
devJson = E2EEdevicesCollection(self.server.baseDir,
nickname,
self.server.domain,
self.server.domainFull,
self.server.httpPrefix)
2020-08-05 12:05:39 +00:00
msg = json.dumps(devJson,
ensure_ascii=False).encode('utf-8')
self._set_headers('application/json',
len(msg),
None, callingDomain)
self._write(msg)
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'blog page',
'registered devices')
return
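            # A hedged sketch of how an authorized client might request
            # the devices collection served above. The endpoint path comes
            # from the check above; the Authorization header value is an
            # assumption for illustration, since this handler actually
            # relies on the cookie / basic auth logic elsewhere in this
            # file. The helper below is not used by the handler.
            def _exampleFetchDevicesCollection(instanceUrl: str,
                                               nickname: str,
                                               authHeader: str) -> dict:
                """Illustrative sketch only: GET the e2ee devices
                collection for an account and decode the JSON reply."""
                import json as _json
                import urllib.request
                url = instanceUrl + '/users/' + nickname + \
                    '/collections/devices'
                req = urllib.request.Request(url,
                                             headers={'Authorization':
                                                      authHeader})
                with urllib.request.urlopen(req) as response:  # nosec
                    return _json.loads(response.read().decode('utf-8'))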
self._benchmarkGETtimings(GETstartTime, GETtimings,
'blog view done',
'registered devices done')
2020-08-05 12:05:39 +00:00
2020-03-22 21:16:02 +00:00
if htmlGET and '/users/' in self.path:
2020-02-25 09:34:45 +00:00
# show the person options screen with view/follow/block/report
if '?options=' in self.path:
2020-08-31 16:13:48 +00:00
self._showPersonOptions(callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
GETstartTime, GETtimings,
self.server.onionDomain,
self.server.i2pDomain,
cookie, self.server.debug)
2020-02-25 09:34:45 +00:00
return
2019-08-24 21:14:33 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'registered devices done',
'person options done')
2020-02-25 13:35:41 +00:00
# show blog post
2020-04-02 21:35:06 +00:00
blogFilename, nickname = \
self._pathContainsBlogLink(self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.path)
if blogFilename and nickname:
2020-04-02 21:35:06 +00:00
postJsonObject = loadJson(blogFilename)
2020-02-25 13:35:41 +00:00
if isBlogPost(postJsonObject):
2020-05-18 14:00:47 +00:00
msg = htmlBlogPost(authorized,
self.server.baseDir,
self.server.httpPrefix,
self.server.translate,
nickname, self.server.domain,
self.server.domainFull,
postJsonObject)
if msg is not None:
2020-06-14 19:06:10 +00:00
msg = msg.encode('utf-8')
2020-04-02 21:35:06 +00:00
self._set_headers('text/html', len(msg),
cookie, callingDomain)
2020-02-25 13:35:41 +00:00
self._write(msg)
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'person options done',
'blog post 2')
2020-02-25 13:35:41 +00:00
return
self._404()
return
2020-03-22 21:16:02 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'person options done',
'blog post 2 done')
2019-11-16 13:25:44 +00:00
2019-08-26 09:15:48 +00:00
# remove a shared item
if htmlGET and '?rmshare=' in self.path:
2020-04-02 21:35:06 +00:00
shareName = self.path.split('?rmshare=')[1]
2020-08-05 21:43:28 +00:00
shareName = urllib.parse.unquote_plus(shareName.strip())
2020-04-02 21:35:06 +00:00
usersPath = self.path.split('?rmshare=')[0]
actor = \
self.server.httpPrefix + '://' + \
self.server.domainFull + usersPath
2020-10-29 12:48:58 +00:00
msg = htmlRemoveSharedItem(self.server.cssCache,
self.server.translate,
2020-04-02 21:35:06 +00:00
self.server.baseDir,
actor, shareName,
callingDomain).encode('utf-8')
2019-08-26 09:15:48 +00:00
if not msg:
2020-04-02 21:35:06 +00:00
if callingDomain.endswith('.onion') and \
self.server.onionDomain:
actor = 'http://' + self.server.onionDomain + usersPath
2020-06-03 19:14:24 +00:00
elif (callingDomain.endswith('.i2p') and
self.server.i2pDomain):
2020-06-19 09:50:00 +00:00
actor = 'http://' + self.server.i2pDomain + usersPath
2020-04-02 21:35:06 +00:00
self._redirect_headers(actor + '/tlshares',
cookie, callingDomain)
return
self._set_headers('text/html', len(msg),
cookie, callingDomain)
2019-10-22 12:35:51 +00:00
self._write(msg)
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'blog post 2 done',
'remove shared item')
2019-08-26 09:15:48 +00:00
return
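        # The share name above arrives form-encoded, which is why
        # unquote_plus is used rather than unquote. A small illustrative
        # sketch of the decoding; the sample string in the docstring is
        # purely an example and the helper is not called here.
        def _exampleUnquoteShareName(encodedName: str) -> str:
            """Illustrative sketch only: decode a form-encoded share name,
            e.g. 'Fresh+apples+%26+pears' -> 'Fresh apples & pears'."""
            import urllib.parse
            return urllib.parse.unquote_plus(encodedName.strip())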
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'blog post 2 done',
'remove shared item done')
2019-11-16 13:25:44 +00:00
if self.path.startswith('/terms'):
            if callingDomain.endswith('.onion') and \
               self.server.onionDomain:
                msg = htmlTermsOfService(self.server.cssCache,
                                         self.server.baseDir, 'http',
                                         self.server.onionDomain)
            elif (callingDomain.endswith('.i2p') and
                  self.server.i2pDomain):
                msg = htmlTermsOfService(self.server.cssCache,
                                         self.server.baseDir, 'http',
                                         self.server.i2pDomain)
            else:
                msg = htmlTermsOfService(self.server.cssCache,
                                         self.server.baseDir,
                                         self.server.httpPrefix,
                                         self.server.domainFull)
msg = msg.encode('utf-8')
2020-04-02 21:35:06 +00:00
self._login_headers('text/html', len(msg), callingDomain)
2019-10-22 12:35:51 +00:00
self._write(msg)
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'blog post 2 done',
'terms of service shown')
2019-08-26 16:07:04 +00:00
return
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'blog post 2 done',
'terms of service done')
2019-11-16 13:25:44 +00:00
# show a list of who you are following
if htmlGET and authorized and '/users/' in self.path and \
self.path.endswith('/followingaccounts'):
nickname = getNicknameFromActor(self.path)
followingFilename = \
self.server.baseDir + '/accounts/' + \
nickname + '@' + self.server.domain + '/following.txt'
if not os.path.isfile(followingFilename):
self._404()
return
2020-10-29 12:48:58 +00:00
msg = htmlFollowingList(self.server.cssCache,
self.server.baseDir, followingFilename)
2020-06-28 21:25:53 +00:00
self._login_headers('text/html', len(msg), callingDomain)
2020-06-28 21:30:02 +00:00
self._write(msg.encode('utf-8'))
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'terms of service done',
'following accounts shown')
return
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'terms of service done',
'following accounts done')
2020-07-26 12:57:51 +00:00
if self.path.endswith('/about'):
            if callingDomain.endswith('.onion'):
                msg = \
                    htmlAbout(self.server.cssCache,
                              self.server.baseDir, 'http',
                              self.server.onionDomain,
                              None)
            elif callingDomain.endswith('.i2p'):
                msg = \
                    htmlAbout(self.server.cssCache,
                              self.server.baseDir, 'http',
                              self.server.i2pDomain,
                              None)
            else:
                msg = \
                    htmlAbout(self.server.cssCache,
                              self.server.baseDir,
                              self.server.httpPrefix,
                              self.server.domainFull,
                              self.server.onionDomain)
msg = msg.encode('utf-8')
2020-04-02 21:35:06 +00:00
self._login_headers('text/html', len(msg), callingDomain)
2019-10-22 12:35:51 +00:00
self._write(msg)
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'following accounts done',
'show about screen')
2019-08-26 16:07:04 +00:00
return
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'following accounts done',
'show about screen done')
2019-11-16 13:25:44 +00:00
2019-09-03 20:27:49 +00:00
# send robots.txt if asked
if self._robotsTxt():
return
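        # _robotsTxt is defined elsewhere in this class. A minimal
        # stand-in for a handler like it might look as follows; the
        # disallowed paths and the writeResponse callable are
        # illustrative assumptions, not the rules or methods this
        # instance actually uses.
        def _exampleRobotsTxt(writeResponse) -> None:
            """Illustrative sketch only: answer /robots.txt with a
            plain text crawl policy. writeResponse is assumed to send
            the headers and body; it is not a real method here."""
            robotsStr = 'User-agent: *\n' + \
                'Disallow: /login\n' + \
                'Disallow: /media/\n'
            writeResponse('text/plain; charset=utf-8',
                          robotsStr.encode('utf-8'))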
2020-03-22 21:16:02 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
2020-08-28 20:29:41 +00:00
'show about screen done',
2020-08-28 20:07:29 +00:00
'robots txt')
2019-11-16 13:25:44 +00:00
2019-08-18 20:11:26 +00:00
# if not authorized then show the login screen
2020-06-19 19:40:40 +00:00
if htmlGET and self.path != '/login' and \
not self._pathIsImage(self.path) and \
self.path != '/' and \
self.path != '/users/news/linksmobile' and \
self.path != '/users/news/newswiremobile':
if self._redirectToLoginScreen(callingDomain, self.path,
self.server.httpPrefix,
self.server.domainFull,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
authorized, self.server.debug):
return
2019-11-15 14:34:11 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'robots txt',
'show login screen done')
2019-11-16 13:25:44 +00:00
2019-07-21 09:09:28 +00:00
# get css
# Note that this comes before the busy flag to avoid conflicts
if self.path.endswith('.css'):
2020-09-02 10:57:50 +00:00
if self._getStyleSheet(callingDomain, self.path,
GETstartTime, GETtimings):
2019-07-21 09:09:28 +00:00
return
2019-10-23 12:04:08 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show login screen done',
'profile.css done')
2019-11-16 13:25:44 +00:00
2020-08-13 20:08:15 +00:00
# manifest images used to create a home screen icon
# when selecting "add to home screen" in browsers
# which support progressive web apps
2020-08-13 19:46:45 +00:00
if self.path == '/logo72.png' or \
2020-08-13 18:45:41 +00:00
self.path == '/logo96.png' or \
self.path == '/logo128.png' or \
self.path == '/logo144.png' or \
self.path == '/logo152.png' or \
self.path == '/logo192.png' or \
self.path == '/logo256.png' or \
2020-08-13 19:46:45 +00:00
self.path == '/logo512.png':
mediaFilename = \
self.server.baseDir + '/img' + self.path
if os.path.isfile(mediaFilename):
if self._etag_exists(mediaFilename):
# The file has not changed
self._304()
return
tries = 0
mediaBinary = None
while tries < 5:
try:
with open(mediaFilename, 'rb') as avFile:
mediaBinary = avFile.read()
break
except Exception as e:
print(e)
time.sleep(1)
tries += 1
if mediaBinary:
self._set_headers_etag(mediaFilename,
'image/png',
mediaBinary, cookie,
callingDomain)
self._write(mediaBinary)
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'profile.css done',
'manifest logo shown')
2020-08-13 19:46:45 +00:00
return
self._404()
return
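        # The logo route above answers 304 when the browser already holds
        # the current file, using an etag derived from the file content
        # (the same sha1 based etags appear in do_HEAD further down).
        # A condensed sketch of that pattern follows; the helper is
        # illustrative and not wired into the handler.
        def _exampleEtagUnchanged(mediaFilename: str,
                                  ifNoneMatchHeader: str) -> bool:
            """Illustrative sketch only: return True if the client's
            If-None-Match value matches the stored etag for a file,
            in which case a 304 (not modified) can be sent."""
            import os
            from hashlib import sha1
            etagFilename = mediaFilename + '.etag'
            if os.path.isfile(etagFilename):
                with open(etagFilename, 'r') as etagFile:
                    etag = etagFile.read()
            else:
                with open(mediaFilename, 'rb') as mediaFile:
                    etag = sha1(mediaFile.read()).hexdigest()  # nosec
            return etag == ifNoneMatchHeader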
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'profile.css done',
'manifest logo done')
2020-08-14 11:46:07 +00:00
# manifest images used to show example screenshots
# for use by app stores
if self.path == '/screenshot1.jpg' or \
self.path == '/screenshot2.jpg':
screenFilename = \
self.server.baseDir + '/img' + self.path
if os.path.isfile(screenFilename):
if self._etag_exists(screenFilename):
# The file has not changed
self._304()
return
tries = 0
mediaBinary = None
while tries < 5:
try:
with open(screenFilename, 'rb') as avFile:
mediaBinary = avFile.read()
break
except Exception as e:
print(e)
time.sleep(1)
tries += 1
if mediaBinary:
self._set_headers_etag(screenFilename,
                                           'image/jpeg',
mediaBinary, cookie,
callingDomain)
self._write(mediaBinary)
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'manifest logo done',
'show screenshot')
2020-08-14 11:46:07 +00:00
return
self._404()
return
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'manifest logo done',
'show screenshot done')
2020-08-13 19:46:45 +00:00
# image on login screen or qrcode
if self.path == '/login.png' or \
self.path == '/login.gif' or \
self.path == '/login.webp' or \
self.path == '/login.avif' or \
self.path == '/login.jpeg' or \
self.path == '/login.jpg' or \
self.path == '/qrcode.png':
2020-08-14 11:46:07 +00:00
iconFilename = \
self.server.baseDir + '/accounts' + self.path
2020-08-14 11:46:07 +00:00
if os.path.isfile(iconFilename):
if self._etag_exists(iconFilename):
2020-04-13 19:44:01 +00:00
# The file has not changed
self._304()
return
2020-04-02 21:35:06 +00:00
tries = 0
mediaBinary = None
while tries < 5:
2019-10-14 21:38:03 +00:00
try:
2020-08-14 11:46:07 +00:00
with open(iconFilename, 'rb') as avFile:
2020-04-02 21:35:06 +00:00
mediaBinary = avFile.read()
2019-10-14 21:38:03 +00:00
break
except Exception as e:
print(e)
time.sleep(1)
2020-04-02 21:35:06 +00:00
tries += 1
2019-10-14 21:38:03 +00:00
if mediaBinary:
2020-08-14 11:46:07 +00:00
self._set_headers_etag(iconFilename,
'image/png',
mediaBinary, cookie,
callingDomain)
2019-10-22 12:35:51 +00:00
self._write(mediaBinary)
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show screenshot done',
'login screen logo')
2019-10-14 21:29:46 +00:00
return
2019-08-16 22:21:34 +00:00
self._404()
2019-10-23 12:04:08 +00:00
return
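        # The logo, screenshot and login image routes above each repeat
        # the same read-with-retries loop. A sketch of a shared helper
        # that could replace those copies; it is illustrative and not
        # wired into the handler.
        def _exampleReadBinaryWithRetries(filename: str, maxTries: int = 5):
            """Illustrative sketch only: read a binary file, retrying a
            few times in case it is still being written, and return the
            bytes or None on failure."""
            import time
            tries = 0
            while tries < maxTries:
                try:
                    with open(filename, 'rb') as binFile:
                        return binFile.read()
                except OSError as ex:
                    print(ex)
                    time.sleep(1)
                    tries += 1
            return None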
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show screenshot done',
'login screen logo done')
2019-11-16 13:25:44 +00:00
2020-06-21 16:12:42 +00:00
# QR code for account handle
if '/users/' in self.path and \
self.path.endswith('/qrcode.png'):
2020-09-02 11:20:12 +00:00
if self._showQRcode(callingDomain, self.path,
self.server.baseDir,
self.server.domain,
self.server.port,
GETstartTime, GETtimings):
return
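        # The account QR code served above is generated elsewhere
        # (savePersonQrcode in person.py, using the pyqrcode module
        # imported at the top of this file). A minimal sketch of
        # producing such a PNG for a handle; pyqrcode needs the pypng
        # package for .png() output, and the filename argument here is
        # an assumption for illustration only.
        def _exampleSaveHandleQrcode(nickname: str, domain: str,
                                     pngFilename: str) -> None:
            """Illustrative sketch only: render an account handle as a
            QR code image."""
            import pyqrcode
            handle = nickname + '@' + domain
            qrCode = pyqrcode.create(handle)
            qrCode.png(pngFilename, scale=6)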
2019-10-23 12:04:08 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'login screen logo done',
'account qrcode done')
2020-06-10 12:59:01 +00:00
# search screen banner image
2020-10-02 14:37:05 +00:00
if '/users/' in self.path:
if self.path.endswith('/search_banner.png'):
if self._searchScreenBanner(callingDomain, self.path,
self.server.baseDir,
self.server.domain,
self.server.port,
GETstartTime, GETtimings):
return
2020-10-02 14:41:01 +00:00
if self.path.endswith('/left_col_image.png'):
if self._columImage('left', callingDomain, self.path,
self.server.baseDir,
self.server.domain,
self.server.port,
GETstartTime, GETtimings):
return
2020-10-02 14:37:05 +00:00
2020-10-02 14:41:01 +00:00
if self.path.endswith('/right_col_image.png'):
if self._columImage('right', callingDomain, self.path,
self.server.baseDir,
self.server.domain,
self.server.port,
GETstartTime, GETtimings):
return
2020-06-10 12:59:01 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'account qrcode done',
'search screen banner done')
2019-11-16 13:25:44 +00:00
2020-07-25 22:42:07 +00:00
if '-background.' in self.path:
if self._showBackgroundImage(callingDomain, self.path,
self.server.baseDir,
GETstartTime, GETtimings):
return
2019-10-23 12:04:08 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'search screen banner done',
'background shown done')
2019-11-16 13:25:44 +00:00
2019-08-09 12:50:49 +00:00
# emoji images
if '/emoji/' in self.path:
2020-08-31 18:00:40 +00:00
self._showEmoji(callingDomain, self.path,
self.server.baseDir,
GETstartTime, GETtimings)
2019-08-09 12:50:49 +00:00
return
2019-10-23 12:04:08 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'background shown done',
'show emoji done')
2019-11-16 13:25:44 +00:00
2019-07-12 19:33:34 +00:00
# show media
# Note that this comes before the busy flag to avoid conflicts
if '/media/' in self.path:
2020-08-31 17:55:13 +00:00
self._showMedia(callingDomain,
self.path, self.server.baseDir,
GETstartTime, GETtimings)
2019-07-16 16:10:52 +00:00
return
2019-10-23 12:04:08 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show emoji done',
'show media done')
2019-11-16 13:25:44 +00:00
2019-07-23 12:33:09 +00:00
# show shared item images
# Note that this comes before the busy flag to avoid conflicts
if '/sharefiles/' in self.path:
if self._showShareImage(callingDomain, self.path,
self.server.baseDir,
GETstartTime, GETtimings):
return
2019-10-23 12:04:08 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show media done',
'share files done')
2019-11-16 13:25:44 +00:00
2019-07-26 10:30:13 +00:00
# icon images
# Note that this comes before the busy flag to avoid conflicts
if self.path.startswith('/icons/'):
2020-08-31 18:15:53 +00:00
self._showIcon(callingDomain, self.path,
self.server.baseDir,
GETstartTime, GETtimings)
2019-07-26 10:30:13 +00:00
return
2019-10-23 12:04:08 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show files done',
'icon shown done')
2019-11-16 13:25:44 +00:00
2019-09-14 17:29:55 +00:00
# cached avatar images
# Note that this comes before the busy flag to avoid conflicts
if self.path.startswith('/avatars/'):
self._showCachedAvatar(callingDomain, self.path,
self.server.baseDir,
GETstartTime, GETtimings)
2019-09-14 17:29:55 +00:00
return
2019-11-14 14:40:51 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'icon shown done',
'avatar shown done')
2019-11-16 13:25:44 +00:00
2019-07-12 16:09:25 +00:00
# show avatar or background image
# Note that this comes before the busy flag to avoid conflicts
2020-09-02 12:18:49 +00:00
if self._showAvatarOrBackground(callingDomain, self.path,
self.server.baseDir,
self.server.domain,
GETstartTime, GETtimings):
return
2019-07-22 15:14:39 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'icon shown done',
'avatar background shown done')
2019-11-16 13:25:44 +00:00
2019-07-22 15:14:39 +00:00
# This busy state helps to avoid flooding
# Resources which are expected to be called from a web page
# should be above this
if self.server.GETbusy:
2020-04-02 21:35:06 +00:00
currTimeGET = int(time.time())
if currTimeGET - self.server.lastGET == 0:
if self.server.debug:
print('DEBUG: GET Busy')
2020-03-22 21:16:02 +00:00
self.send_response(429)
self.end_headers()
return
2020-04-02 21:35:06 +00:00
self.server.lastGET = currTimeGET
self.server.GETbusy = True
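        # The GETbusy/lastGET pair above is a very small rate limiter:
        # if a second GET arrives within the same one second window it
        # is answered with 429 (too many requests). The same idea in
        # isolation, as an illustrative sketch not used by the handler:
        def _exampleTooManyRequests(lastRequestTime: int) -> bool:
            """Illustrative sketch only: return True if another request
            arrived within the same one second window, in which case the
            caller would reply with 429 rather than doing any work."""
            import time
            currTime = int(time.time())
            return currTime - lastRequestTime == 0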
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
2020-08-28 20:29:41 +00:00
'avatar background shown done',
2020-08-28 20:07:29 +00:00
'GET busy time')
2019-11-16 13:25:44 +00:00
if not self._permittedDir(self.path):
if self.server.debug:
print('DEBUG: GET Not permitted')
self._404()
2020-04-02 21:35:06 +00:00
self.server.GETbusy = False
return
2020-08-28 20:07:29 +00:00
# get webfinger endpoint for a person
2020-03-27 12:18:11 +00:00
if self._webfinger(callingDomain):
2020-04-02 21:35:06 +00:00
self.server.GETbusy = False
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'GET busy time',
'webfinger called')
return
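        # The webfinger endpoint answered above follows RFC 7033: a GET
        # on /.well-known/webfinger?resource=acct:nickname@domain returns
        # a JSON resource descriptor. A hedged client-side sketch of
        # such a lookup; the handle format is the usual fediverse one
        # and the helper is not called by this handler.
        def _exampleWebfingerLookup(httpPrefix: str, domain: str,
                                    handle: str) -> dict:
            """Illustrative sketch only: query a webfinger endpoint for
            an account handle such as 'alice@example.com'."""
            import json as _json
            import urllib.parse
            import urllib.request
            url = httpPrefix + '://' + domain + \
                '/.well-known/webfinger?resource=' + \
                urllib.parse.quote('acct:' + handle)
            with urllib.request.urlopen(url) as response:  # nosec
                return _json.loads(response.read().decode('utf-8'))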
2019-08-08 13:38:33 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'GET busy time',
'permitted directory')
2019-11-16 13:25:44 +00:00
2020-10-13 13:03:52 +00:00
# show the login screen
if (self.path.startswith('/login') or
(self.path == '/' and
not authorized and
not self.server.newsInstance)):
2019-07-24 22:38:42 +00:00
# request basic auth
2020-10-29 12:48:58 +00:00
msg = htmlLogin(self.server.cssCache,
self.server.translate,
2020-04-02 21:35:06 +00:00
self.server.baseDir).encode('utf-8')
self._login_headers('text/html', len(msg), callingDomain)
2019-10-22 12:35:51 +00:00
self._write(msg)
2020-04-02 21:35:06 +00:00
self.server.GETbusy = False
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'permitted directory',
'login shown')
2019-07-25 21:39:09 +00:00
return
2020-10-13 13:03:52 +00:00
# show the news front page
if self.path == '/' and \
not authorized and \
self.server.newsInstance:
if callingDomain.endswith('.onion') and \
self.server.onionDomain:
self._logout_redirect('http://' +
self.server.onionDomain +
'/users/news', None,
callingDomain)
elif (callingDomain.endswith('.i2p') and
self.server.i2pDomain):
self._logout_redirect('http://' +
self.server.i2pDomain +
'/users/news', None,
callingDomain)
else:
self._logout_redirect(self.server.httpPrefix +
'://' +
self.server.domainFull +
'/users/news',
None, callingDomain)
self._benchmarkGETtimings(GETstartTime, GETtimings,
'permitted directory',
'news front page shown')
return
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'permitted directory',
'login shown done')
2019-11-16 13:25:44 +00:00
2020-10-27 20:10:02 +00:00
if htmlGET and self.path.startswith('/users/') and \
self.path.endswith('/newswiremobile'):
2020-10-27 20:10:02 +00:00
if (authorized or
(not authorized and
self.path.startswith('/users/news/') and
self.server.newsInstance)):
2020-10-27 20:01:30 +00:00
nickname = getNicknameFromActor(self.path)
if not nickname:
self._404()
self.server.GETbusy = False
return
timelinePath = \
'/users/' + nickname + '/' + self.server.defaultTimeline
showPublishAsIcon = self.server.showPublishAsIcon
rssIconAtTop = self.server.rssIconAtTop
2020-10-31 16:22:07 +00:00
iconsAsButtons = self.server.iconsAsButtons
2020-11-01 11:09:52 +00:00
defaultTimeline = self.server.defaultTimeline
2020-10-29 12:48:58 +00:00
msg = htmlNewswireMobile(self.server.cssCache,
self.server.baseDir,
nickname,
self.server.domain,
self.server.domainFull,
self.server.httpPrefix,
self.server.translate,
self.server.newswire,
self.server.positiveVoting,
timelinePath,
showPublishAsIcon,
authorized,
rssIconAtTop,
iconsAsButtons,
defaultTimeline).encode('utf-8')
2020-10-27 20:01:30 +00:00
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
2020-10-12 13:17:38 +00:00
self.server.GETbusy = False
return
2020-10-30 18:16:07 +00:00
if htmlGET and self.path.startswith('/users/') and \
self.path.endswith('/linksmobile'):
2020-10-30 18:16:07 +00:00
if (authorized or
(not authorized and
self.path.startswith('/users/news/') and
self.server.newsInstance)):
2020-10-30 18:25:19 +00:00
nickname = getNicknameFromActor(self.path)
if not nickname:
self._404()
self.server.GETbusy = False
return
timelinePath = \
'/users/' + nickname + '/' + self.server.defaultTimeline
iconsAsButtons = self.server.iconsAsButtons
2020-11-01 11:09:52 +00:00
defaultTimeline = self.server.defaultTimeline
2020-10-30 18:25:19 +00:00
msg = htmlLinksMobile(self.server.cssCache,
self.server.baseDir, nickname,
self.server.domainFull,
self.server.httpPrefix,
self.server.translate,
timelinePath,
authorized,
self.server.rssIconAtTop,
iconsAsButtons,
defaultTimeline).encode('utf-8')
2020-10-30 18:25:19 +00:00
self._set_headers('text/html', len(msg), cookie, callingDomain)
self._write(msg)
2020-10-12 19:41:53 +00:00
self.server.GETbusy = False
return
2019-08-10 10:54:52 +00:00
# hashtag search
2019-12-13 10:33:33 +00:00
if self.path.startswith('/tags/') or \
(authorized and '/tags/' in self.path):
2020-09-26 18:23:43 +00:00
if self.path.startswith('/tags/rss2/'):
self._hashtagSearchRSS2(callingDomain,
self.path, cookie,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings)
return
2020-08-31 21:19:57 +00:00
self._hashtagSearch(callingDomain,
2020-08-31 21:18:00 +00:00
self.path, cookie,
2020-08-31 21:16:21 +00:00
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings)
2019-08-10 10:54:52 +00:00
return
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'login shown done',
'hashtag search done')
2019-11-16 13:25:44 +00:00
2020-05-23 14:23:56 +00:00
# show or hide buttons in the web interface
if htmlGET and '/users/' in self.path and \
self.path.endswith('/minimal') and \
authorized:
nickname = self.path.split('/users/')[1]
if '/' in nickname:
nickname = nickname.split('/')[0]
self._setMinimal(nickname, not self._isMinimal(nickname))
if not (self.server.mediaInstance or
self.server.blogsInstance):
2020-05-23 14:23:56 +00:00
self.path = '/users/' + nickname + '/inbox'
else:
if self.server.blogsInstance:
self.path = '/users/' + nickname + '/tlblogs'
2020-10-07 09:10:42 +00:00
elif self.server.mediaInstance:
2020-05-23 14:23:56 +00:00
self.path = '/users/' + nickname + '/tlmedia'
2020-10-07 09:10:42 +00:00
else:
self.path = '/users/' + nickname + '/tlnews'
2020-05-23 14:23:56 +00:00
2019-11-03 15:27:29 +00:00
# search for a fediverse address, shared item or emoji
# from the web interface by selecting search icon
2019-08-18 20:11:26 +00:00
if htmlGET and '/users/' in self.path:
2020-04-02 21:35:06 +00:00
if self.path.endswith('/search') or \
'/search?' in self.path:
if '?' in self.path:
self.path = self.path.split('?')[0]
# show the search screen
2020-10-29 12:48:58 +00:00
msg = htmlSearch(self.server.cssCache,
self.server.translate,
self.server.baseDir, self.path,
self.server.domain,
self.server.defaultTimeline).encode('utf-8')
2020-04-02 21:35:06 +00:00
self._set_headers('text/html', len(msg), cookie, callingDomain)
self._write(msg)
self.server.GETbusy = False
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'hashtag search done',
'search screen shown')
2020-04-02 21:35:06 +00:00
return
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'hashtag search done',
'search screen shown done')
2019-11-16 13:25:44 +00:00
2019-10-10 14:43:21 +00:00
# Show the calendar for a user
if htmlGET and '/users/' in self.path:
2020-04-02 21:35:06 +00:00
if '/calendar' in self.path:
# show the calendar screen
2020-10-29 12:48:58 +00:00
msg = htmlCalendar(self.server.cssCache,
self.server.translate,
2020-04-02 21:35:06 +00:00
self.server.baseDir, self.path,
self.server.httpPrefix,
2020-06-14 19:06:10 +00:00
self.server.domainFull).encode('utf-8')
2020-04-02 21:35:06 +00:00
self._set_headers('text/html', len(msg), cookie, callingDomain)
self._write(msg)
self.server.GETbusy = False
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'search screen shown done',
'calendar shown')
2020-04-02 21:35:06 +00:00
return
2019-07-30 22:34:04 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'search screen shown done',
'calendar shown done')
2020-02-23 12:20:54 +00:00
# Show confirmation for deleting a calendar event
if htmlGET and '/users/' in self.path:
2020-04-02 21:35:06 +00:00
if '/eventdelete' in self.path and \
'?time=' in self.path and \
'?id=' in self.path:
2020-09-02 12:32:05 +00:00
if self._confirmDeleteEvent(callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
cookie,
self.server.translate,
self.server.domainFull,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings):
2020-04-02 21:35:06 +00:00
return
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'calendar shown done',
'calendar delete shown done')
2019-11-16 13:25:44 +00:00
2019-08-19 20:01:29 +00:00
# search for emoji by name
if htmlGET and '/users/' in self.path:
2020-04-02 21:35:06 +00:00
if self.path.endswith('/searchemoji'):
# show the search screen
2020-10-29 12:48:58 +00:00
msg = htmlSearchEmojiTextEntry(self.server.cssCache,
self.server.translate,
2020-04-02 21:35:06 +00:00
self.server.baseDir,
2020-06-14 19:06:10 +00:00
self.path).encode('utf-8')
2020-04-02 21:35:06 +00:00
self._set_headers('text/html', len(msg),
cookie, callingDomain)
self._write(msg)
self.server.GETbusy = False
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'calendar delete shown done',
'emoji search shown')
2020-04-02 21:35:06 +00:00
return
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'calendar delete shown done',
'emoji search shown done')
2020-04-02 21:35:06 +00:00
repeatPrivate = False
2020-02-14 17:16:01 +00:00
if htmlGET and '?repeatprivate=' in self.path:
2020-04-02 21:35:06 +00:00
repeatPrivate = True
self.path = self.path.replace('?repeatprivate=', '?repeat=')
2020-08-31 21:50:43 +00:00
# announce/repeat button was pressed
2019-08-18 20:11:26 +00:00
if htmlGET and '?repeat=' in self.path:
2020-08-31 21:50:43 +00:00
self._announceButton(callingDomain, self.path,
self.server.baseDir,
cookie, self.server.proxyType,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
repeatPrivate,
self.server.debug)
2019-07-31 16:47:45 +00:00
return
2019-08-01 09:05:09 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'emoji search shown done',
'show announce done')
2019-11-16 13:25:44 +00:00
2020-02-14 17:16:01 +00:00
if htmlGET and '?unrepeatprivate=' in self.path:
2020-04-02 21:35:06 +00:00
self.path = self.path.replace('?unrepeatprivate=', '?unrepeat=')
2020-08-23 11:13:35 +00:00
2019-08-01 12:18:22 +00:00
# undo an announce/repeat from the web interface
2019-08-18 20:11:26 +00:00
if htmlGET and '?unrepeat=' in self.path:
2020-08-31 21:58:52 +00:00
self._undoAnnounceButton(callingDomain, self.path,
self.server.baseDir,
cookie, self.server.proxyType,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
repeatPrivate,
self.server.debug)
2019-08-01 12:18:22 +00:00
return
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show announce done',
'unannounce done')
2019-11-16 13:25:44 +00:00
2020-10-06 20:17:34 +00:00
# send a newswire moderation vote from the web interface
if authorized and '/newswirevote=' in self.path and \
self.path.startswith('/users/'):
self._newswireVote(callingDomain, self.path,
cookie,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
2020-10-06 20:22:48 +00:00
self.server.debug,
self.server.newswire)
2020-10-06 20:17:34 +00:00
return
# send a newswire moderation unvote from the web interface
if authorized and '/newswireunvote=' in self.path and \
2019-11-03 15:27:29 +00:00
self.path.startswith('/users/'):
2020-10-06 20:17:34 +00:00
self._newswireUnvote(callingDomain, self.path,
cookie,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
2020-10-06 20:22:48 +00:00
self.server.debug,
self.server.newswire)
2020-10-06 20:17:34 +00:00
return
# send a follow request approval from the web interface
if authorized and '/followapprove=' in self.path and \
self.path.startswith('/users/'):
self._followApproveButton(callingDomain, self.path,
cookie,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
self.server.debug)
2019-08-07 11:57:14 +00:00
return
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'unannounce done',
'follow approve done')
2019-11-16 13:25:44 +00:00
2019-08-07 11:58:01 +00:00
# deny a follow request from the web interface
if authorized and '/followdeny=' in self.path and \
2019-11-03 15:27:29 +00:00
self.path.startswith('/users/'):
2020-08-31 22:29:40 +00:00
self._followDenyButton(callingDomain, self.path,
cookie,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
self.server.debug)
2019-08-07 11:57:14 +00:00
return
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'follow approve done',
'follow deny done')
2019-11-16 13:25:44 +00:00
2019-08-01 09:05:09 +00:00
# like from the web interface icon
2019-11-14 17:58:46 +00:00
if htmlGET and '?like=' in self.path:
2020-09-01 09:03:50 +00:00
self._likeButton(callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
cookie,
self.server.debug)
2019-08-01 09:05:09 +00:00
return
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'follow deny done',
'like shown done')
2019-11-16 13:25:44 +00:00
2019-08-01 09:05:09 +00:00
# undo a like from the web interface icon
2019-11-14 17:58:46 +00:00
if htmlGET and '?unlike=' in self.path:
2020-09-01 09:20:13 +00:00
self._undoLikeButton(callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
cookie, self.server.debug)
2019-08-04 18:29:26 +00:00
return
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'like shown done',
'unlike shown done')
2019-11-17 14:01:49 +00:00
# bookmark from the web interface icon
if htmlGET and '?bookmark=' in self.path:
2020-09-01 09:27:58 +00:00
self._bookmarkButton(callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
cookie, self.server.debug)
2019-11-17 14:01:49 +00:00
return
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'unlike shown done',
'bookmark shown done')
2019-11-17 14:01:49 +00:00
# undo a bookmark from the web interface icon
if htmlGET and '?unbookmark=' in self.path:
self._undoBookmarkButton(callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType, cookie,
self.server.debug)
2019-11-17 14:01:49 +00:00
return
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'bookmark shown done',
'unbookmark shown done')
2019-11-16 13:25:44 +00:00
2020-09-01 09:42:44 +00:00
# delete button is pressed on a post
2019-08-18 20:11:26 +00:00
if htmlGET and '?delete=' in self.path:
2020-09-01 09:42:44 +00:00
self._deleteButton(callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType, cookie,
self.server.debug)
2019-12-01 13:45:30 +00:00
return
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'unbookmark shown done',
'delete shown done')
# The mute button is pressed
2019-12-01 13:45:30 +00:00
if htmlGET and '?mute=' in self.path:
2020-09-01 09:50:34 +00:00
self._muteButton(callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType, cookie,
self.server.debug)
2019-12-01 13:45:30 +00:00
return
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'delete shown done',
'post muted done')
2019-12-01 13:45:30 +00:00
# unmute a post from the web interface icon
if htmlGET and '?unmute=' in self.path:
2020-09-01 09:56:10 +00:00
self._undoMuteButton(callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType, cookie,
self.server.debug)
2019-08-01 09:05:09 +00:00
return
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'post muted done',
'unmute activated done')
2019-08-01 09:05:09 +00:00
# reply from the web interface icon
2020-04-02 21:35:06 +00:00
inReplyToUrl = None
# replyWithDM = False
replyToList = []
replyPageNumber = 1
shareDescription = None
# replytoActor = None
if htmlGET:
# public reply
if '?replyto=' in self.path:
2020-04-02 21:35:06 +00:00
inReplyToUrl = self.path.split('?replyto=')[1]
if '?' in inReplyToUrl:
2020-04-02 21:35:06 +00:00
mentionsList = inReplyToUrl.split('?')
for m in mentionsList:
if m.startswith('mention='):
2020-04-02 21:35:06 +00:00
replyHandle = m.replace('mention=', '')
2019-09-22 17:48:52 +00:00
if replyHandle not in replyToList:
replyToList.append(replyHandle)
2019-09-04 11:29:44 +00:00
if m.startswith('page='):
2020-04-02 21:35:06 +00:00
replyPageStr = m.replace('page=', '')
2019-09-04 11:29:44 +00:00
if replyPageStr.isdigit():
2020-04-02 21:35:06 +00:00
replyPageNumber = int(replyPageStr)
# if m.startswith('actor='):
# replytoActor = m.replace('actor=', '')
inReplyToUrl = mentionsList[0]
self.path = self.path.split('?replyto=')[0] + '/newpost'
if self.server.debug:
2020-04-02 21:35:06 +00:00
print('DEBUG: replyto path ' + self.path)
# reply to followers
if '?replyfollowers=' in self.path:
2020-04-02 21:35:06 +00:00
inReplyToUrl = self.path.split('?replyfollowers=')[1]
if '?' in inReplyToUrl:
2020-04-02 21:35:06 +00:00
mentionsList = inReplyToUrl.split('?')
for m in mentionsList:
if m.startswith('mention='):
2020-04-02 21:35:06 +00:00
replyHandle = m.replace('mention=', '')
if m.replace('mention=', '') not in replyToList:
2019-09-22 17:48:52 +00:00
replyToList.append(replyHandle)
2019-09-04 11:29:44 +00:00
if m.startswith('page='):
2020-04-02 21:35:06 +00:00
replyPageStr = m.replace('page=', '')
2019-09-04 11:29:44 +00:00
if replyPageStr.isdigit():
2020-04-02 21:35:06 +00:00
replyPageNumber = int(replyPageStr)
# if m.startswith('actor='):
# replytoActor = m.replace('actor=', '')
inReplyToUrl = mentionsList[0]
self.path = self.path.split('?replyfollowers=')[0] + \
'/newfollowers'
if self.server.debug:
2020-04-02 21:35:06 +00:00
print('DEBUG: replyfollowers path ' + self.path)
2019-08-02 09:52:12 +00:00
2020-04-02 21:35:06 +00:00
# replying as a direct message,
# for moderation posts or the dm timeline
if '?replydm=' in self.path:
2020-04-02 21:35:06 +00:00
inReplyToUrl = self.path.split('?replydm=')[1]
if '?' in inReplyToUrl:
2020-04-02 21:35:06 +00:00
mentionsList = inReplyToUrl.split('?')
for m in mentionsList:
if m.startswith('mention='):
2020-04-02 21:35:06 +00:00
replyHandle = m.replace('mention=', '')
if m.replace('mention=', '') not in replyToList:
replyToList.append(m.replace('mention=', ''))
2019-09-04 11:29:44 +00:00
if m.startswith('page='):
2020-04-02 21:35:06 +00:00
replyPageStr = m.replace('page=', '')
2019-09-04 11:29:44 +00:00
if replyPageStr.isdigit():
2020-04-02 21:35:06 +00:00
replyPageNumber = int(replyPageStr)
# if m.startswith('actor='):
# replytoActor = m.replace('actor=', '')
inReplyToUrl = mentionsList[0]
if inReplyToUrl.startswith('sharedesc:'):
2020-04-02 21:35:06 +00:00
shareDescription = \
inReplyToUrl.replace('sharedesc:', '')
2020-04-15 10:57:04 +00:00
shareDescription = \
urllib.parse.unquote_plus(shareDescription.strip())
2020-04-02 21:35:06 +00:00
self.path = self.path.split('?replydm=')[0]+'/newdm'
if self.server.debug:
2020-04-02 21:35:06 +00:00
print('DEBUG: replydm path ' + self.path)
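        # The three branches above (?replyto=, ?replyfollowers= and
        # ?replydm=) all pull the same mention= and page= parameters out
        # of the query string. A sketch of a shared parser that could
        # consolidate them; it is illustrative and not called here.
        def _exampleParseReplyParams(pathSuffix: str):
            """Illustrative sketch only: given the text after ?replyto=,
            return the url of the post being replied to, the mentioned
            handles and the page number."""
            replyUrl = pathSuffix
            mentions = []
            pageNumber = 1
            if '?' in pathSuffix:
                params = pathSuffix.split('?')
                replyUrl = params[0]
                for param in params[1:]:
                    if param.startswith('mention='):
                        handle = param.replace('mention=', '')
                        if handle not in mentions:
                            mentions.append(handle)
                    elif param.startswith('page='):
                        pageStr = param.replace('page=', '')
                        if pageStr.isdigit():
                            pageNumber = int(pageStr)
            return replyUrl, mentions, pageNumber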
2020-03-01 11:05:20 +00:00
# Edit a blog post
2020-03-01 11:15:27 +00:00
if authorized and \
'/tlblogs' in self.path and \
'?editblogpost=' in self.path and \
'?actor=' in self.path:
2020-04-02 21:35:06 +00:00
messageId = self.path.split('?editblogpost=')[1]
2020-03-01 11:05:20 +00:00
if '?' in messageId:
2020-04-02 21:35:06 +00:00
messageId = messageId.split('?')[0]
actor = self.path.split('?actor=')[1]
2020-03-01 11:05:20 +00:00
if '?' in actor:
2020-04-02 21:35:06 +00:00
actor = actor.split('?')[0]
nickname = getNicknameFromActor(self.path)
if nickname == actor:
postUrl = \
self.server.httpPrefix + '://' + \
self.server.domainFull + '/users/' + nickname + \
'/statuses/' + messageId
msg = htmlEditBlog(self.server.mediaInstance,
self.server.translate,
self.server.baseDir,
self.server.httpPrefix,
self.path,
replyPageNumber,
nickname, self.server.domain,
postUrl)
2020-03-01 12:33:20 +00:00
if msg:
2020-06-14 19:06:10 +00:00
msg = msg.encode('utf-8')
2020-04-02 21:35:06 +00:00
self._set_headers('text/html', len(msg),
cookie, callingDomain)
2020-03-01 12:33:20 +00:00
self._write(msg)
2020-04-02 21:35:06 +00:00
self.server.GETbusy = False
2020-03-01 12:33:20 +00:00
return
2020-03-01 11:05:20 +00:00
2020-08-26 18:06:02 +00:00
# Edit an event
if authorized and \
'/tlevents' in self.path and \
'?editeventpost=' in self.path and \
'?actor=' in self.path:
if self._editEvent(callingDomain, self.path,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.baseDir,
self.server.translate,
self.server.mediaInstance,
cookie):
return
2020-08-26 18:06:02 +00:00
# edit profile in web interface
if self._editProfile(callingDomain, self.path,
self.server.translate,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.port,
cookie):
return
2020-10-01 19:42:10 +00:00
# edit links from the left column of the timeline in web interface
if self._editLinks(callingDomain, self.path,
self.server.translate,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.port,
cookie):
return
2020-10-04 09:22:27 +00:00
# edit newswire from the right column of the timeline
if self._editNewswire(callingDomain, self.path,
self.server.translate,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.port,
cookie):
return
2020-10-10 19:14:36 +00:00
# edit news post
if self._editNewsPost(callingDomain, self.path,
self.server.translate,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.port,
2020-10-10 21:27:58 +00:00
self.server.domainFull,
2020-10-10 19:14:36 +00:00
cookie):
return
2020-09-02 17:09:51 +00:00
if self._showNewPost(callingDomain, self.path,
self.server.mediaInstance,
self.server.translate,
self.server.baseDir,
self.server.httpPrefix,
inReplyToUrl, replyToList,
shareDescription, replyPageNumber,
self.server.domain,
self.server.domainFull,
GETstartTime, GETtimings,
cookie):
return
2019-07-24 22:38:42 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'unmute activated done',
'new post done')
2019-11-16 13:25:44 +00:00
2019-07-06 21:33:46 +00:00
# get an individual post from the path /@nickname/statusnumber
if self._showIndividualAtPost(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
cookie, self.server.debug):
return
2019-09-28 11:29:42 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'new post done',
'individual post done')
2019-11-16 13:25:44 +00:00
2019-07-13 19:28:14 +00:00
# get replies to a post /users/nickname/statuses/number/replies
2019-07-13 20:23:42 +00:00
if self.path.endswith('/replies') or '/replies?page=' in self.path:
2020-09-01 13:12:00 +00:00
if self._showRepliesToPost(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType, cookie,
self.server.debug):
return
2019-07-13 19:28:14 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'individual post done',
'post replies done')
2019-11-16 13:25:44 +00:00
2019-07-22 17:21:45 +00:00
if self.path.endswith('/roles') and '/users/' in self.path:
if self._showRoles(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
cookie, self.server.debug):
return
2019-07-22 20:01:46 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'post replies done',
'show roles done')
2019-08-27 17:33:11 +00:00
# show skills on the profile page
2019-07-22 20:01:46 +00:00
if self.path.endswith('/skills') and '/users/' in self.path:
if self._showSkills(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
cookie, self.server.debug):
return
2019-07-22 20:01:46 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'post roles done',
'show skills done')
2019-11-16 13:25:44 +00:00
2020-04-02 21:35:06 +00:00
# get an individual post from the path
# /users/nickname/statuses/number
2019-07-06 21:24:47 +00:00
if '/statuses/' in self.path and '/users/' in self.path:
2020-09-01 12:40:40 +00:00
if self._showIndividualPost(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
cookie, self.server.debug):
return
2019-08-12 13:22:17 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show skills done',
'show status done')
2019-11-16 13:25:44 +00:00
2020-09-01 13:35:05 +00:00
# get the inbox timeline for a given person
2019-08-12 13:22:17 +00:00
if self.path.endswith('/inbox') or '/inbox?page=' in self.path:
2020-09-01 13:05:48 +00:00
if self._showInbox(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
cookie, self.server.debug,
self.server.recentPostsCache,
self.server.session,
self.server.defaultTimeline,
self.server.maxRecentPosts,
self.server.translate,
self.server.cachedWebfingers,
self.server.personCache,
self.server.allowDeletion,
self.server.projectVersion,
self.server.YTReplacementDomain):
2019-08-05 16:05:08 +00:00
return
2019-08-25 16:09:56 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show status done',
'show inbox done')
2019-11-16 13:25:44 +00:00
2020-09-01 13:35:05 +00:00
# get the direct messages timeline for a given person
2019-08-25 16:09:56 +00:00
if self.path.endswith('/dm') or '/dm?page=' in self.path:
2020-09-01 13:05:48 +00:00
if self._showDMs(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
cookie, self.server.debug):
2019-08-25 16:09:56 +00:00
return
2019-09-23 20:09:11 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show inbox done',
'show dms done')
2019-11-16 13:25:44 +00:00
2020-09-01 13:35:05 +00:00
# get the replies timeline for a given person
2019-09-23 20:43:18 +00:00
if self.path.endswith('/tlreplies') or '/tlreplies?page=' in self.path:
if self._showReplies(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
cookie, self.server.debug):
2019-09-28 11:29:42 +00:00
return
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show dms done',
'show replies 2 done')
2019-11-16 13:25:44 +00:00
2020-09-01 13:35:05 +00:00
# get the media timeline for a given person
2019-09-28 11:29:42 +00:00
if self.path.endswith('/tlmedia') or '/tlmedia?page=' in self.path:
2020-09-01 13:26:45 +00:00
if self._showMediaTimeline(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
cookie, self.server.debug):
2019-09-23 20:09:11 +00:00
return
2019-08-25 16:09:56 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show replies 2 done',
'show media 2 done')
2020-02-24 14:39:25 +00:00
# get the blogs for a given person
if self.path.endswith('/tlblogs') or '/tlblogs?page=' in self.path:
2020-09-01 13:35:05 +00:00
if self._showBlogsTimeline(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
cookie, self.server.debug):
2020-02-24 14:39:25 +00:00
return
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show media 2 done',
'show blogs 2 done')
2019-11-16 13:25:44 +00:00
2020-10-07 09:39:18 +00:00
# get the news for a given person
if self.path.endswith('/tlnews') or '/tlnews?page=' in self.path:
if self._showNewsTimeline(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
cookie, self.server.debug):
return
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show blogs 2 done',
'show news 2 done')
2019-11-02 14:31:39 +00:00
# get the shared items timeline for a given person
if self.path.endswith('/tlshares') or '/tlshares?page=' in self.path:
2020-09-01 14:06:15 +00:00
if self._showSharesTimeline(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
cookie, self.server.debug):
return
2019-11-17 14:01:49 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show blogs 2 done',
'show shares 2 done')
# get the bookmarks timeline for a given person
2020-04-02 21:35:06 +00:00
if self.path.endswith('/tlbookmarks') or \
'/tlbookmarks?page=' in self.path or \
self.path.endswith('/bookmarks') or \
'/bookmarks?page=' in self.path:
if self._showBookmarksTimeline(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
cookie, self.server.debug):
return
2019-11-17 14:01:49 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show shares 2 done',
'show bookmarks 2 done')
2020-08-23 11:13:35 +00:00
# get the events for a given person
if self.path.endswith('/tlevents') or \
'/tlevents?page=' in self.path or \
self.path.endswith('/events') or \
'/events?page=' in self.path:
2020-09-01 14:31:39 +00:00
if self._showEventsTimeline(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
cookie, self.server.debug):
return
2020-08-23 11:13:35 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show bookmarks 2 done',
'show events done')
2019-11-16 13:25:44 +00:00
2020-09-01 14:31:39 +00:00
# outbox timeline
if self._showOutboxTimeline(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
cookie, self.server.debug):
2019-06-29 20:21:37 +00:00
return
2019-07-23 12:33:09 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show events done',
'show outbox done')
2019-11-16 13:25:44 +00:00
2019-08-12 13:22:17 +00:00
# get the moderation feed for a moderator
2019-11-03 15:27:29 +00:00
if self.path.endswith('/moderation') or \
'/moderation?page=' in self.path:
2020-09-01 15:13:50 +00:00
if self._showModTimeline(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
cookie, self.server.debug):
return
2020-03-22 21:16:02 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show outbox done',
'show moderation done')
2019-11-16 13:25:44 +00:00
2020-09-01 16:21:41 +00:00
if self._showSharesFeed(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
cookie, self.server.debug):
return
2019-07-23 12:33:09 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show moderation done',
'show profile 2 done')
2019-11-16 13:25:44 +00:00
2020-09-01 16:28:22 +00:00
if self._showFollowingFeed(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
cookie, self.server.debug):
return
2019-11-16 13:25:44 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show profile 2 done',
'show profile 3 done')
2019-11-16 13:25:44 +00:00
2020-09-01 16:35:10 +00:00
if self._showFollowersFeed(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
cookie, self.server.debug):
2019-07-04 14:36:29 +00:00
return
2019-11-16 13:25:44 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show profile 3 done',
'show profile 4 done')
2019-11-16 13:25:44 +00:00
2019-06-28 18:55:29 +00:00
# look up a person
2020-09-01 16:47:27 +00:00
if self._showPersonProfile(authorized,
callingDomain, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
GETstartTime, GETtimings,
self.server.proxyType,
cookie, self.server.debug):
2019-06-28 18:55:29 +00:00
return
2019-11-16 13:25:44 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show profile 4 done',
'show profile posts done')
2019-11-16 13:25:44 +00:00
2019-06-28 18:55:29 +00:00
# check that a json file was requested
if not self.path.endswith('.json'):
2019-07-03 16:14:45 +00:00
if self.server.debug:
2020-04-02 21:35:06 +00:00
print('DEBUG: GET Not json: ' + self.path +
' ' + self.server.baseDir)
2019-06-28 18:55:29 +00:00
self._404()
2020-04-02 21:35:06 +00:00
self.server.GETbusy = False
2019-06-28 18:55:29 +00:00
return
2019-09-25 09:22:10 +00:00
if not self._fetchAuthenticated():
if self.server.debug:
print('WARN: Unauthenticated GET')
self._404()
2020-04-16 11:48:00 +00:00
self.server.GETbusy = False
2019-11-15 18:59:15 +00:00
return
2020-03-22 21:16:02 +00:00
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'show profile posts done',
'authenticated fetch')
2019-11-16 13:25:44 +00:00
2019-06-28 18:55:29 +00:00
# check that the file exists
2020-04-02 21:35:06 +00:00
filename = self.server.baseDir + self.path
2019-06-28 18:55:29 +00:00
if os.path.isfile(filename):
2019-07-03 19:15:42 +00:00
with open(filename, 'r', encoding='utf-8') as File:
2020-04-02 21:35:06 +00:00
content = File.read()
contentJson = json.loads(content)
msg = json.dumps(contentJson,
ensure_ascii=False).encode('utf-8')
self._set_headers('application/json',
len(msg),
None, callingDomain)
2019-10-22 12:35:51 +00:00
self._write(msg)
2020-08-28 20:07:29 +00:00
self._benchmarkGETtimings(GETstartTime, GETtimings,
'authenticated fetch',
'arbitrary json')
2019-06-28 18:55:29 +00:00
else:
2019-07-03 16:14:45 +00:00
if self.server.debug:
print('DEBUG: GET Unknown file')
2019-06-28 18:55:29 +00:00
self._404()
2020-04-02 21:35:06 +00:00
self.server.GETbusy = False
self._benchmarkGETtimings(GETstartTime, GETtimings,
'arbitrary json', 'end benchmarks')
def do_HEAD(self):
2020-04-02 21:35:06 +00:00
callingDomain = self.server.domainFull
2020-03-28 17:24:40 +00:00
if self.headers.get('Host'):
2020-04-02 21:35:06 +00:00
callingDomain = self.headers['Host']
2020-03-28 17:24:40 +00:00
if self.server.onionDomain:
if callingDomain != self.server.domain and \
callingDomain != self.server.domainFull and \
callingDomain != self.server.onionDomain:
2020-04-02 21:35:06 +00:00
print('HEAD domain blocked: ' + callingDomain)
2020-03-28 17:24:40 +00:00
self._400()
return
else:
if callingDomain != self.server.domain and \
callingDomain != self.server.domainFull:
2020-04-02 21:35:06 +00:00
print('HEAD domain blocked: ' + callingDomain)
2020-03-28 17:24:40 +00:00
self._400()
return
2020-04-02 21:35:06 +00:00
checkPath = self.path
etag = None
fileLength = -1
2019-12-04 13:54:59 +00:00
if '/media/' in self.path:
2020-08-31 17:55:13 +00:00
if self._pathIsImage(self.path) or \
self._pathIsVideo(self.path) or \
self._pathIsAudio(self.path):
2020-04-02 21:35:06 +00:00
mediaStr = self.path.split('/media/')[1]
mediaFilename = \
self.server.baseDir + '/media/' + mediaStr
2019-12-04 13:54:59 +00:00
if os.path.isfile(mediaFilename):
2020-04-02 21:35:06 +00:00
checkPath = mediaFilename
fileLength = os.path.getsize(mediaFilename)
mediaTagFilename = mediaFilename + '.etag'
if os.path.isfile(mediaTagFilename):
2019-12-04 13:54:59 +00:00
try:
2020-04-02 21:35:06 +00:00
with open(mediaTagFilename, 'r') as etagFile:
etag = etagFile.read()
except BaseException:
2019-12-04 13:54:59 +00:00
pass
else:
with open(mediaFilename, 'rb') as avFile:
2020-04-02 21:35:06 +00:00
mediaBinary = avFile.read()
2020-07-08 15:17:00 +00:00
etag = sha1(mediaBinary).hexdigest() # nosec
2019-12-04 13:54:59 +00:00
try:
2020-07-12 20:04:58 +00:00
with open(mediaTagFilename, 'w+') as etagFile:
2019-12-04 13:54:59 +00:00
etagFile.write(etag)
2020-04-02 21:35:06 +00:00
except BaseException:
2019-12-04 13:54:59 +00:00
pass
2020-04-02 21:35:06 +00:00
mediaFileType = 'application/json'
2019-12-04 13:54:59 +00:00
if checkPath.endswith('.png'):
2020-04-02 21:35:06 +00:00
mediaFileType = 'image/png'
2019-12-04 13:54:59 +00:00
elif checkPath.endswith('.jpg'):
2020-04-02 21:35:06 +00:00
mediaFileType = 'image/jpeg'
2019-12-04 13:54:59 +00:00
elif checkPath.endswith('.gif'):
2020-04-02 21:35:06 +00:00
mediaFileType = 'image/gif'
2019-12-04 13:54:59 +00:00
elif checkPath.endswith('.webp'):
2020-04-02 21:35:06 +00:00
mediaFileType = 'image/webp'
elif checkPath.endswith('.avif'):
mediaFileType = 'image/avif'
2019-12-04 13:54:59 +00:00
elif checkPath.endswith('.mp4'):
2020-04-02 21:35:06 +00:00
mediaFileType = 'video/mp4'
2019-12-04 13:54:59 +00:00
elif checkPath.endswith('.ogv'):
2020-04-02 21:35:06 +00:00
mediaFileType = 'video/ogg'
2019-12-04 13:54:59 +00:00
elif checkPath.endswith('.mp3'):
2020-04-02 21:35:06 +00:00
mediaFileType = 'audio/mpeg'
2019-12-04 13:54:59 +00:00
elif checkPath.endswith('.ogg'):
2020-04-02 21:35:06 +00:00
mediaFileType = 'audio/ogg'
self._set_headers_head(mediaFileType, fileLength,
etag, callingDomain)
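# Illustrative sketch, not called anywhere: the HEAD handler above derives a
# validator from the media bytes with sha1. Assuming _set_headers_head sends
# it as a standard ETag header, a client could revalidate a cached file with
# (hypothetical domain and filename):
#
#   curl -I https://yourdomain/media/example.png
#
# and the same digest can be recomputed locally:
#
#   from hashlib import sha1
#   with open('example.png', 'rb') as avFile:
#       print(sha1(avFile.read()).hexdigest())  # nosec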
def _receiveNewPostProcess(self, postType: str, path: str, headers: {},
length: int, postBytes, boundary: str,
2020-09-28 14:10:54 +00:00
callingDomain: str, cookie: str,
authorized: bool) -> int:
2019-11-01 21:23:59 +00:00
# Note: this needs to happen synchronously
2020-03-22 20:36:19 +00:00
# 0=this is not a new post
# 1=new post success
# -1=new post failed
# 2=new post canceled
2019-11-10 12:00:05 +00:00
if self.server.debug:
print('DEBUG: receiving POST')
2019-09-29 14:16:09 +00:00
if ' boundary=' in headers['Content-Type']:
2019-11-10 12:00:05 +00:00
if self.server.debug:
2020-04-02 21:35:06 +00:00
print('DEBUG: receiving POST headers ' +
headers['Content-Type'])
nickname = None
nicknameStr = path.split('/users/')[1]
2019-09-29 13:30:24 +00:00
if '/' in nicknameStr:
2020-04-02 21:35:06 +00:00
nickname = nicknameStr.split('/')[0]
2019-09-29 13:30:24 +00:00
else:
return -1
2020-04-02 21:35:06 +00:00
length = int(headers['Content-Length'])
if length > self.server.maxPostLength:
2019-09-29 13:30:24 +00:00
print('POST size too large')
return -1
2020-04-02 21:35:06 +00:00
boundary = headers['Content-Type'].split('boundary=')[1]
2019-09-29 13:30:24 +00:00
if ';' in boundary:
2020-04-02 21:35:06 +00:00
boundary = boundary.split(';')[0]
2019-09-29 13:30:24 +00:00
# Note: we don't use cgi here because it is deprecated
# and scheduled for removal from the standard library (PEP 594)
# Instead we use the multipart mime parser from the email module
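# A minimal sketch of that idea using only the standard library (an
# illustration only, not the actual extractMediaInFormPOST implementation):
#
#   from email.parser import BytesParser
#   from email.policy import default
#
#   def splitFormParts(postBytes: bytes, contentType: str) -> {}:
#       """Returns a dict of form field name -> payload bytes
#       for a multipart/form-data body"""
#       msg = BytesParser(policy=default).parsebytes(
#           b'Content-Type: ' + contentType.encode('utf-8') +
#           b'\r\n\r\n' + postBytes)
#       parts = {}
#       for part in msg.iter_parts():
#           name = part.get_param('name', header='content-disposition')
#           if name:
#               parts[name] = part.get_payload(decode=True)
#       return parts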
if self.server.debug:
print('DEBUG: extracting media from POST')
2020-04-02 21:35:06 +00:00
mediaBytes, postBytes = \
extractMediaInFormPOST(postBytes, boundary, 'attachpic')
2019-11-10 11:54:45 +00:00
if self.server.debug:
if mediaBytes:
2020-04-02 21:35:06 +00:00
print('DEBUG: media was found. ' +
str(len(mediaBytes)) + ' bytes')
2019-11-10 11:54:45 +00:00
else:
print('DEBUG: no media was found in POST')
2019-11-10 13:31:55 +00:00
# Note: a .temp extension is used here so that at no time is
# an image with metadata publicly exposed, even for a few milliseconds
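# removeMetaData (called further down) produces the cleaned copy. As an
# illustration only, and not necessarily how removeMetaData is implemented,
# EXIF and similar metadata can be stripped by re-encoding the pixels, e.g.
# with Pillow:
#
#   from PIL import Image
#
#   def stripImageMetadata(inFilename: str, outFilename: str) -> None:
#       """Saves a copy of the image without its original metadata"""
#       img = Image.open(inFilename)
#       clean = Image.new(img.mode, img.size)
#       clean.putdata(list(img.getdata()))
#       clean.save(outFilename)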
filenameBase = \
self.server.baseDir + '/accounts/' + \
nickname + '@' + self.server.domain + '/upload.temp'
2020-03-22 21:16:02 +00:00
2020-04-02 21:35:06 +00:00
filename, attachmentMediaType = \
saveMediaInFormPOST(mediaBytes, self.server.debug,
filenameBase)
2019-11-10 11:54:45 +00:00
if self.server.debug:
if filename:
2020-04-02 21:35:06 +00:00
print('DEBUG: POST media filename is ' + filename)
2019-11-10 11:54:45 +00:00
else:
print('DEBUG: no media filename in POST')
2019-11-10 13:31:55 +00:00
if filename:
if filename.endswith('.png') or \
filename.endswith('.jpg') or \
2019-12-04 18:52:27 +00:00
filename.endswith('.webp') or \
filename.endswith('.avif') or \
2019-11-10 13:31:55 +00:00
filename.endswith('.gif'):
2020-04-02 21:35:06 +00:00
postImageFilename = filename.replace('.temp', '')
2020-11-05 22:30:03 +00:00
print('Removing metadata from ' + postImageFilename)
2020-04-02 21:35:06 +00:00
removeMetaData(filename, postImageFilename)
2019-11-10 13:31:55 +00:00
if os.path.isfile(postImageFilename):
2020-04-02 21:35:06 +00:00
print('POST media saved to ' + postImageFilename)
2019-11-10 13:31:55 +00:00
else:
2020-04-02 21:35:06 +00:00
print('ERROR: POST media could not be saved to ' +
2020-03-29 10:31:59 +00:00
postImageFilename)
2019-11-10 13:31:55 +00:00
else:
if os.path.isfile(filename):
2020-04-02 21:35:06 +00:00
os.rename(filename, filename.replace('.temp', ''))
2019-11-10 13:31:55 +00:00
2020-04-02 21:35:06 +00:00
fields = \
extractTextFieldsInPOST(postBytes, boundary,
self.server.debug)
2019-11-10 11:54:45 +00:00
if self.server.debug:
if fields:
2020-04-02 21:35:06 +00:00
print('DEBUG: text field extracted from POST ' +
str(fields))
2019-11-10 11:54:45 +00:00
else:
print('WARN: no text fields could be extracted from POST')
2019-11-10 11:37:24 +00:00
2020-11-05 22:51:01 +00:00
# was the citations button pressed on the newblog screen?
citationsButtonPress = False
if postType == 'newblog' and fields.get('submitCitations'):
2020-11-06 10:40:07 +00:00
if fields['submitCitations'] == \
self.server.translate['Citations']:
2020-11-05 22:51:01 +00:00
citationsButtonPress = True
2020-11-05 23:18:48 +00:00
if not citationsButtonPress:
# process the received text fields from the POST
if not fields.get('message') and \
not fields.get('imageDescription'):
2020-11-05 22:51:01 +00:00
return -1
2020-11-05 23:18:48 +00:00
if fields.get('submitPost'):
2020-11-06 10:40:07 +00:00
if fields['submitPost'] != \
self.server.translate['Submit']:
2020-11-05 22:51:01 +00:00
return -1
2020-11-05 23:18:48 +00:00
else:
return 2
2019-07-27 20:30:58 +00:00
2019-09-29 13:30:24 +00:00
if not fields.get('imageDescription'):
2020-04-02 21:35:06 +00:00
fields['imageDescription'] = None
2019-09-29 13:30:24 +00:00
if not fields.get('subject'):
2020-04-02 21:35:06 +00:00
fields['subject'] = None
2019-09-29 13:30:24 +00:00
if not fields.get('replyTo'):
2020-04-02 21:35:06 +00:00
fields['replyTo'] = None
2020-08-21 19:51:35 +00:00
if not fields.get('schedulePost'):
2020-04-02 21:35:06 +00:00
fields['schedulePost'] = False
2020-01-12 20:35:39 +00:00
else:
2020-04-02 21:35:06 +00:00
fields['schedulePost'] = True
print('DEBUG: schedulePost ' + str(fields['schedulePost']))
2020-08-21 19:51:35 +00:00
2019-10-10 13:12:13 +00:00
if not fields.get('eventDate'):
2020-04-02 21:35:06 +00:00
fields['eventDate'] = None
2019-10-10 13:12:13 +00:00
if not fields.get('eventTime'):
2020-04-02 21:35:06 +00:00
fields['eventTime'] = None
2019-10-10 13:12:13 +00:00
if not fields.get('location'):
2020-04-02 21:35:06 +00:00
fields['location'] = None
2019-07-27 22:48:34 +00:00
2020-11-05 23:05:18 +00:00
if not citationsButtonPress:
# Store a file which contains the time in seconds
# since epoch when an attempt to post something was made.
# This is then used for active monthly users counts
lastUsedFilename = \
self.server.baseDir + '/accounts/' + \
nickname + '@' + self.server.domain + '/.lastUsed'
try:
lastUsedFile = open(lastUsedFilename, 'w+')
if lastUsedFile:
lastUsedFile.write(str(int(time.time())))
lastUsedFile.close()
except BaseException:
pass
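# As an illustration of how that .lastUsed timestamp can be consumed (this
# helper is a sketch and is not called anywhere in the daemon):
#
#   import os
#   import time
#
#   def countMonthlyActiveUsers(baseDir: str) -> int:
#       """Counts accounts whose .lastUsed file was written
#       within the last 30 days"""
#       total = 0
#       monthAgo = int(time.time()) - (30 * 24 * 60 * 60)
#       accountsDir = baseDir + '/accounts'
#       for handle in os.listdir(accountsDir):
#           lastUsedFilename = accountsDir + '/' + handle + '/.lastUsed'
#           if not os.path.isfile(lastUsedFilename):
#               continue
#           with open(lastUsedFilename, 'r') as lastUsedFile:
#               lastUsedStr = lastUsedFile.read().strip()
#           if lastUsedStr.isdigit() and int(lastUsedStr) > monthAgo:
#               total += 1
#       return total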
mentionsStr = ''
if fields.get('mentions'):
mentionsStr = fields['mentions'].strip() + ' '
2020-08-21 20:08:17 +00:00
if not fields.get('commentsEnabled'):
commentsEnabled = False
else:
commentsEnabled = True
2020-08-22 13:40:48 +00:00
if not fields.get('privateEvent'):
privateEvent = False
else:
privateEvent = True
2020-04-02 21:35:06 +00:00
if postType == 'newpost':
messageJson = \
createPublicPost(self.server.baseDir,
nickname,
self.server.domain,
self.server.port,
self.server.httpPrefix,
mentionsStr + fields['message'],
2020-08-21 16:10:47 +00:00
False, False, False, commentsEnabled,
2020-04-02 21:35:06 +00:00
filename, attachmentMediaType,
fields['imageDescription'],
self.server.useBlurHash,
fields['replyTo'], fields['replyTo'],
fields['subject'], fields['schedulePost'],
fields['eventDate'], fields['eventTime'],
2019-11-03 15:27:29 +00:00
fields['location'])
2019-09-29 13:30:24 +00:00
if messageJson:
2020-02-24 13:32:19 +00:00
if fields['schedulePost']:
return 1
2020-04-02 21:35:06 +00:00
if self._postToOutbox(messageJson, __version__, nickname):
populateReplies(self.server.baseDir,
self.server.httpPrefix,
self.server.domainFull,
messageJson,
self.server.maxReplies,
2020-02-24 13:32:19 +00:00
self.server.debug)
return 1
else:
2020-03-22 21:16:02 +00:00
return -1
2020-04-02 21:35:06 +00:00
elif postType == 'newblog':
2020-11-05 22:30:03 +00:00
# citations button on newblog screen
2020-11-05 22:51:01 +00:00
if citationsButtonPress:
messageJson = \
htmlCitations(self.server.baseDir,
nickname,
self.server.domain,
self.server.httpPrefix,
self.server.defaultTimeline,
self.server.translate,
self.server.newswire,
self.server.cssCache,
fields['subject'],
fields['message'],
filename, attachmentMediaType,
fields['imageDescription'])
if messageJson:
messageJson = messageJson.encode('utf-8')
self._set_headers('text/html',
len(messageJson),
cookie, callingDomain)
self._write(messageJson)
return 1
else:
return -1
2020-11-05 22:30:03 +00:00
# submit button on newblog screen
2020-04-02 21:35:06 +00:00
messageJson = \
createBlogPost(self.server.baseDir, nickname,
self.server.domain, self.server.port,
self.server.httpPrefix,
fields['message'],
2020-08-21 16:10:47 +00:00
False, False, False, commentsEnabled,
2020-04-02 21:35:06 +00:00
filename, attachmentMediaType,
fields['imageDescription'],
self.server.useBlurHash,
fields['replyTo'], fields['replyTo'],
2020-11-05 22:30:03 +00:00
fields['subject'],
fields['schedulePost'],
fields['eventDate'],
fields['eventTime'],
2020-02-24 13:32:19 +00:00
fields['location'])
if messageJson:
2020-01-12 13:02:39 +00:00
if fields['schedulePost']:
return 1
2020-04-02 21:35:06 +00:00
if self._postToOutbox(messageJson, __version__, nickname):
populateReplies(self.server.baseDir,
self.server.httpPrefix,
self.server.domainFull,
messageJson,
self.server.maxReplies,
2019-09-29 13:30:24 +00:00
self.server.debug)
return 1
else:
2020-03-22 21:16:02 +00:00
return -1
2020-04-02 21:35:06 +00:00
elif postType == 'editblogpost':
2020-03-01 19:44:40 +00:00
print('Edited blog post received')
2020-04-02 21:35:06 +00:00
postFilename = \
locatePost(self.server.baseDir,
nickname, self.server.domain,
2020-03-01 19:44:40 +00:00
fields['postUrl'])
if os.path.isfile(postFilename):
2020-04-02 21:35:06 +00:00
postJsonObject = loadJson(postFilename)
2020-03-01 19:44:40 +00:00
if postJsonObject:
2020-04-02 21:35:06 +00:00
cachedFilename = \
self.server.baseDir + '/accounts/' + \
nickname + '@' + self.server.domain + \
'/postcache/' + \
fields['postUrl'].replace('/', '#') + '.html'
2020-03-01 19:44:40 +00:00
if os.path.isfile(cachedFilename):
print('Edited blog post, removing cached html')
try:
os.remove(cachedFilename)
2020-04-02 21:35:06 +00:00
except BaseException:
2020-03-01 19:44:40 +00:00
pass
2020-03-01 20:15:07 +00:00
# remove from memory cache
2020-04-02 21:35:06 +00:00
removePostFromCache(postJsonObject,
2020-03-01 20:15:07 +00:00
self.server.recentPostsCache)
# change the blog post title
2020-04-02 21:35:06 +00:00
postJsonObject['object']['summary'] = fields['subject']
2020-03-01 20:15:07 +00:00
# format message
2020-04-02 21:35:06 +00:00
tags = []
hashtagsDict = {}
mentionedRecipients = []
fields['message'] = \
addHtmlTags(self.server.baseDir,
self.server.httpPrefix,
nickname, self.server.domain,
fields['message'],
mentionedRecipients,
hashtagsDict, True)
2020-03-01 20:15:07 +00:00
# replace emoji with unicode
2020-04-02 21:35:06 +00:00
tags = []
for tagName, tag in hashtagsDict.items():
2020-03-01 20:15:07 +00:00
tags.append(tag)
# get list of tags
2020-04-02 21:35:06 +00:00
fields['message'] = \
replaceEmojiFromTags(fields['message'],
tags, 'content')
2020-03-22 21:16:02 +00:00
2020-04-02 21:35:06 +00:00
postJsonObject['object']['content'] = fields['message']
2020-03-01 20:57:53 +00:00
2020-04-02 21:35:06 +00:00
imgDescription = ''
2020-03-01 20:57:53 +00:00
if fields.get('imageDescription'):
2020-04-02 21:35:06 +00:00
imgDescription = fields['imageDescription']
2020-03-01 20:57:53 +00:00
if filename:
2020-04-02 21:35:06 +00:00
postJsonObject['object'] = \
attachMedia(self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.port,
postJsonObject['object'],
filename,
attachmentMediaType,
imgDescription,
2020-03-01 21:14:57 +00:00
self.server.useBlurHash)
2020-03-01 20:57:53 +00:00
replaceYouTube(postJsonObject,
self.server.YTReplacementDomain)
2020-04-02 21:35:06 +00:00
saveJson(postJsonObject, postFilename)
print('Edited blog post, resaved ' + postFilename)
2020-03-01 19:44:40 +00:00
return 1
else:
2020-04-02 21:35:06 +00:00
print('Edited blog post, unable to load json for ' +
2020-03-29 10:31:59 +00:00
postFilename)
2020-03-01 19:44:40 +00:00
else:
2020-04-02 21:35:06 +00:00
print('Edited blog post not found ' +
str(fields['postUrl']))
2020-03-22 21:16:02 +00:00
return -1
2020-04-02 21:35:06 +00:00
elif postType == 'newunlisted':
messageJson = \
createUnlistedPost(self.server.baseDir,
nickname,
self.server.domain, self.server.port,
self.server.httpPrefix,
mentionsStr + fields['message'],
2020-08-21 16:10:47 +00:00
False, False, False, commentsEnabled,
2020-04-02 21:35:06 +00:00
filename, attachmentMediaType,
fields['imageDescription'],
self.server.useBlurHash,
fields['replyTo'],
fields['replyTo'],
fields['subject'],
fields['schedulePost'],
fields['eventDate'],
fields['eventTime'],
2019-11-03 15:27:29 +00:00
fields['location'])
2019-09-29 13:30:24 +00:00
if messageJson:
2020-01-12 13:02:39 +00:00
if fields['schedulePost']:
return 1
2020-04-02 21:35:06 +00:00
if self._postToOutbox(messageJson, __version__, nickname):
populateReplies(self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
messageJson,
self.server.maxReplies,
2019-09-29 13:30:24 +00:00
self.server.debug)
return 1
else:
return -1
2020-04-02 21:35:06 +00:00
elif postType == 'newfollowers':
messageJson = \
createFollowersOnlyPost(self.server.baseDir,
nickname,
self.server.domain,
self.server.port,
self.server.httpPrefix,
mentionsStr + fields['message'],
2020-04-02 21:35:06 +00:00
True, False, False,
2020-08-21 16:10:47 +00:00
commentsEnabled,
2020-04-02 21:35:06 +00:00
filename, attachmentMediaType,
fields['imageDescription'],
self.server.useBlurHash,
fields['replyTo'],
fields['replyTo'],
fields['subject'],
fields['schedulePost'],
fields['eventDate'],
fields['eventTime'],
2019-11-03 15:27:29 +00:00
fields['location'])
2019-09-29 13:30:24 +00:00
if messageJson:
2020-01-12 13:02:39 +00:00
if fields['schedulePost']:
return 1
2020-04-02 21:35:06 +00:00
if self._postToOutbox(messageJson, __version__, nickname):
populateReplies(self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
messageJson,
self.server.maxReplies,
2019-09-29 13:30:24 +00:00
self.server.debug)
return 1
else:
return -1
2020-08-21 11:08:31 +00:00
elif postType == 'newevent':
# A Mobilizon-type event is posted
# if there is no image description then make it the same
# as the event title
if not fields.get('imageDescription'):
fields['imageDescription'] = fields['subject']
# Events are public by default, with opt-in
# followers only status
if not fields.get('followersOnlyEvent'):
fields['followersOnlyEvent'] = False
2020-08-21 21:42:49 +00:00
if not fields.get('anonymousParticipationEnabled'):
anonymousParticipationEnabled = False
else:
anonymousParticipationEnabled = True
2020-08-22 18:02:43 +00:00
maximumAttendeeCapacity = 999999
if fields.get('maximumAttendeeCapacity'):
maximumAttendeeCapacity = \
2020-08-22 21:04:50 +00:00
int(fields['maximumAttendeeCapacity'])
2020-08-21 21:42:49 +00:00
2020-08-21 11:08:31 +00:00
messageJson = \
createEventPost(self.server.baseDir,
nickname,
self.server.domain,
self.server.port,
self.server.httpPrefix,
mentionsStr + fields['message'],
2020-08-22 13:40:48 +00:00
privateEvent,
2020-08-21 16:10:47 +00:00
False, False, commentsEnabled,
2020-08-21 11:08:31 +00:00
filename, attachmentMediaType,
fields['imageDescription'],
self.server.useBlurHash,
fields['subject'],
fields['schedulePost'],
fields['eventDate'],
fields['eventTime'],
2020-08-21 21:13:50 +00:00
fields['location'],
fields['category'],
fields['joinMode'],
fields['endDate'],
fields['endTime'],
2020-08-22 18:02:43 +00:00
maximumAttendeeCapacity,
2020-08-21 21:13:50 +00:00
fields['repliesModerationOption'],
2020-08-21 21:42:49 +00:00
anonymousParticipationEnabled,
2020-08-23 17:50:49 +00:00
fields['eventStatus'],
fields['ticketUrl'])
2020-08-21 11:08:31 +00:00
if messageJson:
if fields['schedulePost']:
return 1
if self._postToOutbox(messageJson, __version__, nickname):
return 1
else:
return -1
2020-04-02 21:35:06 +00:00
elif postType == 'newdm':
messageJson = None
2020-02-21 13:03:35 +00:00
print('A DM was posted')
if '@' in mentionsStr:
2020-04-02 21:35:06 +00:00
messageJson = \
createDirectMessagePost(self.server.baseDir,
nickname,
self.server.domain,
self.server.port,
self.server.httpPrefix,
mentionsStr +
2020-04-02 21:35:06 +00:00
fields['message'],
True, False, False,
2020-08-21 16:10:47 +00:00
commentsEnabled,
2020-04-02 21:35:06 +00:00
filename, attachmentMediaType,
fields['imageDescription'],
self.server.useBlurHash,
fields['replyTo'],
fields['replyTo'],
fields['subject'],
True, fields['schedulePost'],
fields['eventDate'],
fields['eventTime'],
2020-03-22 21:16:02 +00:00
fields['location'])
2019-09-29 13:30:24 +00:00
if messageJson:
2020-01-12 13:02:39 +00:00
if fields['schedulePost']:
return 1
2020-09-28 10:07:04 +00:00
print('Sending new DM to ' +
2020-04-02 21:35:06 +00:00
str(messageJson['object']['to']))
if self._postToOutbox(messageJson, __version__, nickname):
populateReplies(self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
messageJson,
self.server.maxReplies,
2019-09-29 13:30:24 +00:00
self.server.debug)
return 1
else:
return -1
elif postType == 'newreminder':
messageJson = None
handle = nickname + '@' + self.server.domainFull
print('A reminder was posted for ' + handle)
if '@' + handle not in mentionsStr:
mentionsStr = '@' + handle + ' ' + mentionsStr
messageJson = \
createDirectMessagePost(self.server.baseDir,
nickname,
self.server.domain,
self.server.port,
self.server.httpPrefix,
mentionsStr + fields['message'],
2020-08-21 16:10:47 +00:00
True, False, False, False,
filename, attachmentMediaType,
fields['imageDescription'],
self.server.useBlurHash,
None, None,
fields['subject'],
True, fields['schedulePost'],
fields['eventDate'],
fields['eventTime'],
fields['location'])
if messageJson:
if fields['schedulePost']:
return 1
print('DEBUG: new reminder to ' +
str(messageJson['object']['to']))
if self._postToOutbox(messageJson, __version__, nickname):
return 1
else:
return -1
2020-04-02 21:35:06 +00:00
elif postType == 'newreport':
2019-09-29 13:30:24 +00:00
if attachmentMediaType:
2020-04-02 21:35:06 +00:00
if attachmentMediaType != 'image':
2019-09-29 13:30:24 +00:00
return -1
# So as to be sure that this only goes to moderators
# and not accounts being reported we disable any
# included fediverse addresses by replacing '@' with '-at-'
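# For example a report mentioning '@somenick@otherdomain' is stored as
# '-at-somenick-at-otherdomain', so it cannot address the reported account.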
fields['message'] = fields['message'].replace('@', '-at-')
messageJson = \
createReportPost(self.server.baseDir,
nickname,
self.server.domain, self.server.port,
self.server.httpPrefix,
mentionsStr + fields['message'],
2020-08-21 16:10:47 +00:00
True, False, False, True,
2020-04-02 21:35:06 +00:00
filename, attachmentMediaType,
fields['imageDescription'],
self.server.useBlurHash,
self.server.debug, fields['subject'])
2019-09-29 13:30:24 +00:00
if messageJson:
2020-04-02 21:35:06 +00:00
if self._postToOutbox(messageJson, __version__, nickname):
2019-09-29 13:30:24 +00:00
return 1
else:
return -1
2020-04-02 21:35:06 +00:00
elif postType == 'newquestion':
2019-11-25 22:34:26 +00:00
if not fields.get('duration'):
return -1
if not fields.get('message'):
return -1
2020-04-02 21:35:06 +00:00
# questionStr = fields['message']
qOptions = []
2019-11-26 12:17:52 +00:00
for questionCtr in range(8):
2020-04-02 21:35:06 +00:00
if fields.get('questionOption' + str(questionCtr)):
qOptions.append(fields['questionOption' +
str(questionCtr)])
2019-11-25 22:34:26 +00:00
if not qOptions:
return -1
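# For example questionOption0='Yes' and questionOption1='No' in the form
# produce qOptions = ['Yes', 'No']; up to eight options
# (questionOption0..questionOption7) are read by the loop above.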
messageJson = \
createQuestionPost(self.server.baseDir,
nickname,
self.server.domain,
self.server.port,
self.server.httpPrefix,
fields['message'], qOptions,
False, False, False,
2020-08-21 16:10:47 +00:00
commentsEnabled,
2020-04-02 21:35:06 +00:00
filename, attachmentMediaType,
fields['imageDescription'],
self.server.useBlurHash,
fields['subject'],
int(fields['duration']))
2019-11-25 22:34:26 +00:00
if messageJson:
if self.server.debug:
print('DEBUG: new Question')
2020-04-02 21:35:06 +00:00
if self._postToOutbox(messageJson, __version__, nickname):
2019-11-25 22:34:26 +00:00
return 1
return -1
2020-04-02 21:35:06 +00:00
elif postType == 'newshare':
2019-09-29 13:30:24 +00:00
if not fields.get('itemType'):
return -1
if not fields.get('category'):
return -1
if not fields.get('location'):
return -1
if not fields.get('duration'):
return -1
if attachmentMediaType:
2020-04-02 21:35:06 +00:00
if attachmentMediaType != 'image':
2019-09-29 13:30:24 +00:00
return -1
2020-04-02 21:35:06 +00:00
durationStr = fields['duration']
2019-11-02 10:46:56 +00:00
if durationStr:
if ' ' not in durationStr:
2020-04-02 21:35:06 +00:00
durationStr = durationStr + ' days'
addShare(self.server.baseDir,
self.server.httpPrefix,
nickname,
self.server.domain, self.server.port,
fields['subject'],
fields['message'],
filename,
fields['itemType'],
fields['category'],
fields['location'],
2019-11-02 10:46:56 +00:00
durationStr,
2019-09-29 13:30:24 +00:00
self.server.debug)
if filename:
if os.path.isfile(filename):
os.remove(filename)
2020-04-02 21:35:06 +00:00
self.postToNickname = nickname
2019-09-29 13:30:24 +00:00
return 1
return -1
def _receiveNewPost(self, postType: str, path: str,
2020-09-28 14:10:54 +00:00
callingDomain: str, cookie: str,
authorized: bool) -> int:
2019-09-29 13:30:24 +00:00
"""A new post has been created
The post is read from the request and processed synchronously
"""
2020-04-02 21:35:06 +00:00
pageNumber = 1
2019-11-12 13:53:14 +00:00
if '/users/' not in path:
2020-04-02 21:35:06 +00:00
print('Not receiving new post for ' + path +
2020-03-29 10:31:59 +00:00
' because /users/ not in path')
2019-11-12 13:53:14 +00:00
return None
2020-04-02 21:35:06 +00:00
if '?' + postType + '?' not in path:
print('Not receiving new post for ' + path +
' because ?' + postType + '? not in path')
2019-09-29 13:58:05 +00:00
return None
2020-04-02 21:35:06 +00:00
print('New post begins: ' + postType + ' ' + path)
2019-09-29 14:06:53 +00:00
2019-09-29 13:58:05 +00:00
if '?page=' in path:
2020-04-02 21:35:06 +00:00
pageNumberStr = path.split('?page=')[1]
2019-09-29 13:58:05 +00:00
if '?' in pageNumberStr:
2020-04-02 21:35:06 +00:00
pageNumberStr = pageNumberStr.split('?')[0]
2020-05-22 12:41:47 +00:00
if '#' in pageNumberStr:
pageNumberStr = pageNumberStr.split('#')[0]
2019-09-29 13:58:05 +00:00
if pageNumberStr.isdigit():
2020-04-02 21:35:06 +00:00
pageNumber = int(pageNumberStr)
path = path.split('?page=')[0]
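# For example '/users/alice/outbox?page=3#top' (nickname is illustrative)
# yields pageNumber = 3 and path = '/users/alice/outbox'.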
# get the username who posted
2020-04-02 21:35:06 +00:00
newPostThreadName = None
2020-02-21 12:29:51 +00:00
if '/users/' in path:
2020-04-02 21:35:06 +00:00
newPostThreadName = path.split('/users/')[1]
2020-02-21 12:48:08 +00:00
if '/' in newPostThreadName:
2020-04-02 21:35:06 +00:00
newPostThreadName = newPostThreadName.split('/')[0]
2019-09-29 13:58:05 +00:00
if not newPostThreadName:
2020-04-02 21:35:06 +00:00
newPostThreadName = '*'
2020-02-21 12:29:51 +00:00
2019-09-29 13:58:05 +00:00
if self.server.newPostThread.get(newPostThreadName):
print('Waiting for previous new post thread to end')
2020-04-02 21:35:06 +00:00
waitCtr = 0
while (self.server.newPostThread[newPostThreadName].isAlive() and
waitCtr < 8):
2019-09-29 13:58:05 +00:00
time.sleep(1)
2020-04-02 21:35:06 +00:00
waitCtr += 1
if waitCtr >= 8:
print('Killing previous new post thread for ' +
newPostThreadName)
2019-09-29 13:58:05 +00:00
self.server.newPostThread[newPostThreadName].kill()
# make a copy of self.headers
2020-04-02 21:35:06 +00:00
headers = {}
headersWithoutCookie = {}
for dictEntryName, headerLine in self.headers.items():
headers[dictEntryName] = headerLine
if dictEntryName.lower() != 'cookie':
headersWithoutCookie[dictEntryName] = headerLine
print('New post headers: ' + str(headersWithoutCookie))
length = int(headers['Content-Length'])
if length > self.server.maxPostLength:
2019-11-16 11:29:57 +00:00
print('POST size too large')
2019-11-16 12:49:34 +00:00
return None
2019-11-16 11:29:57 +00:00
2019-11-22 18:37:07 +00:00
if not headers.get('Content-Type'):
if headers.get('Content-type'):
2020-04-02 21:35:06 +00:00
headers['Content-Type'] = headers['Content-type']
2019-11-22 18:37:07 +00:00
elif headers.get('content-type'):
2020-04-02 21:35:06 +00:00
headers['Content-Type'] = headers['content-type']
2019-11-22 18:37:07 +00:00
if headers.get('Content-Type'):
if ' boundary=' in headers['Content-Type']:
2020-04-02 21:35:06 +00:00
boundary = headers['Content-Type'].split('boundary=')[1]
2019-11-22 18:37:07 +00:00
if ';' in boundary:
2020-04-02 21:35:06 +00:00
boundary = boundary.split(';')[0]
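# For example a header value of
#   'multipart/form-data; boundary=--epicyonBoundary; charset=utf-8'
# yields boundary = '--epicyonBoundary' after the two splits above
# (the boundary string itself is illustrative).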
try:
postBytes = self.rfile.read(length)
2020-06-23 14:25:03 +00:00
except SocketError as e:
if e.errno == errno.ECONNRESET:
print('WARN: POST postBytes ' +
'connection reset by peer')
else:
print('WARN: POST postBytes socket error')
return None
2020-06-23 21:27:13 +00:00
except ValueError as e:
2020-06-08 18:52:18 +00:00
print('ERROR: POST postBytes rfile.read failed')
2020-06-23 21:27:13 +00:00
print(e)
2020-06-08 18:52:18 +00:00
return None
2020-03-22 21:16:02 +00:00
2019-11-22 18:37:07 +00:00
# second length check from the bytes received
# since Content-Length could be untruthful
2020-04-02 21:35:06 +00:00
length = len(postBytes)
if length > self.server.maxPostLength:
2019-11-22 18:37:07 +00:00
print('POST size too large')
return None
2020-03-22 21:16:02 +00:00
2020-03-29 10:31:59 +00:00
# Note: sending new posts needs to be synchronous,
# otherwise any attachments can get mangled if
# other events happen during their decoding
2020-04-02 21:35:06 +00:00
print('Creating new post from: ' + newPostThreadName)
2020-04-16 09:01:33 +00:00
self._receiveNewPostProcess(postType,
2020-04-02 21:35:06 +00:00
path, headers, length,
postBytes, boundary,
2020-09-28 14:10:54 +00:00
callingDomain, cookie,
authorized)
2019-09-29 13:58:05 +00:00
return pageNumber
2020-03-22 21:16:02 +00:00
2020-08-11 17:18:22 +00:00
def _cryptoAPIreadHandle(self):
"""Reads handle
"""
messageBytes = None
maxDeviceIdLength = 2048
length = int(self.headers['Content-length'])
if length >= maxDeviceIdLength:
print('WARN: handle post to crypto API is too long ' +
str(length) + ' bytes')
return {}
try:
messageBytes = self.rfile.read(length)
except SocketError as e:
if e.errno == errno.ECONNRESET:
print('WARN: handle POST messageBytes ' +
'connection reset by peer')
else:
print('WARN: handle POST messageBytes socket error')
return {}
except ValueError as e:
print('ERROR: handle POST messageBytes rfile.read failed')
print(e)
return {}
lenMessage = len(messageBytes)
if lenMessage > 2048:
print('WARN: handle post to crypto API is too long ' +
str(lenMessage) + ' bytes')
return {}
handle = messageBytes.decode("utf-8")
if not handle:
return None
if '@' not in handle:
return None
if '[' in handle:
return json.loads(messageBytes)
if handle.startswith('@'):
handle = handle[1:]
if '@' not in handle:
return None
return handle.strip()
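# Examples of input accepted by the above (handles are illustrative):
#   '@alice@domain' or 'alice@domain'  -> 'alice@domain'
#   '["alice@domain", "bob@domain"]'   -> the parsed json list
# Anything without an '@' returns None.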
def _cryptoAPIreadJson(self) -> {}:
2020-08-06 21:23:17 +00:00
"""Obtains json from POST to the crypto API
"""
2020-08-06 20:56:14 +00:00
messageBytes = None
maxCryptoMessageLength = 10240
length = int(self.headers['Content-length'])
if length >= maxCryptoMessageLength:
print('WARN: post to crypto API is too long ' +
str(length) + ' bytes')
return {}
try:
messageBytes = self.rfile.read(length)
except SocketError as e:
if e.errno == errno.ECONNRESET:
print('WARN: POST messageBytes ' +
'connection reset by peer')
else:
print('WARN: POST messageBytes socket error')
return {}
except ValueError as e:
print('ERROR: POST messageBytes rfile.read failed')
print(e)
return {}
lenMessage = len(messageBytes)
if lenMessage > 10240:
print('WARN: post to crypto API is too long ' +
str(lenMessage) + ' bytes')
return {}
return json.loads(messageBytes)
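# The keys/upload handler below passes the parsed json to E2EEaddDevice, so
# the payload is expected to look roughly like this (all values here are
# illustrative placeholders):
#
#   {
#     "deviceId": "device1",
#     "name": "my phone",
#     "claim": "...",
#     "fingerprintKey": {"type": "...", "publicKeyBase64": "..."},
#     "identityKey": {"type": "...", "publicKeyBase64": "..."}
#   }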
def _cryptoAPIQuery(self, callingDomain: str) -> bool:
handle = self._cryptoAPIreadHandle()
if not handle:
return False
if isinstance(handle, str):
personDir = self.server.baseDir + '/accounts/' + handle
if not os.path.isdir(personDir + '/devices'):
return False
devicesList = []
for subdir, dirs, files in os.walk(personDir + '/devices'):
for f in files:
deviceFilename = os.path.join(personDir + '/devices', f)
if not os.path.isfile(deviceFilename):
continue
contentJson = loadJson(deviceFilename)
if contentJson:
devicesList.append(contentJson)
# return the list of devices for this handle
msg = \
json.dumps(devicesList,
ensure_ascii=False).encode('utf-8')
self._set_headers('application/json',
len(msg),
None, callingDomain)
self._write(msg)
return True
return False
2020-08-06 16:49:13 +00:00
def _cryptoAPI(self, path: str, authorized: bool,
               callingDomain: str) -> None:
"""POST or GET with the crypto API
"""
if authorized and path.startswith('/api/v1/crypto/keys/upload'):
2020-08-11 17:21:56 +00:00
# register a device to an authorized account
2020-08-06 21:23:17 +00:00
if not self.authorizedNickname:
self._400()
return
2020-08-06 20:56:14 +00:00
deviceKeys = self._cryptoAPIreadJson()
if not deviceKeys:
self._400()
return
2020-08-11 17:21:56 +00:00
if isinstance(deviceKeys, dict):
2020-08-11 12:57:34 +00:00
if not E2EEvalidDevice(deviceKeys):
self._400()
return
E2EEaddDevice(self.server.baseDir,
self.authorizedNickname,
self.server.domain,
deviceKeys['deviceId'],
deviceKeys['name'],
deviceKeys['claim'],
deviceKeys['fingerprintKey']['publicKeyBase64'],
deviceKeys['identityKey']['publicKeyBase64'],
deviceKeys['fingerprintKey']['type'],
deviceKeys['identityKey']['type'])
self._200()
return
self._400()
2020-08-06 16:49:13 +00:00
elif path.startswith('/api/v1/crypto/keys/query'):
2020-08-13 16:19:35 +00:00
# given a handle (nickname@domain) return a list of the devices
2020-08-11 17:24:03 +00:00
# registered to that handle
if not self._cryptoAPIQuery(callingDomain):
self._400()
2020-08-06 16:49:13 +00:00
elif path.startswith('/api/v1/crypto/keys/claim'):
2020-08-06 21:23:17 +00:00
# TODO
2020-08-06 16:49:13 +00:00
self._200()
elif authorized and path.startswith('/api/v1/crypto/delivery'):
2020-08-06 21:23:17 +00:00
# TODO
2020-08-06 16:49:13 +00:00
self._200()
elif (authorized and
path.startswith('/api/v1/crypto/encrypted_messages/clear')):
2020-08-06 21:23:17 +00:00
# TODO
2020-08-06 16:49:13 +00:00
self._200()
elif path.startswith('/api/v1/crypto/encrypted_messages'):
2020-08-06 21:23:17 +00:00
# TODO
2020-08-06 16:49:13 +00:00
self._200()
else:
self._400()
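# Sketch of exercising these endpoints from the command line (domain and
# credentials are placeholders; authorization is whatever self._isAuthorized
# accepts for the account, e.g. basic auth):
#
#   # register device keys for the logged-in account
#   curl -X POST https://yourdomain/api/v1/crypto/keys/upload \
#        -u 'nickname:password' -d @devicekeys.json
#
#   # list the devices registered to a handle (no login required)
#   curl -X POST https://yourdomain/api/v1/crypto/keys/query \
#        -d 'nickname@yourdomain'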
def do_POST(self):
2020-04-02 21:35:06 +00:00
POSTstartTime = time.time()
POSTtimings = []
2019-11-15 18:59:15 +00:00
2019-08-20 11:30:41 +00:00
if not self.server.session:
2019-11-15 19:44:20 +00:00
print('Starting new session from POST')
2020-04-02 21:35:06 +00:00
self.server.session = \
2020-06-09 11:03:59 +00:00
createSession(self.server.proxyType)
2020-06-08 17:10:53 +00:00
if not self.server.session:
2020-06-24 09:04:58 +00:00
print('ERROR: POST failed to create session during POST')
2020-06-08 17:10:53 +00:00
self._404()
return
2019-08-20 11:30:41 +00:00
2019-07-04 17:56:25 +00:00
if self.server.debug:
2020-04-02 21:35:06 +00:00
print('DEBUG: POST to ' + self.server.baseDir +
' path: ' + self.path + ' busy: ' +
2019-07-06 17:00:22 +00:00
str(self.server.POSTbusy))
2019-07-01 14:30:48 +00:00
if self.server.POSTbusy:
2020-04-02 21:35:06 +00:00
currTimePOST = int(time.time())
if currTimePOST - self.server.lastPOST == 0:
2019-06-29 17:28:43 +00:00
self.send_response(429)
2019-06-29 17:27:32 +00:00
self.end_headers()
2020-03-22 21:16:02 +00:00
return
2020-04-02 21:35:06 +00:00
self.server.lastPOST = currTimePOST
2020-03-22 21:16:02 +00:00
2020-04-02 21:35:06 +00:00
callingDomain = self.server.domainFull
2020-03-27 11:46:36 +00:00
if self.headers.get('Host'):
2020-04-02 21:35:06 +00:00
callingDomain = self.headers['Host']
2020-03-28 15:42:27 +00:00
if self.server.onionDomain:
if callingDomain != self.server.domain and \
callingDomain != self.server.domainFull and \
callingDomain != self.server.onionDomain:
2020-04-02 21:35:06 +00:00
print('POST domain blocked: ' + callingDomain)
2020-03-28 15:42:27 +00:00
self._400()
return
else:
if callingDomain != self.server.domain and \
callingDomain != self.server.domainFull:
2020-04-02 21:35:06 +00:00
print('POST domain blocked: ' + callingDomain)
2020-03-28 15:42:27 +00:00
self._400()
return
2020-03-28 10:33:04 +00:00
2020-04-02 21:35:06 +00:00
self.server.POSTbusy = True
2019-07-01 11:48:54 +00:00
if not self.headers.get('Content-type'):
2019-07-03 16:14:45 +00:00
print('Content-type header missing')
2020-04-16 11:48:00 +00:00
self._400()
2020-04-02 21:35:06 +00:00
self.server.POSTbusy = False
2019-07-01 11:48:54 +00:00
return
2019-07-03 16:14:45 +00:00
2019-07-03 21:37:46 +00:00
# remove any trailing slashes from the path
if not self.path.endswith('confirm'):
2020-04-02 21:35:06 +00:00
self.path = self.path.replace('/outbox/', '/outbox')
self.path = self.path.replace('/tlblogs/', '/tlblogs')
2020-08-25 21:51:30 +00:00
self.path = self.path.replace('/tlevents/', '/tlevents')
2020-04-02 21:35:06 +00:00
self.path = self.path.replace('/inbox/', '/inbox')
self.path = self.path.replace('/shares/', '/shares')
self.path = self.path.replace('/sharedInbox/', '/sharedInbox')
2019-07-03 21:37:46 +00:00
2020-04-02 21:35:06 +00:00
if self.path == '/inbox':
2019-11-15 21:43:20 +00:00
if not self.server.enableSharedInbox:
self._503()
2020-04-16 11:48:00 +00:00
self.server.POSTbusy = False
2019-11-15 21:43:20 +00:00
return
2020-04-02 21:35:06 +00:00
cookie = None
2019-07-29 16:13:48 +00:00
if self.headers.get('Cookie'):
2020-04-02 21:35:06 +00:00
cookie = self.headers['Cookie']
2019-07-29 16:13:48 +00:00
2019-07-27 20:30:58 +00:00
# check authorization
2020-04-02 21:35:06 +00:00
authorized = self._isAuthorized()
2020-07-11 15:49:26 +00:00
if not authorized:
2020-07-11 15:45:22 +00:00
print('POST Not authorized')
print(str(self.headers))
2019-07-27 20:30:58 +00:00
2020-08-06 16:49:13 +00:00
if self.path.startswith('/api/v1/crypto/'):
self._cryptoAPI(self.path, authorized, callingDomain)
self.server.POSTbusy = False
return
2020-02-24 22:50:55 +00:00
# if this is a POST to the outbox then check authentication
2020-04-02 21:35:06 +00:00
self.outboxAuthenticated = False
self.postToNickname = None
2019-07-24 22:38:42 +00:00
2020-04-02 21:35:06 +00:00
self._benchmarkPOSTtimings(POSTstartTime, POSTtimings, 1)
2020-03-22 21:16:02 +00:00
2020-08-30 14:40:55 +00:00
# login screen
2019-07-24 22:38:42 +00:00
if self.path.startswith('/login'):
2020-08-30 14:40:55 +00:00
self._loginScreen(self.path, callingDomain, cookie,
self.server.baseDir, self.server.httpPrefix,
self.server.domain, self.server.domainFull,
self.server.port,
self.server.onionDomain, self.server.i2pDomain,
self.server.debug)
2019-11-15 18:59:15 +00:00
return
2019-07-24 22:38:42 +00:00
2020-04-02 21:35:06 +00:00
self._benchmarkPOSTtimings(POSTstartTime, POSTtimings, 2)
2019-11-16 10:40:35 +00:00
2020-08-30 10:42:44 +00:00
# update of profile/avatar from web interface,
# after selecting Edit button then Submit
2019-08-02 09:52:12 +00:00
if authorized and self.path.endswith('/profiledata'):
2020-08-30 10:42:44 +00:00
self._profileUpdate(callingDomain, cookie, authorized, self.path,
self.server.baseDir, self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.onionDomain,
self.server.i2pDomain, self.server.debug)
2019-08-02 09:52:12 +00:00
return
2020-10-01 19:34:39 +00:00
if authorized and self.path.endswith('/linksdata'):
self._linksUpdate(callingDomain, cookie, authorized, self.path,
self.server.baseDir, self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.onionDomain,
self.server.i2pDomain, self.server.debug,
self.server.defaultTimeline)
return
2020-10-04 09:22:27 +00:00
if authorized and self.path.endswith('/newswiredata'):
self._newswireUpdate(callingDomain, cookie, authorized, self.path,
self.server.baseDir, self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.onionDomain,
self.server.i2pDomain, self.server.debug,
self.server.defaultTimeline)
return
2020-11-05 18:47:03 +00:00
if authorized and self.path.endswith('/citationsdata'):
self._citationsUpdate(callingDomain, cookie, authorized, self.path,
self.server.baseDir, self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.onionDomain,
self.server.i2pDomain, self.server.debug,
self.server.defaultTimeline,
self.server.newswire)
2020-11-05 18:47:03 +00:00
return
2020-10-10 19:14:36 +00:00
if authorized and self.path.endswith('/newseditdata'):
self._newsPostEdit(callingDomain, cookie, authorized, self.path,
self.server.baseDir, self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.onionDomain,
self.server.i2pDomain, self.server.debug,
self.server.defaultTimeline)
return
2020-04-02 21:35:06 +00:00
self._benchmarkPOSTtimings(POSTstartTime, POSTtimings, 3)
2019-11-16 10:40:35 +00:00
2019-08-13 10:48:16 +00:00
# moderator action buttons
if authorized and '/users/' in self.path and \
2019-08-13 11:59:38 +00:00
self.path.endswith('/moderationaction'):
2020-08-30 14:09:47 +00:00
self._moderatorActions(self.path, callingDomain, cookie,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
self.server.debug)
2019-08-13 10:48:16 +00:00
return
2020-04-02 21:35:06 +00:00
self._benchmarkPOSTtimings(POSTstartTime, POSTtimings, 4)
2019-11-16 10:40:35 +00:00
2020-04-02 21:35:06 +00:00
searchForEmoji = False
2019-08-19 20:11:38 +00:00
if self.path.endswith('/searchhandleemoji'):
2020-04-02 21:35:06 +00:00
searchForEmoji = True
self.path = self.path.replace('/searchhandleemoji',
'/searchhandle')
2019-08-19 20:11:38 +00:00
if self.server.debug:
print('DEBUG: searching for emoji')
2020-04-02 21:35:06 +00:00
print('authorized: ' + str(authorized))
2019-08-19 20:01:29 +00:00
2020-04-02 21:35:06 +00:00
self._benchmarkPOSTtimings(POSTstartTime, POSTtimings, 5)
2019-11-16 10:40:35 +00:00
2020-04-02 21:35:06 +00:00
self._benchmarkPOSTtimings(POSTstartTime, POSTtimings, 6)
2019-11-16 10:40:35 +00:00
2019-08-14 09:45:51 +00:00
# a search was made
2020-04-02 21:35:06 +00:00
if ((authorized or searchForEmoji) and
(self.path.endswith('/searchhandle') or
'/searchhandle?page=' in self.path)):
2020-08-30 20:25:36 +00:00
self._receiveSearchQuery(callingDomain, cookie,
authorized, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
searchForEmoji,
self.server.onionDomain,
self.server.i2pDomain,
self.server.debug)
2019-07-30 22:34:04 +00:00
return
2020-04-02 21:35:06 +00:00
self._benchmarkPOSTtimings(POSTstartTime, POSTtimings, 7)
2019-11-16 10:40:35 +00:00
2020-10-19 10:33:02 +00:00
if not authorized:
if self.path.endswith('/rmpost'):
print('ERROR: attempt to remove post was not authorized. ' +
self.path)
self._400()
self.server.POSTbusy = False
return
else:
2020-08-30 20:04:21 +00:00
# a vote/question/poll is posted
if self.path.endswith('/question') or \
'/question?page=' in self.path:
self._receiveVote(callingDomain, cookie,
authorized, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.onionDomain,
self.server.i2pDomain,
self.server.debug)
return
2019-11-16 10:40:35 +00:00
2020-08-30 20:04:21 +00:00
# removes a shared item
if self.path.endswith('/rmshare'):
self._removeShare(callingDomain, cookie,
2020-08-30 16:00:33 +00:00
authorized, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.onionDomain,
self.server.i2pDomain,
self.server.debug)
2020-08-30 20:04:21 +00:00
return
2019-07-29 16:13:48 +00:00
2020-08-30 20:04:21 +00:00
self._benchmarkPOSTtimings(POSTstartTime, POSTtimings, 8)
2019-11-16 10:40:35 +00:00
2020-08-30 20:04:21 +00:00
# removes a post
if self.path.endswith('/rmpost'):
2020-10-19 10:33:02 +00:00
if '/users/' not in self.path:
print('ERROR: attempt to remove post ' +
'was not authorized. ' + self.path)
2020-10-19 10:33:02 +00:00
self._400()
self.server.POSTbusy = False
return
2020-08-30 20:04:21 +00:00
if self.path.endswith('/rmpost'):
self._removePost(callingDomain, cookie,
2020-08-30 16:21:35 +00:00
authorized, self.path,
2020-08-30 20:04:21 +00:00
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
2020-08-30 16:21:35 +00:00
self.server.onionDomain,
self.server.i2pDomain,
self.server.debug)
2020-08-30 20:04:21 +00:00
return
2020-08-30 20:04:21 +00:00
self._benchmarkPOSTtimings(POSTstartTime, POSTtimings, 9)
2019-11-16 10:40:35 +00:00
2020-08-30 20:04:21 +00:00
# decision to follow in the web interface is confirmed
if self.path.endswith('/followconfirm'):
self._followConfirm(callingDomain, cookie,
authorized, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
self.server.debug)
return
2019-08-24 23:00:03 +00:00
2020-08-30 20:04:21 +00:00
self._benchmarkPOSTtimings(POSTstartTime, POSTtimings, 10)
2019-11-16 10:40:35 +00:00
2020-08-30 20:04:21 +00:00
# decision to unfollow in the web interface is confirmed
if self.path.endswith('/unfollowconfirm'):
self._unfollowConfirm(callingDomain, cookie,
authorized, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
self.server.debug)
return
self._benchmarkPOSTtimings(POSTstartTime, POSTtimings, 11)
# decision to unblock in the web interface is confirmed
if self.path.endswith('/unblockconfirm'):
self._unblockConfirm(callingDomain, cookie,
authorized, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
self.server.debug)
return
self._benchmarkPOSTtimings(POSTstartTime, POSTtimings, 12)
# decision to block in the web interface is confirmed
if self.path.endswith('/blockconfirm'):
self._blockConfirm(callingDomain, cookie,
authorized, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
self.server.debug)
return
self._benchmarkPOSTtimings(POSTstartTime, POSTtimings, 13)
# an option was chosen from person options screen
# view/follow/block/report
if self.path.endswith('/personoptions'):
2020-08-30 20:07:01 +00:00
self._personOptions(self.path,
callingDomain, cookie,
2020-08-30 20:04:21 +00:00
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.port,
self.server.onionDomain,
self.server.i2pDomain,
self.server.debug)
return
2020-04-02 21:35:06 +00:00
self._benchmarkPOSTtimings(POSTstartTime, POSTtimings, 14)
2019-11-16 10:40:35 +00:00
2019-11-29 13:07:20 +00:00
# receive different types of post created by htmlNewPost
2020-04-02 21:35:06 +00:00
postTypes = ("newpost", "newblog", "newunlisted", "newfollowers",
"newdm", "newreport", "newshare", "newquestion",
2020-08-22 21:19:18 +00:00
"editblogpost", "newreminder", "newevent")
2019-11-29 13:07:20 +00:00
for currPostType in postTypes:
2020-04-16 08:55:53 +00:00
if not authorized:
break
postRedirect = self.server.defaultTimeline
if currPostType == 'newshare':
2020-04-02 21:35:06 +00:00
postRedirect = 'shares'
elif currPostType == 'newevent':
postRedirect = 'tlevents'
2019-11-29 13:07:20 +00:00
pageNumber = \
self._receiveNewPost(currPostType, self.path,
2020-09-28 14:10:54 +00:00
callingDomain, cookie,
authorized)
2019-11-29 13:07:20 +00:00
if pageNumber:
2020-04-02 21:35:06 +00:00
nickname = self.path.split('/users/')[1]
2020-02-21 12:55:11 +00:00
if '/' in nickname:
2020-04-02 21:35:06 +00:00
nickname = nickname.split('/')[0]
2020-02-21 12:55:11 +00:00
2020-06-03 19:14:24 +00:00
if callingDomain.endswith('.onion') and \
self.server.onionDomain:
self._redirect_headers('http://' +
self.server.onionDomain +
'/users/' + nickname +
'/' + postRedirect +
'?page=' + str(pageNumber), cookie,
callingDomain)
elif (callingDomain.endswith('.i2p') and
self.server.i2pDomain):
2020-06-19 09:50:00 +00:00
self._redirect_headers('http://' +
2020-06-03 19:14:24 +00:00
self.server.i2pDomain +
2020-04-02 21:35:06 +00:00
'/users/' + nickname +
'/' + postRedirect +
'?page=' + str(pageNumber), cookie,
2020-03-28 17:24:40 +00:00
callingDomain)
else:
2020-06-03 19:14:24 +00:00
self._redirect_headers(self.server.httpPrefix + '://' +
self.server.domainFull +
2020-04-02 21:35:06 +00:00
'/users/' + nickname +
'/' + postRedirect +
'?page=' + str(pageNumber), cookie,
2020-03-28 17:24:40 +00:00
callingDomain)
2020-04-02 21:35:06 +00:00
self.server.POSTbusy = False
2019-11-29 13:07:20 +00:00
return
2019-07-29 16:13:48 +00:00
2020-04-02 21:35:06 +00:00
self._benchmarkPOSTtimings(POSTstartTime, POSTtimings, 15)
2019-11-16 10:40:35 +00:00
2019-07-23 19:02:26 +00:00
if self.path.endswith('/outbox') or self.path.endswith('/shares'):
if '/users/' in self.path:
2019-07-27 20:30:58 +00:00
if authorized:
2020-04-02 21:35:06 +00:00
self.outboxAuthenticated = True
pathUsersSection = self.path.split('/users/')[1]
self.postToNickname = pathUsersSection.split('/')[0]
2019-07-03 21:37:46 +00:00
if not self.outboxAuthenticated:
self.send_response(405)
self.end_headers()
2020-04-02 21:35:06 +00:00
self.server.POSTbusy = False
2019-07-03 21:37:46 +00:00
return
2019-07-03 16:14:45 +00:00
2020-04-02 21:35:06 +00:00
self._benchmarkPOSTtimings(POSTstartTime, POSTtimings, 16)
2019-11-16 10:40:35 +00:00
2019-07-03 16:14:45 +00:00
# check that the post is to an expected path
2020-04-02 21:35:06 +00:00
if not (self.path.endswith('/outbox') or
self.path.endswith('/inbox') or
self.path.endswith('/shares') or
self.path.endswith('/moderationaction') or
self.path == '/sharedInbox'):
print('Attempt to POST to invalid path ' + self.path)
2020-04-16 11:48:00 +00:00
self._400()
2020-04-02 21:35:06 +00:00
self.server.POSTbusy = False
2019-07-03 16:14:45 +00:00
return
2020-04-02 21:35:06 +00:00
self._benchmarkPOSTtimings(POSTstartTime, POSTtimings, 17)
2019-11-16 10:40:35 +00:00
2019-06-28 18:55:29 +00:00
# read the message and convert it into a python dictionary
2020-04-02 21:35:06 +00:00
length = int(self.headers['Content-length'])
2019-07-03 16:14:45 +00:00
if self.server.debug:
2020-04-02 21:35:06 +00:00
print('DEBUG: content-length: ' + str(length))
if not self.headers['Content-type'].startswith('image/') and \
not self.headers['Content-type'].startswith('video/') and \
not self.headers['Content-type'].startswith('audio/'):
2020-04-02 21:35:06 +00:00
if length > self.server.maxMessageLength:
print('Maximum message length exceeded ' + str(length))
2020-04-16 11:48:00 +00:00
self._400()
2020-04-02 21:35:06 +00:00
self.server.POSTbusy = False
2019-07-16 14:23:06 +00:00
return
else:
2020-04-02 21:35:06 +00:00
if length > self.server.maxMediaSize:
print('Maximum media size exceeded ' + str(length))
2020-04-16 11:48:00 +00:00
self._400()
2020-04-02 21:35:06 +00:00
self.server.POSTbusy = False
2019-07-16 14:23:06 +00:00
return
# receive images to the outbox
if self.headers['Content-type'].startswith('image/') and \
'/users/' in self.path:
2020-08-30 19:37:44 +00:00
self._receiveImage(length, callingDomain, cookie,
authorized, self.path,
self.server.baseDir,
self.server.httpPrefix,
self.server.domain,
self.server.domainFull,
self.server.onionDomain,
self.server.i2pDomain,
self.server.debug)
2020-03-22 21:16:02 +00:00
return
2019-07-16 14:23:06 +00:00
# refuse to receive non-json content
2019-11-09 21:39:04 +00:00
if self.headers['Content-type'] != 'application/json' and \
self.headers['Content-type'] != 'application/activity+json':
2020-04-02 21:35:06 +00:00
print("POST is not json: " + self.headers['Content-type'])
2019-08-14 22:33:55 +00:00
if self.server.debug:
print(str(self.headers))
2020-04-02 21:35:06 +00:00
length = int(self.headers['Content-length'])
if length < self.server.maxPostLength:
2020-06-08 18:52:18 +00:00
try:
unknownPost = self.rfile.read(length).decode('utf-8')
2020-06-23 14:25:03 +00:00
except SocketError as e:
if e.errno == errno.ECONNRESET:
print('WARN: POST unknownPost ' +
'connection reset by peer')
else:
print('WARN: POST unknownPost socket error')
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
2020-06-23 21:27:13 +00:00
except ValueError as e:
2020-06-08 18:52:18 +00:00
print('ERROR: POST unknownPost rfile.read failed')
2020-06-23 21:27:13 +00:00
print(e)
2020-06-08 18:52:18 +00:00
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
2019-08-14 22:33:55 +00:00
print(str(unknownPost))
2020-04-16 11:48:00 +00:00
self._400()
2020-04-02 21:35:06 +00:00
self.server.POSTbusy = False
2019-06-28 21:06:05 +00:00
return
2019-07-03 16:14:45 +00:00
if self.server.debug:
print('DEBUG: Reading message')
2020-04-02 21:35:06 +00:00
self._benchmarkPOSTtimings(POSTstartTime, POSTtimings, 18)
2019-11-16 10:40:35 +00:00
2019-11-16 00:07:07 +00:00
# check content length before reading bytes
if self.path == '/sharedInbox' or self.path == '/inbox':
2020-04-02 21:35:06 +00:00
length = 0
2019-11-16 00:07:07 +00:00
if self.headers.get('Content-length'):
2020-04-02 21:35:06 +00:00
length = int(self.headers['Content-length'])
2019-11-16 09:53:52 +00:00
elif self.headers.get('Content-Length'):
2020-04-02 21:35:06 +00:00
length = int(self.headers['Content-Length'])
2019-11-16 09:53:52 +00:00
elif self.headers.get('content-length'):
2020-04-02 21:35:06 +00:00
length = int(self.headers['content-length'])
if length > 10240:
print('WARN: post to shared inbox is too long ' +
str(length) + ' bytes')
2019-11-16 00:07:07 +00:00
self._400()
2020-04-02 21:35:06 +00:00
self.server.POSTbusy = False
2019-11-16 00:07:07 +00:00
return
2020-06-08 18:52:18 +00:00
try:
messageBytes = self.rfile.read(length)
2020-06-23 14:25:03 +00:00
except SocketError as e:
if e.errno == errno.ECONNRESET:
print('WARN: POST messageBytes ' +
'connection reset by peer')
else:
print('WARN: POST messageBytes socket error')
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
2020-06-23 21:27:13 +00:00
except ValueError as e:
2020-06-08 18:52:18 +00:00
print('ERROR: POST messageBytes rfile.read failed')
2020-06-23 21:27:13 +00:00
print(e)
2020-06-08 18:52:18 +00:00
self.send_response(400)
self.end_headers()
self.server.POSTbusy = False
return
2019-11-16 00:07:07 +00:00
# check content length after reading bytes
2019-11-16 00:01:00 +00:00
if self.path == '/sharedInbox' or self.path == '/inbox':
2020-04-02 21:35:06 +00:00
lenMessage = len(messageBytes)
if lenMessage > 10240:
print('WARN: post to shared inbox is too long ' +
str(lenMessage) + ' bytes')
2019-11-16 00:01:00 +00:00
self._400()
2020-04-02 21:35:06 +00:00
self.server.POSTbusy = False
2019-11-16 00:01:00 +00:00
return
2020-10-15 08:59:08 +00:00
if containsInvalidChars(messageBytes.decode("utf-8")):
self._400()
self.server.POSTbusy = False
return
2019-11-16 00:01:00 +00:00
# convert the raw bytes to json
2020-04-02 21:35:06 +00:00
messageJson = json.loads(messageBytes)
2020-03-22 21:16:02 +00:00
2020-04-02 21:35:06 +00:00
self._benchmarkPOSTtimings(POSTstartTime, POSTtimings, 19)
2019-11-16 10:40:35 +00:00
2019-07-03 21:37:46 +00:00
# https://www.w3.org/TR/activitypub/#object-without-create
if self.outboxAuthenticated:
2020-04-02 21:35:06 +00:00
if self._postToOutbox(messageJson, __version__):
2019-07-16 19:07:45 +00:00
if messageJson.get('id'):
2020-08-23 11:13:35 +00:00
locnStr = removeIdEnding(messageJson['id'])
2020-04-02 21:35:06 +00:00
self.headers['Location'] = locnStr
self.send_response(201)
self.end_headers()
2020-04-02 21:35:06 +00:00
self.server.POSTbusy = False
return
else:
2019-08-16 18:32:26 +00:00
if self.server.debug:
print('Failed to post to outbox')
self.send_response(403)
self.end_headers()
2020-04-02 21:35:06 +00:00
self.server.POSTbusy = False
return
2019-07-03 21:37:46 +00:00
2020-04-02 21:35:06 +00:00
self._benchmarkPOSTtimings(POSTstartTime, POSTtimings, 20)
2019-11-16 11:55:14 +00:00
2019-07-02 15:07:27 +00:00
# check the necessary properties are available
2019-07-03 16:14:45 +00:00
if self.server.debug:
print('DEBUG: Check message has params')
2019-07-05 15:10:21 +00:00
if self.path.endswith('/inbox') or \
2020-04-02 21:35:06 +00:00
self.path == '/sharedInbox':
2019-07-03 21:37:46 +00:00
if not inboxMessageHasParams(messageJson):
2019-07-06 13:49:25 +00:00
if self.server.debug:
2020-04-02 21:35:06 +00:00
print("DEBUG: inbox message doesn't have the " +
2020-03-29 10:31:59 +00:00
"required parameters")
2019-07-03 21:37:46 +00:00
self.send_response(403)
self.end_headers()
2020-04-02 21:35:06 +00:00
self.server.POSTbusy = False
2019-07-03 21:37:46 +00:00
return
2019-07-02 15:07:27 +00:00
2020-04-02 21:35:06 +00:00
self._benchmarkPOSTtimings(POSTstartTime, POSTtimings, 21)
2019-11-16 11:55:14 +00:00
2019-11-16 12:07:57 +00:00
if self.headers.get('signature'):
if 'keyId=' not in self.headers['signature']:
if self.server.debug:
2020-04-02 21:35:06 +00:00
print('DEBUG: POST to inbox has no keyId in ' +
2020-03-29 10:31:59 +00:00
'header signature parameter')
2019-11-16 12:07:57 +00:00
self.send_response(403)
self.end_headers()
2020-04-02 21:35:06 +00:00
self.server.POSTbusy = False
2019-11-16 12:07:57 +00:00
return
2020-04-02 21:35:06 +00:00
self._benchmarkPOSTtimings(POSTstartTime, POSTtimings, 22)
2019-11-16 12:07:57 +00:00
2020-08-23 14:45:58 +00:00
if not self.server.unitTest:
if not inboxPermittedMessage(self.server.domain,
messageJson,
self.server.federationList):
if self.server.debug:
# https://www.youtube.com/watch?v=K3PrSj9XEu4
print('DEBUG: Ah Ah Ah')
self.send_response(403)
self.end_headers()
self.server.POSTbusy = False
return
self._benchmarkPOSTtimings(POSTstartTime, POSTtimings, 23)
if self.server.debug:
print('DEBUG: POST saving to inbox queue')
if '/users/' in self.path:
pathUsersSection = self.path.split('/users/')[1]
if '/' not in pathUsersSection:
if self.server.debug:
print('DEBUG: This is not a users endpoint')
else:
self.postToNickname = pathUsersSection.split('/')[0]
if self.postToNickname:
queueStatus = \
self._updateInboxQueue(self.postToNickname,
messageJson, messageBytes)
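                        # a queue status in the range 0-3 means that
                        # _updateInboxQueue has dealt with the post and
                        # nothing further needs to happen here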
if queueStatus >= 0 and queueStatus <= 3:
return
if self.server.debug:
print('_updateInboxQueue exited ' +
'without doing anything')
else:
if self.server.debug:
print('self.postToNickname is None')
self.send_response(403)
self.end_headers()
self.server.POSTbusy = False
return
else:
if self.path == '/sharedInbox' or self.path == '/inbox':
print('DEBUG: POST to shared inbox')
queueStatus = \
self._updateInboxQueue('inbox', messageJson, messageBytes)
if queueStatus >= 0 and queueStatus <= 3:
return
self._200()
self.server.POSTbusy = False
class PubServerUnitTest(PubServer):
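    """Server used by unit tests.
    HTTP/1.0 is used so that connections are not kept alive between requests.
    """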
protocol_version = 'HTTP/1.0'
class EpicyonServer(ThreadingHTTPServer):
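    """Multithreaded HTTP server which suppresses connection reset errors
    """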
def handle_error(self, request, client_address):
        # suppress connection reset errors
cls, e = sys.exc_info()[:2]
if cls is ConnectionResetError:
print('ERROR: ' + str(cls) + ", " + str(e))
else:
return HTTPServer.handle_error(self, request, client_address)
def runPostsQueue(baseDir: str, sendThreads: [], debug: bool) -> None:
"""Manages the threads used to send posts
"""
while True:
time.sleep(1)
removeDormantThreads(baseDir, sendThreads, debug)
def runSharesExpire(versionNumber: str, baseDir: str) -> None:
"""Expires shares as needed
"""
while True:
time.sleep(120)
expireShares(baseDir)
def runPostsWatchdog(projectVersion: str, httpd) -> None:
"""This tries to keep the posts thread running even if it dies
"""
print('Starting posts queue watchdog')
postsQueueOriginal = httpd.thrPostsQueue.clone(runPostsQueue)
httpd.thrPostsQueue.start()
while True:
time.sleep(20)
if not httpd.thrPostsQueue.isAlive():
httpd.thrPostsQueue.kill()
httpd.thrPostsQueue = postsQueueOriginal.clone(runPostsQueue)
httpd.thrPostsQueue.start()
print('Restarting posts queue...')
def runSharesExpireWatchdog(projectVersion: str, httpd) -> None:
"""This tries to keep the shares expiry thread running even if it dies
"""
print('Starting shares expiry watchdog')
sharesExpireOriginal = httpd.thrSharesExpire.clone(runSharesExpire)
httpd.thrSharesExpire.start()
while True:
time.sleep(20)
if not httpd.thrSharesExpire.isAlive():
httpd.thrSharesExpire.kill()
httpd.thrSharesExpire = sharesExpireOriginal.clone(runSharesExpire)
httpd.thrSharesExpire.start()
print('Restarting shares expiry...')
def loadTokens(baseDir: str, tokensDict: {}, tokensLookup: {}) -> None:
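    """Loads each account's login token from its .token file.
    tokensDict maps nickname to token, and tokensLookup maps
    token back to nickname.
    """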
for subdir, dirs, files in os.walk(baseDir + '/accounts'):
for handle in dirs:
if '@' in handle:
tokenFilename = baseDir + '/accounts/' + handle + '/.token'
if not os.path.isfile(tokenFilename):
continue
nickname = handle.split('@')[0]
token = None
try:
with open(tokenFilename, 'r') as fp:
token = fp.read()
except Exception as e:
print('WARN: Unable to read token for ' +
nickname + ' ' + str(e))
if not token:
continue
tokensDict[nickname] = token
tokensLookup[token] = nickname
def runDaemon(maxFeedItemSizeKb: int,
publishButtonAtTop: bool,
rssIconAtTop: bool,
iconsAsButtons: bool,
fullWidthTimelineButtonHeader: bool,
showPublishAsIcon: bool,
maxFollowers: int,
allowNewsFollowers: bool,
maxNewsPosts: int,
maxMirroredArticles: int,
maxNewswireFeedSizeKb: int,
maxNewswirePostsPerSource: int,
showPublishedDateOnly: bool,
votingTimeMins: int,
positiveVoting: bool,
newswireVotesThreshold: int,
newsInstance: bool,
blogsInstance: bool,
mediaInstance: bool,
maxRecentPosts: int,
enableSharedInbox: bool, registration: bool,
language: str, projectVersion: str,
instanceId: str, clientToServer: bool,
baseDir: str, domain: str,
onionDomain: str, i2pDomain: str,
YTReplacementDomain: str,
port=80, proxyPort=80, httpPrefix='https',
fedList=[], maxMentions=10, maxEmoji=10,
authenticatedFetch=False,
proxyType=None, maxReplies=64,
domainMaxPostsPerDay=8640, accountMaxPostsPerDay=864,
allowDeletion=False, debug=False, unitTest=False,
instanceOnlySkillsSearch=False, sendThreads=[],
useBlurHash=False,
manualFollowerApproval=True) -> None:
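    """Runs the Epicyon ActivityPub daemon with the given options
    """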
if len(domain) == 0:
domain = 'localhost'
if '.' not in domain:
if domain != 'localhost':
print('Invalid domain: ' + domain)
return
if unitTest:
serverAddress = (domain, proxyPort)
pubHandler = partial(PubServerUnitTest)
else:
serverAddress = ('', proxyPort)
pubHandler = partial(PubServer)
if not os.path.isdir(baseDir + '/accounts'):
print('Creating accounts directory')
os.mkdir(baseDir + '/accounts')
try:
httpd = EpicyonServer(serverAddress, pubHandler)
except Exception as e:
        # errno 98 means that the port is already in use
        if getattr(e, 'errno', None) == 98:
print('ERROR: HTTP server address is already in use. ' +
str(serverAddress))
return False
print('ERROR: HTTP server failed to start. ' + str(e))
return False
httpd.unitTest = unitTest
httpd.YTReplacementDomain = YTReplacementDomain
    # the newswire dictionary, used to store rss feed entries
httpd.newswire = {}
# This counter is used to update the list of blocked domains in memory.
# It helps to avoid touching the disk and so improves flooding resistance
httpd.blocklistUpdateCtr = 0
httpd.blocklistUpdateInterval = 100
httpd.domainBlocklist = getDomainBlocklist(baseDir)
httpd.manualFollowerApproval = manualFollowerApproval
httpd.onionDomain = onionDomain
httpd.i2pDomain = i2pDomain
httpd.useBlurHash = useBlurHash
httpd.mediaInstance = mediaInstance
httpd.blogsInstance = blogsInstance
httpd.newsInstance = newsInstance
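    # the default timeline shown depends upon the type of instance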
httpd.defaultTimeline = 'inbox'
if mediaInstance:
httpd.defaultTimeline = 'tlmedia'
if blogsInstance:
httpd.defaultTimeline = 'tlblogs'
if newsInstance:
httpd.defaultTimeline = 'tlnews'
# load translations dictionary
httpd.translate = {}
httpd.systemLanguage = 'en'
if not unitTest:
if not os.path.isdir(baseDir + '/translations'):
print('ERROR: translations directory not found')
return
if not language:
systemLanguage = locale.getdefaultlocale()[0]
else:
systemLanguage = language
if not systemLanguage:
systemLanguage = 'en'
if '_' in systemLanguage:
systemLanguage = systemLanguage.split('_')[0]
while '/' in systemLanguage:
systemLanguage = systemLanguage.split('/')[1]
if '.' in systemLanguage:
systemLanguage = systemLanguage.split('.')[0]
translationsFile = baseDir + '/translations/' + \
systemLanguage + '.json'
if not os.path.isfile(translationsFile):
systemLanguage = 'en'
translationsFile = baseDir + '/translations/' + \
systemLanguage + '.json'
print('System language: ' + systemLanguage)
httpd.systemLanguage = systemLanguage
httpd.translate = loadJson(translationsFile)
if not httpd.translate:
print('ERROR: no translations loaded from ' + translationsFile)
sys.exit()
# For moderated newswire feeds this is the amount of time allowed
# for voting after the post arrives
httpd.votingTimeMins = votingTimeMins
# on the newswire, whether moderators vote positively for items
# or against them (veto)
httpd.positiveVoting = positiveVoting
# number of votes needed to remove a newswire item from the news timeline
    # or if positive voting is enabled to add the item to the news timeline
httpd.newswireVotesThreshold = newswireVotesThreshold
# maximum overall size of an rss/atom feed read by the newswire daemon
# If the feed is too large then this is probably a DoS attempt
httpd.maxNewswireFeedSizeKb = maxNewswireFeedSizeKb
# For each newswire source (account or rss feed)
# this is the maximum number of posts to show for each.
    # This prevents one or two sources from dominating the news,
# and also prevents big feeds from slowing down page load times
httpd.maxNewswirePostsPerSource = maxNewswirePostsPerSource
# Show only the date at the bottom of posts, and not the time
httpd.showPublishedDateOnly = showPublishedDateOnly
# maximum number of news articles to mirror
httpd.maxMirroredArticles = maxMirroredArticles
# maximum number of posts in the news timeline/outbox
httpd.maxNewsPosts = maxNewsPosts
# whether or not to allow followers of the news account
httpd.allowNewsFollowers = allowNewsFollowers
# The maximum number of tags per post which can be
# attached to RSS feeds pulled in via the newswire
httpd.maxTags = 32
# maximum number of followers per account
httpd.maxFollowers = maxFollowers
# whether to show an icon for publish on the
# newswire, or a 'Publish' button
httpd.showPublishAsIcon = showPublishAsIcon
    # Whether to show the timeline header containing inbox, outbox,
    # calendar, etc. as the full width of the screen or not
httpd.fullWidthTimelineButtonHeader = fullWidthTimelineButtonHeader
# whether to show icons in the header (eg calendar) as buttons
httpd.iconsAsButtons = iconsAsButtons
# whether to show the RSS icon at the top or the bottom of the timeline
httpd.rssIconAtTop = rssIconAtTop
# Whether to show the newswire publish button at the top,
# above the header image
httpd.publishButtonAtTop = publishButtonAtTop
# maximum size of individual RSS feed items, in K
httpd.maxFeedItemSizeKb = maxFeedItemSizeKb
if registration == 'open':
httpd.registration = True
else:
httpd.registration = False
httpd.enableSharedInbox = enableSharedInbox
httpd.outboxThread = {}
httpd.newPostThread = {}
httpd.projectVersion = projectVersion
httpd.authenticatedFetch = authenticatedFetch
# max POST size of 30M
httpd.maxPostLength = 1024 * 1024 * 30
httpd.maxMediaSize = httpd.maxPostLength
# Maximum text length is 32K - enough for a blog post
httpd.maxMessageLength = 32000
# Maximum overall number of posts per box
httpd.maxPostsInBox = 32000
httpd.domain = domain
httpd.port = port
httpd.domainFull = domain
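    # if a non-standard port is used then include it within the full domain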
if port:
if port != 80 and port != 443:
if ':' not in domain:
httpd.domainFull = domain + ':' + str(port)
saveDomainQrcode(baseDir, httpPrefix, httpd.domainFull)
httpd.httpPrefix = httpPrefix
httpd.debug = debug
httpd.federationList = fedList.copy()
httpd.baseDir = baseDir
httpd.instanceId = instanceId
httpd.personCache = {}
httpd.cachedWebfingers = {}
httpd.proxyType = proxyType
httpd.session = None
httpd.sessionLastUpdate = 0
httpd.lastGET = 0
httpd.lastPOST = 0
httpd.GETbusy = False
httpd.POSTbusy = False
httpd.receivedMessage = False
httpd.inboxQueue = []
httpd.sendThreads = sendThreads
httpd.postLog = []
httpd.maxQueueLength = 64
httpd.allowDeletion = allowDeletion
httpd.lastLoginTime = 0
httpd.maxReplies = maxReplies
httpd.tokens = {}
httpd.tokensLookup = {}
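    # load any previously saved tokens, so that existing logins
    # remain valid after a restart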
loadTokens(baseDir, httpd.tokens, httpd.tokensLookup)
httpd.instanceOnlySkillsSearch = instanceOnlySkillsSearch
# contains threads used to send posts to followers
httpd.followersThreads = []
# cache to store css files
httpd.cssCache = {}
if not os.path.isdir(baseDir + '/accounts/inbox@' + domain):
print('Creating shared inbox: inbox@' + domain)
createSharedInbox(baseDir, 'inbox', domain, port, httpPrefix)
if not os.path.isdir(baseDir + '/accounts/news@' + domain):
print('Creating news inbox: news@' + domain)
createNewsInbox(baseDir, domain, port, httpPrefix)
# set the avatar for the news account
themeName = getConfigParam(baseDir, 'theme')
if not themeName:
themeName = 'default'
setNewsAvatar(baseDir,
themeName,
httpPrefix,
domain,
httpd.domainFull)
if not os.path.isdir(baseDir + '/cache'):
os.mkdir(baseDir + '/cache')
if not os.path.isdir(baseDir + '/cache/actors'):
print('Creating actors cache')
os.mkdir(baseDir + '/cache/actors')
if not os.path.isdir(baseDir + '/cache/announce'):
print('Creating announce cache')
os.mkdir(baseDir + '/cache/announce')
if not os.path.isdir(baseDir + '/cache/avatars'):
print('Creating avatars cache')
os.mkdir(baseDir + '/cache/avatars')
archiveDir = baseDir + '/archive'
if not os.path.isdir(archiveDir):
print('Creating archive')
os.mkdir(archiveDir)
print('Creating cache expiry thread')
httpd.thrCache = \
threadWithTrace(target=expireCache,
args=(baseDir, httpd.personCache,
httpd.httpPrefix,
archiveDir,
httpd.maxPostsInBox), daemon=True)
httpd.thrCache.start()
print('Creating posts queue')
httpd.thrPostsQueue = \
threadWithTrace(target=runPostsQueue,
args=(baseDir, httpd.sendThreads, debug), daemon=True)
if not unitTest:
httpd.thrPostsWatchdog = \
threadWithTrace(target=runPostsWatchdog,
args=(projectVersion, httpd), daemon=True)
httpd.thrPostsWatchdog.start()
else:
httpd.thrPostsQueue.start()
print('Creating expire thread for shared items')
httpd.thrSharesExpire = \
threadWithTrace(target=runSharesExpire,
args=(__version__, baseDir), daemon=True)
if not unitTest:
httpd.thrSharesExpireWatchdog = \
threadWithTrace(target=runSharesExpireWatchdog,
args=(projectVersion, httpd), daemon=True)
httpd.thrSharesExpireWatchdog.start()
else:
httpd.thrSharesExpire.start()
httpd.recentPostsCache = {}
httpd.maxRecentPosts = maxRecentPosts
httpd.iconsCache = {}
httpd.fontsCache = {}
print('Creating inbox queue')
httpd.thrInboxQueue = \
threadWithTrace(target=runInboxQueue,
args=(httpd.recentPostsCache, httpd.maxRecentPosts,
projectVersion,
baseDir, httpPrefix, httpd.sendThreads,
httpd.postLog, httpd.cachedWebfingers,
httpd.personCache, httpd.inboxQueue,
domain, onionDomain, i2pDomain, port, proxyType,
httpd.federationList,
maxReplies,
domainMaxPostsPerDay, accountMaxPostsPerDay,
allowDeletion, debug, maxMentions, maxEmoji,
httpd.translate, unitTest,
httpd.YTReplacementDomain,
httpd.showPublishedDateOnly,
httpd.allowNewsFollowers,
httpd.maxFollowers), daemon=True)
print('Creating scheduled post thread')
httpd.thrPostSchedule = \
threadWithTrace(target=runPostSchedule,
args=(baseDir, httpd, 20), daemon=True)
print('Creating newswire thread')
httpd.thrNewswireDaemon = \
threadWithTrace(target=runNewswireDaemon,
args=(baseDir, httpd,
httpPrefix, domain, port,
httpd.translate), daemon=True)
# flags used when restarting the inbox queue
httpd.restartInboxQueueInProgress = False
httpd.restartInboxQueue = False
if not unitTest:
print('Creating inbox queue watchdog')
httpd.thrWatchdog = \
threadWithTrace(target=runInboxQueueWatchdog,
args=(projectVersion, httpd), daemon=True)
httpd.thrWatchdog.start()
print('Creating scheduled post watchdog')
httpd.thrWatchdogSchedule = \
threadWithTrace(target=runPostScheduleWatchdog,
args=(projectVersion, httpd), daemon=True)
httpd.thrWatchdogSchedule.start()
print('Creating newswire watchdog')
httpd.thrNewswireWatchdog = \
threadWithTrace(target=runNewswireWatchdog,
args=(projectVersion, httpd), daemon=True)
httpd.thrNewswireWatchdog.start()
else:
httpd.thrInboxQueue.start()
httpd.thrPostSchedule.start()
if clientToServer:
print('Running ActivityPub client on ' +
domain + ' port ' + str(proxyPort))
else:
print('Running ActivityPub server on ' +
domain + ' port ' + str(proxyPort))
httpd.serve_forever()