Merge branch 'main' of ssh://code.freedombone.net:2222/bashrc/epicyon into main

merge-requests/30/head
Bob Mottram 2020-12-22 23:48:43 +00:00
commit 0aed47f090
51 changed files with 2621 additions and 3067 deletions


@ -15,10 +15,10 @@ from utils import domainPermitted
from utils import followPerson
def createAcceptReject(baseDir: str, federationList: [],
nickname: str, domain: str, port: int,
toUrl: str, ccUrl: str, httpPrefix: str,
objectJson: {}, acceptType: str) -> {}:
def _createAcceptReject(baseDir: str, federationList: [],
nickname: str, domain: str, port: int,
toUrl: str, ccUrl: str, httpPrefix: str,
objectJson: {}, acceptType: str) -> {}:
"""Accepts or rejects something (eg. a follow request or offer)
Typically toUrl will be https://www.w3.org/ns/activitystreams#Public
and ccUrl might be a specific person favorited or repeated and
@ -51,24 +51,24 @@ def createAccept(baseDir: str, federationList: [],
nickname: str, domain: str, port: int,
toUrl: str, ccUrl: str, httpPrefix: str,
objectJson: {}) -> {}:
return createAcceptReject(baseDir, federationList,
nickname, domain, port,
toUrl, ccUrl, httpPrefix,
objectJson, 'Accept')
return _createAcceptReject(baseDir, federationList,
nickname, domain, port,
toUrl, ccUrl, httpPrefix,
objectJson, 'Accept')
def createReject(baseDir: str, federationList: [],
nickname: str, domain: str, port: int,
toUrl: str, ccUrl: str, httpPrefix: str,
objectJson: {}) -> {}:
return createAcceptReject(baseDir, federationList,
nickname, domain, port,
toUrl, ccUrl,
httpPrefix, objectJson, 'Reject')
return _createAcceptReject(baseDir, federationList,
nickname, domain, port,
toUrl, ccUrl,
httpPrefix, objectJson, 'Reject')
def acceptFollow(baseDir: str, domain: str, messageJson: {},
federationList: [], debug: bool) -> None:
def _acceptFollow(baseDir: str, domain: str, messageJson: {},
federationList: [], debug: bool) -> None:
"""Receiving a follow Accept activity
"""
if not messageJson.get('object'):
@ -204,7 +204,7 @@ def receiveAcceptReject(session, baseDir: str,
' does not contain a nickname. ' +
'Assuming single user instance.')
# receive follow accept
acceptFollow(baseDir, domain, messageJson, federationList, debug)
_acceptFollow(baseDir, domain, messageJson, federationList, debug)
if debug:
print('DEBUG: Uh, ' + messageJson['type'] + ', I guess')
return True


@ -183,135 +183,6 @@ def announcePublic(session, baseDir: str, federationList: [],
debug, projectVersion)
def repeatPost(session, baseDir: str, federationList: [],
nickname: str, domain: str, port: int, httpPrefix: str,
announceNickname: str, announceDomain: str,
announcePort: int, announceHttpsPrefix: str,
announceStatusNumber: int, clientToServer: bool,
sendThreads: [], postLog: [],
personCache: {}, cachedWebfingers: {},
debug: bool, projectVersion: str) -> {}:
"""Repeats a given status post
"""
announcedDomain = getFullDomain(announceDomain, announcePort)
objectUrl = announceHttpsPrefix + '://' + announcedDomain + '/users/' + \
announceNickname + '/statuses/' + str(announceStatusNumber)
return announcePublic(session, baseDir, federationList,
nickname, domain, port, httpPrefix,
objectUrl, clientToServer,
sendThreads, postLog,
personCache, cachedWebfingers,
debug, projectVersion)
def undoAnnounce(session, baseDir: str, federationList: [],
nickname: str, domain: str, port: int,
toUrl: str, ccUrl: str, httpPrefix: str,
objectUrl: str, saveToFile: bool,
clientToServer: bool,
sendThreads: [], postLog: [],
personCache: {}, cachedWebfingers: {},
debug: bool) -> {}:
"""Undoes an announce message
Typically toUrl will be https://www.w3.org/ns/activitystreams#Public
and ccUrl might be a specific person whose post was repeated and the
objectUrl is typically the url of the message which was repeated,
corresponding to url or atomUri in createPostBase
"""
if not urlPermitted(objectUrl, federationList):
return None
if ':' in domain:
domain = domain.split(':')[0]
fullDomain = getFullDomain(domain, port)
newUndoAnnounce = {
"@context": "https://www.w3.org/ns/activitystreams",
'actor': httpPrefix+'://'+fullDomain+'/users/'+nickname,
'type': 'Undo',
'cc': [],
'to': [toUrl],
'object': {
'actor': httpPrefix+'://'+fullDomain+'/users/'+nickname,
'cc': [],
'object': objectUrl,
'to': [toUrl],
'type': 'Announce'
}
}
if ccUrl:
if len(ccUrl) > 0:
newUndoAnnounce['object']['cc'] = [ccUrl]
announceNickname = None
announceDomain = None
announcePort = None
if '/users/' in objectUrl or \
'/accounts/' in objectUrl or \
'/channel/' in objectUrl or \
'/profile/' in objectUrl:
announceNickname = getNicknameFromActor(objectUrl)
announceDomain, announcePort = getDomainFromActor(objectUrl)
if announceNickname and announceDomain:
sendSignedJson(newUndoAnnounce, session, baseDir,
nickname, domain, port,
announceNickname, announceDomain, announcePort,
'https://www.w3.org/ns/activitystreams#Public',
httpPrefix, True, clientToServer, federationList,
sendThreads, postLog, cachedWebfingers,
personCache, debug)
return newUndoAnnounce
def undoAnnouncePublic(session, baseDir: str, federationList: [],
nickname: str, domain: str, port: int, httpPrefix: str,
objectUrl: str, clientToServer: bool,
sendThreads: [], postLog: [],
personCache: {}, cachedWebfingers: {},
debug: bool) -> {}:
"""Undoes a public announcement
"""
fromDomain = getFullDomain(domain, port)
toUrl = 'https://www.w3.org/ns/activitystreams#Public'
ccUrl = httpPrefix + '://' + fromDomain + '/users/' + nickname + \
'/followers'
return undoAnnounce(session, baseDir, federationList,
nickname, domain, port,
toUrl, ccUrl, httpPrefix,
objectUrl, True, clientToServer,
sendThreads, postLog,
personCache, cachedWebfingers,
debug)
def undoRepeatPost(session, baseDir: str, federationList: [],
nickname: str, domain: str, port: int, httpPrefix: str,
announceNickname: str, announceDomain: str,
announcePort: int, announceHttpsPrefix: str,
announceStatusNumber: int, clientToServer: bool,
sendThreads: [], postLog: [],
personCache: {}, cachedWebfingers: {},
debug: bool) -> {}:
"""Undoes a status post repeat
"""
announcedDomain = getFullDomain(announceDomain, announcePort)
objectUrl = announceHttpsPrefix + '://' + announcedDomain + '/users/' + \
announceNickname + '/statuses/' + str(announceStatusNumber)
return undoAnnouncePublic(session, baseDir, federationList,
nickname, domain, port, httpPrefix,
objectUrl, clientToServer,
sendThreads, postLog,
personCache, cachedWebfingers,
debug)
def sendAnnounceViaServer(baseDir: str, session,
fromNickname: str, password: str,
fromDomain: str, fromPort: int,

auth.py

@ -14,7 +14,7 @@ import secrets
from utils import isSystemAccount
def hashPassword(password: str) -> str:
def _hashPassword(password: str) -> str:
"""Hash a password for storing
"""
salt = hashlib.sha256(os.urandom(60)).hexdigest().encode('ascii')
@ -25,7 +25,7 @@ def hashPassword(password: str) -> str:
return (salt + pwdhash).decode('ascii')
def getPasswordHash(salt: str, providedPassword: str) -> str:
def _getPasswordHash(salt: str, providedPassword: str) -> str:
"""Returns the hash of a password
"""
pwdhash = hashlib.pbkdf2_hmac('sha512',
@ -57,7 +57,7 @@ def constantTimeStringCheck(string1: str, string2: str) -> bool:
return matched
def verifyPassword(storedPassword: str, providedPassword: str) -> bool:
def _verifyPassword(storedPassword: str, providedPassword: str) -> bool:
"""Verify a stored password against one provided by user
"""
if not storedPassword:
@ -66,7 +66,7 @@ def verifyPassword(storedPassword: str, providedPassword: str) -> bool:
return False
salt = storedPassword[:64]
storedPassword = storedPassword[64:]
pwHash = getPasswordHash(salt, providedPassword)
pwHash = _getPasswordHash(salt, providedPassword)
return constantTimeStringCheck(pwHash, storedPassword)
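For reference, a minimal self-contained sketch of the salted PBKDF2 scheme these private helpers implement: the stored value is a 64-character hex salt followed by the hex PBKDF2-SHA512 digest. The iteration count below is an assumption, and the real code compares hashes with constantTimeStringCheck rather than ==.

import binascii
import hashlib
import os

def hash_password(password: str) -> str:
    # 64 hex characters of salt, then the hex PBKDF2-SHA512 digest
    salt = hashlib.sha256(os.urandom(60)).hexdigest().encode('ascii')
    pwdhash = hashlib.pbkdf2_hmac('sha512', password.encode('utf-8'),
                                  salt, 100000)  # iteration count assumed
    return (salt + binascii.hexlify(pwdhash)).decode('ascii')

def verify_password(stored: str, provided: str) -> bool:
    salt, digest = stored[:64], stored[64:]
    pwdhash = hashlib.pbkdf2_hmac('sha512', provided.encode('utf-8'),
                                  salt.encode('ascii'), 100000)
    # the diff above uses constantTimeStringCheck for this comparison
    return binascii.hexlify(pwdhash).decode('ascii') == digest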
@ -137,7 +137,7 @@ def authorizeBasic(baseDir: str, path: str, authHeader: str,
if line.startswith(nickname+':'):
storedPassword = \
line.split(':')[1].replace('\n', '').replace('\r', '')
success = verifyPassword(storedPassword, providedPassword)
success = _verifyPassword(storedPassword, providedPassword)
if not success:
if debug:
print('DEBUG: Password check failed for ' + nickname)
@ -159,7 +159,7 @@ def storeBasicCredentials(baseDir: str, nickname: str, password: str) -> bool:
os.mkdir(baseDir + '/accounts')
passwordFile = baseDir + '/accounts/passwords'
storeStr = nickname + ':' + hashPassword(password)
storeStr = nickname + ':' + _hashPassword(password)
if os.path.isfile(passwordFile):
if nickname + ':' in open(passwordFile).read():
with open(passwordFile, "r") as fin:

blog.py

@ -26,9 +26,9 @@ from newswire import rss2Header
from newswire import rss2Footer
def noOfBlogReplies(baseDir: str, httpPrefix: str, translate: {},
nickname: str, domain: str, domainFull: str,
postId: str, depth=0) -> int:
def _noOfBlogReplies(baseDir: str, httpPrefix: str, translate: {},
nickname: str, domain: str, domainFull: str,
postId: str, depth=0) -> int:
"""Returns the number of replies on the post
This is recursive, so can handle replies to replies
"""
@ -66,9 +66,10 @@ def noOfBlogReplies(baseDir: str, httpPrefix: str, translate: {},
replyPostId = replyPostId.replace('.json', '')
if locatePost(baseDir, nickname, domain, replyPostId):
replyPostId = replyPostId.replace('.replies', '')
replies += 1 + noOfBlogReplies(baseDir, httpPrefix, translate,
nickname, domain, domainFull,
replyPostId, depth+1)
replies += \
1 + _noOfBlogReplies(baseDir, httpPrefix, translate,
nickname, domain, domainFull,
replyPostId, depth+1)
else:
# remove post which no longer exists
removals.append(replyPostId)
@ -86,9 +87,9 @@ def noOfBlogReplies(baseDir: str, httpPrefix: str, translate: {},
return replies
def getBlogReplies(baseDir: str, httpPrefix: str, translate: {},
nickname: str, domain: str, domainFull: str,
postId: str, depth=0) -> str:
def _getBlogReplies(baseDir: str, httpPrefix: str, translate: {},
nickname: str, domain: str, domainFull: str,
postId: str, depth=0) -> str:
"""Returns a string containing html blog posts
"""
if depth > 4:
@ -136,9 +137,9 @@ def getBlogReplies(baseDir: str, httpPrefix: str, translate: {},
continue
with open(postFilename, "r") as postFile:
repliesStr += postFile.read() + '\n'
rply = getBlogReplies(baseDir, httpPrefix, translate,
nickname, domain, domainFull,
replyPostId, depth+1)
rply = _getBlogReplies(baseDir, httpPrefix, translate,
nickname, domain, domainFull,
replyPostId, depth+1)
if rply not in repliesStr:
repliesStr += rply
@ -152,12 +153,12 @@ def getBlogReplies(baseDir: str, httpPrefix: str, translate: {},
return ''
def htmlBlogPostContent(authorized: bool,
baseDir: str, httpPrefix: str, translate: {},
nickname: str, domain: str, domainFull: str,
postJsonObject: {},
handle: str, restrictToDomain: bool,
blogSeparator='<hr>') -> str:
def _htmlBlogPostContent(authorized: bool,
baseDir: str, httpPrefix: str, translate: {},
nickname: str, domain: str, domainFull: str,
postJsonObject: {},
handle: str, restrictToDomain: bool,
blogSeparator='<hr>') -> str:
"""Returns the content for a single blog post
"""
linkedAuthor = False
@ -269,9 +270,9 @@ def htmlBlogPostContent(authorized: bool,
'/users/' + nickname + '">' + translate['About the author'] + \
'</a></p>\n'
replies = noOfBlogReplies(baseDir, httpPrefix, translate,
nickname, domain, domainFull,
postJsonObject['object']['id'])
replies = _noOfBlogReplies(baseDir, httpPrefix, translate,
nickname, domain, domainFull,
postJsonObject['object']['id'])
# separator between blogs should be centered
if '<center>' not in blogSeparator:
@ -288,23 +289,23 @@ def htmlBlogPostContent(authorized: bool,
else:
blogStr += blogSeparator + '<h1>' + translate['Replies'] + '</h1>\n'
if not titleStr:
blogStr += getBlogReplies(baseDir, httpPrefix, translate,
nickname, domain, domainFull,
postJsonObject['object']['id'])
blogStr += _getBlogReplies(baseDir, httpPrefix, translate,
nickname, domain, domainFull,
postJsonObject['object']['id'])
else:
blogRepliesStr = getBlogReplies(baseDir, httpPrefix, translate,
nickname, domain, domainFull,
postJsonObject['object']['id'])
blogRepliesStr = _getBlogReplies(baseDir, httpPrefix, translate,
nickname, domain, domainFull,
postJsonObject['object']['id'])
blogStr += blogRepliesStr.replace('>' + titleStr + '<', '')
return blogStr
def htmlBlogPostRSS2(authorized: bool,
baseDir: str, httpPrefix: str, translate: {},
nickname: str, domain: str, domainFull: str,
postJsonObject: {},
handle: str, restrictToDomain: bool) -> str:
def _htmlBlogPostRSS2(authorized: bool,
baseDir: str, httpPrefix: str, translate: {},
nickname: str, domain: str, domainFull: str,
postJsonObject: {},
handle: str, restrictToDomain: bool) -> str:
"""Returns the RSS version 2 feed for a single blog post
"""
rssStr = ''
@ -331,11 +332,11 @@ def htmlBlogPostRSS2(authorized: bool,
return rssStr
def htmlBlogPostRSS3(authorized: bool,
baseDir: str, httpPrefix: str, translate: {},
nickname: str, domain: str, domainFull: str,
postJsonObject: {},
handle: str, restrictToDomain: bool) -> str:
def _htmlBlogPostRSS3(authorized: bool,
baseDir: str, httpPrefix: str, translate: {},
nickname: str, domain: str, domainFull: str,
postJsonObject: {},
handle: str, restrictToDomain: bool) -> str:
"""Returns the RSS version 3 feed for a single blog post
"""
rssStr = ''
@ -359,7 +360,7 @@ def htmlBlogPostRSS3(authorized: bool,
return rssStr
def htmlBlogRemoveCwButton(blogStr: str, translate: {}) -> str:
def _htmlBlogRemoveCwButton(blogStr: str, translate: {}) -> str:
"""Removes the CW button from blog posts, where the
summary field is instead used as the blog title
"""
@ -383,13 +384,13 @@ def htmlBlogPost(authorized: bool,
if os.path.isfile(baseDir + '/blog.css'):
cssFilename = baseDir + '/blog.css'
blogStr = htmlHeaderWithExternalStyle(cssFilename)
htmlBlogRemoveCwButton(blogStr, translate)
_htmlBlogRemoveCwButton(blogStr, translate)
blogStr += htmlBlogPostContent(authorized, baseDir,
httpPrefix, translate,
nickname, domain,
domainFull, postJsonObject,
None, False)
blogStr += _htmlBlogPostContent(authorized, baseDir,
httpPrefix, translate,
nickname, domain,
domainFull, postJsonObject,
None, False)
# show rss links
blogStr += '<p class="rssfeed">'
@ -428,7 +429,7 @@ def htmlBlogPage(authorized: bool, session,
if os.path.isfile(baseDir + '/epicyon.css'):
cssFilename = baseDir + '/epicyon.css'
blogStr = htmlHeaderWithExternalStyle(cssFilename)
htmlBlogRemoveCwButton(blogStr, translate)
_htmlBlogRemoveCwButton(blogStr, translate)
blogsIndex = baseDir + '/accounts/' + \
nickname + '@' + domain + '/tlblogs.index'
@ -472,11 +473,11 @@ def htmlBlogPage(authorized: bool, session,
if item['type'] != 'Create':
continue
blogStr += htmlBlogPostContent(authorized, baseDir,
httpPrefix, translate,
nickname, domain,
domainFull, item,
None, True)
blogStr += _htmlBlogPostContent(authorized, baseDir,
httpPrefix, translate,
nickname, domain,
domainFull, item,
None, True)
if len(timelineJson['orderedItems']) >= noOfItems:
blogStr += navigateStr
@ -544,11 +545,11 @@ def htmlBlogPageRSS2(authorized: bool, session,
continue
blogRSS2 += \
htmlBlogPostRSS2(authorized, baseDir,
httpPrefix, translate,
nickname, domain,
domainFull, item,
None, True)
_htmlBlogPostRSS2(authorized, baseDir,
httpPrefix, translate,
nickname, domain,
domainFull, item,
None, True)
if includeHeader:
return blogRSS2 + rss2Footer()
@ -590,35 +591,16 @@ def htmlBlogPageRSS3(authorized: bool, session,
continue
blogRSS3 += \
htmlBlogPostRSS3(authorized, baseDir,
httpPrefix, translate,
nickname, domain,
domainFull, item,
None, True)
_htmlBlogPostRSS3(authorized, baseDir,
httpPrefix, translate,
nickname, domain,
domainFull, item,
None, True)
return blogRSS3
def getBlogIndexesForAccounts(baseDir: str) -> {}:
""" Get the index files for blogs for each account
and add them to a dict
"""
blogIndexes = {}
for subdir, dirs, files in os.walk(baseDir + '/accounts'):
for acct in dirs:
if '@' not in acct:
continue
if 'inbox@' in acct:
continue
accountDir = os.path.join(baseDir + '/accounts', acct)
blogsIndex = accountDir + '/tlblogs.index'
if os.path.isfile(blogsIndex):
blogIndexes[acct] = blogsIndex
break
return blogIndexes
def noOfBlogAccounts(baseDir: str) -> int:
def _noOfBlogAccounts(baseDir: str) -> int:
"""Returns the number of blog accounts
"""
ctr = 0
@ -636,7 +618,7 @@ def noOfBlogAccounts(baseDir: str) -> int:
return ctr
def singleBlogAccountNickname(baseDir: str) -> str:
def _singleBlogAccountNickname(baseDir: str) -> str:
"""Returns the nickname of a single blog account
"""
for subdir, dirs, files in os.walk(baseDir + '/accounts'):
@ -666,8 +648,8 @@ def htmlBlogView(authorized: bool,
cssFilename = baseDir + '/epicyon.css'
blogStr = htmlHeaderWithExternalStyle(cssFilename)
if noOfBlogAccounts(baseDir) <= 1:
nickname = singleBlogAccountNickname(baseDir)
if _noOfBlogAccounts(baseDir) <= 1:
nickname = _singleBlogAccountNickname(baseDir)
if nickname:
return htmlBlogPage(authorized, session,
baseDir, httpPrefix, translate,


@ -39,17 +39,7 @@ alphabet = \
alphabet_values = dict(zip(alphabet, range(len(alphabet))))
def base83_decode(base83_str):
"""
Decodes a base83 string, as used in blurhash, to an integer.
"""
value = 0
for base83_char in base83_str:
value = value * 83 + alphabet_values[base83_char]
return value
def base83_encode(value, length):
def _base83_encode(value, length):
"""
Encodes an integer as a base83 string, as used in blurhash.
@ -67,7 +57,7 @@ def base83_encode(value, length):
return result
def srgb_to_linear(value):
def _srgb_to_linear(value):
"""
srgb 0-255 integer to linear 0.0-1.0 floating point conversion.
"""
@ -77,14 +67,14 @@ def srgb_to_linear(value):
return math.pow((value + 0.055) / 1.055, 2.4)
def sign_pow(value, exp):
def _sign_pow(value, exp):
"""
Sign-preserving exponentiation.
"""
return math.copysign(math.pow(abs(value), exp), value)
def linear_to_srgb(value):
def _linear_to_srgb(value):
"""
linear 0.0-1.0 floating point to srgb 0-255 integer conversion.
"""
@ -94,101 +84,6 @@ def linear_to_srgb(value):
return int((1.055 * math.pow(value, 1 / 2.4) - 0.055) * 255 + 0.5)
def blurhash_components(blurhash):
"""
Decodes and returns the number of x and y components in the given blurhash.
"""
if len(blurhash) < 6:
raise ValueError("BlurHash must be at least 6 characters long.")
# Decode metadata
size_info = base83_decode(blurhash[0])
size_y = int(size_info / 9) + 1
size_x = (size_info % 9) + 1
return size_x, size_y
def blurhash_decode(blurhash, width, height, punch=1.0, linear=False):
"""
Decodes the given blurhash to an image of the specified size.
Returns the resulting image a list of lists of 3-value sRGB 8 bit integer
lists. Set linear to True if you would prefer to get linear floating point
RGB back.
The punch parameter can be used to de- or increase the contrast of the
resulting image.
As per the original implementation it is suggested to only decode
to a relatively small size and then scale the result up, as it
basically looks the same anyways.
"""
if len(blurhash) < 6:
raise ValueError("BlurHash must be at least 6 characters long.")
# Decode metadata
size_info = base83_decode(blurhash[0])
size_y = int(size_info / 9) + 1
size_x = (size_info % 9) + 1
quant_max_value = base83_decode(blurhash[1])
real_max_value = (float(quant_max_value + 1) / 166.0) * punch
# Make sure we at least have the right number of characters
if len(blurhash) != 4 + 2 * size_x * size_y:
raise ValueError("Invalid BlurHash length.")
# Decode DC component
dc_value = base83_decode(blurhash[2:6])
colours = [(
srgb_to_linear(dc_value >> 16),
srgb_to_linear((dc_value >> 8) & 255),
srgb_to_linear(dc_value & 255)
)]
# Decode AC components
for component in range(1, size_x * size_y):
ac_value = base83_decode(blurhash[4+component*2:4+(component+1)*2])
colours.append((
sign_pow((float(int(ac_value / (19 * 19))) - 9.0)
/ 9.0, 2.0) * real_max_value,
sign_pow((float(int(ac_value / 19) % 19) - 9.0)
/ 9.0, 2.0) * real_max_value,
sign_pow((float(ac_value % 19) - 9.0)
/ 9.0, 2.0) * real_max_value
))
# Return image RGB values, as a list of lists of lists,
# consumable by something like numpy or PIL.
pixels = []
for y in range(height):
pixel_row = []
for x in range(width):
pixel = [0.0, 0.0, 0.0]
for j in range(size_y):
for i in range(size_x):
basis = \
math.cos(math.pi * float(x) * float(i) /
float(width)) * \
math.cos(math.pi * float(y) * float(j) / float(height))
colour = colours[i + j * size_x]
pixel[0] += colour[0] * basis
pixel[1] += colour[1] * basis
pixel[2] += colour[2] * basis
if linear is False:
pixel_row.append([
linear_to_srgb(pixel[0]),
linear_to_srgb(pixel[1]),
linear_to_srgb(pixel[2]),
])
else:
pixel_row.append(pixel)
pixels.append(pixel_row)
return pixels
def blurhash_encode(image, components_x=4, components_y=4, linear=False):
"""
Calculates the blurhash for an image using the given x and y
@ -218,9 +113,9 @@ def blurhash_encode(image, components_x=4, components_y=4, linear=False):
image_linear_line = []
for x in range(int(width)):
image_linear_line.append([
srgb_to_linear(image[y][x][0]),
srgb_to_linear(image[y][x][1]),
srgb_to_linear(image[y][x][2])
_srgb_to_linear(image[y][x][0]),
_srgb_to_linear(image[y][x][1]),
_srgb_to_linear(image[y][x][2])
])
image_linear.append(image_linear_line)
else:
@ -254,9 +149,9 @@ def blurhash_encode(image, components_x=4, components_y=4, linear=False):
abs(component[1]), abs(component[2]))
# Encode components
dc_value = (linear_to_srgb(components[0][0]) << 16) + \
(linear_to_srgb(components[0][1]) << 8) + \
linear_to_srgb(components[0][2])
dc_value = (_linear_to_srgb(components[0][0]) << 16) + \
(_linear_to_srgb(components[0][1]) << 8) + \
_linear_to_srgb(components[0][2])
quant_max_ac_component = int(max(0, min(82,
math.floor(max_ac_component *
@ -268,9 +163,9 @@ def blurhash_encode(image, components_x=4, components_y=4, linear=False):
r2 = r / ac_component_norm_factor
g2 = g / ac_component_norm_factor
b2 = b / ac_component_norm_factor
r3 = math.floor(sign_pow(r2, 0.5) * 9.0 + 9.5)
g3 = math.floor(sign_pow(g2, 0.5) * 9.0 + 9.5)
b3 = math.floor(sign_pow(b2, 0.5) * 9.0 + 9.5)
r3 = math.floor(_sign_pow(r2, 0.5) * 9.0 + 9.5)
g3 = math.floor(_sign_pow(g2, 0.5) * 9.0 + 9.5)
b3 = math.floor(_sign_pow(b2, 0.5) * 9.0 + 9.5)
ac_values.append(
int(max(0.0, min(18.0, r3))) * 19 * 19 +
int(max(0.0, min(18.0, g3))) * 19 +
@ -279,10 +174,11 @@ def blurhash_encode(image, components_x=4, components_y=4, linear=False):
# Build final blurhash
blurhash = ""
blurhash += base83_encode((components_x - 1) + (components_y - 1) * 9, 1)
blurhash += base83_encode(quant_max_ac_component, 1)
blurhash += base83_encode(dc_value, 4)
blurhashValue = (components_x - 1) + (components_y - 1) * 9
blurhash += _base83_encode(blurhashValue, 1)
blurhash += _base83_encode(quant_max_ac_component, 1)
blurhash += _base83_encode(dc_value, 4)
for ac_value in ac_values:
blurhash += base83_encode(ac_value, 2)
blurhash += _base83_encode(ac_value, 2)
return blurhash
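A hypothetical usage sketch for blurhash_encode as defined above; the image argument is a list of rows, each row a list of [R, G, B] values in the 0-255 range (the 4x4 test image below is made up for illustration):

# tiny 4x4 solid-colour test image (values are arbitrary)
image = [[[120, 80, 200] for _x in range(4)] for _y in range(4)]
hashStr = blurhash_encode(image, components_x=4, components_y=4)
print(hashStr)  # a short base83 blurhash string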


@ -18,10 +18,6 @@ from utils import locatePost
from utils import getCachedPostFilename
from utils import loadJson
from utils import saveJson
from session import postJson
from webfinger import webfingerHandle
from auth import createBasicAuthHeader
from posts import getPersonBox
def undoBookmarksCollectionEntry(recentPostsCache: {},
@ -111,7 +107,7 @@ def undoBookmarksCollectionEntry(recentPostsCache: {},
def bookmarkedByPerson(postJsonObject: {}, nickname: str, domain: str) -> bool:
"""Returns True if the given post is bookmarked by the given person
"""
if noOfBookmarks(postJsonObject) == 0:
if _noOfBookmarks(postJsonObject) == 0:
return False
actorMatch = domain + '/users/' + nickname
for item in postJsonObject['object']['bookmarks']['items']:
@ -120,7 +116,7 @@ def bookmarkedByPerson(postJsonObject: {}, nickname: str, domain: str) -> bool:
return False
def noOfBookmarks(postJsonObject: {}) -> int:
def _noOfBookmarks(postJsonObject: {}) -> int:
"""Returns the number of bookmarks ona given post
"""
if not postJsonObject.get('object'):
@ -283,32 +279,6 @@ def bookmark(recentPostsCache: {},
return newBookmarkJson
def bookmarkPost(recentPostsCache: {},
session, baseDir: str, federationList: [],
nickname: str, domain: str, port: int, httpPrefix: str,
bookmarkNickname: str, bookmarkedomain: str,
bookmarkPort: int,
ccList: [],
bookmarkStatusNumber: int, clientToServer: bool,
sendThreads: [], postLog: [],
personCache: {}, cachedWebfingers: {},
debug: bool, projectVersion: str) -> {}:
"""Bookmarks a given status post. This is only used by unit tests
"""
bookmarkedomain = getFullDomain(bookmarkedomain, bookmarkPort)
actorBookmarked = httpPrefix + '://' + bookmarkedomain + \
'/users/' + bookmarkNickname
objectUrl = actorBookmarked + '/statuses/' + str(bookmarkStatusNumber)
return bookmark(recentPostsCache,
session, baseDir, federationList, nickname, domain, port,
ccList, httpPrefix, objectUrl, actorBookmarked,
clientToServer,
sendThreads, postLog, personCache, cachedWebfingers,
debug, projectVersion)
def undoBookmark(recentPostsCache: {},
session, baseDir: str, federationList: [],
nickname: str, domain: str, port: int,
@ -375,180 +345,6 @@ def undoBookmark(recentPostsCache: {},
return newUndoBookmarkJson
def undoBookmarkPost(session, baseDir: str, federationList: [],
nickname: str, domain: str, port: int, httpPrefix: str,
bookmarkNickname: str, bookmarkedomain: str,
bookmarkPort: int, ccList: [],
bookmarkStatusNumber: int, clientToServer: bool,
sendThreads: [], postLog: [],
personCache: {}, cachedWebfingers: {},
debug: bool) -> {}:
"""Removes a bookmarked post
"""
bookmarkedomain = getFullDomain(bookmarkedomain, bookmarkPort)
objectUrl = httpPrefix + '://' + bookmarkedomain + \
'/users/' + bookmarkNickname + \
'/statuses/' + str(bookmarkStatusNumber)
return undoBookmark(session, baseDir, federationList,
nickname, domain, port,
ccList, httpPrefix, objectUrl, clientToServer,
sendThreads, postLog, personCache,
cachedWebfingers, debug)
def sendBookmarkViaServer(baseDir: str, session,
fromNickname: str, password: str,
fromDomain: str, fromPort: int,
httpPrefix: str, bookmarkUrl: str,
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str) -> {}:
"""Creates a bookmark via c2s
"""
if not session:
print('WARN: No session for sendBookmarkViaServer')
return 6
fromDomainFull = getFullDomain(fromDomain, fromPort)
newBookmarkJson = {
"@context": "https://www.w3.org/ns/activitystreams",
'type': 'Bookmark',
'actor': httpPrefix+'://'+fromDomainFull+'/users/'+fromNickname,
'object': bookmarkUrl
}
handle = httpPrefix + '://' + fromDomainFull + '/@' + fromNickname
# lookup the inbox for the To handle
wfRequest = webfingerHandle(session, handle, httpPrefix,
cachedWebfingers,
fromDomain, projectVersion)
if not wfRequest:
if debug:
print('DEBUG: announce webfinger failed for ' + handle)
return 1
if not isinstance(wfRequest, dict):
print('WARN: Webfinger for ' + handle + ' did not return a dict. ' +
str(wfRequest))
return 1
postToBox = 'outbox'
# get the actor inbox for the To handle
(inboxUrl, pubKeyId, pubKey,
fromPersonId, sharedInbox, avatarUrl,
displayName) = getPersonBox(baseDir, session, wfRequest, personCache,
projectVersion, httpPrefix, fromNickname,
fromDomain, postToBox, 72483)
if not inboxUrl:
if debug:
print('DEBUG: No ' + postToBox + ' was found for ' + handle)
return 3
if not fromPersonId:
if debug:
print('DEBUG: No actor was found for ' + handle)
return 4
authHeader = createBasicAuthHeader(fromNickname, password)
headers = {
'host': fromDomain,
'Content-type': 'application/json',
'Authorization': authHeader
}
postResult = postJson(session, newBookmarkJson, [],
inboxUrl, headers)
if not postResult:
if debug:
print('DEBUG: POST announce failed for c2s to ' + inboxUrl)
return 5
if debug:
print('DEBUG: c2s POST bookmark success')
return newBookmarkJson
def sendUndoBookmarkViaServer(baseDir: str, session,
fromNickname: str, password: str,
fromDomain: str, fromPort: int,
httpPrefix: str, bookmarkUrl: str,
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str) -> {}:
"""Undo a bookmark via c2s
"""
if not session:
print('WARN: No session for sendUndoBookmarkViaServer')
return 6
fromDomainFull = getFullDomain(fromDomain, fromPort)
newUndoBookmarkJson = {
"@context": "https://www.w3.org/ns/activitystreams",
'type': 'Undo',
'actor': httpPrefix+'://'+fromDomainFull+'/users/'+fromNickname,
'object': {
'type': 'Bookmark',
'actor': httpPrefix+'://'+fromDomainFull+'/users/'+fromNickname,
'object': bookmarkUrl
}
}
handle = httpPrefix + '://' + fromDomainFull + '/@' + fromNickname
# lookup the inbox for the To handle
wfRequest = webfingerHandle(session, handle, httpPrefix, cachedWebfingers,
fromDomain, projectVersion)
if not wfRequest:
if debug:
print('DEBUG: announce webfinger failed for ' + handle)
return 1
if not isinstance(wfRequest, dict):
print('WARN: Webfinger for ' + handle + ' did not return a dict. ' +
str(wfRequest))
return 1
postToBox = 'outbox'
# get the actor inbox for the To handle
(inboxUrl, pubKeyId, pubKey,
fromPersonId, sharedInbox, avatarUrl,
displayName) = getPersonBox(baseDir, session, wfRequest, personCache,
projectVersion, httpPrefix, fromNickname,
fromDomain, postToBox, 72528)
if not inboxUrl:
if debug:
print('DEBUG: No ' + postToBox + ' was found for ' + handle)
return 3
if not fromPersonId:
if debug:
print('DEBUG: No actor was found for ' + handle)
return 4
authHeader = createBasicAuthHeader(fromNickname, password)
headers = {
'host': fromDomain,
'Content-type': 'application/json',
'Authorization': authHeader
}
postResult = postJson(session, newUndoBookmarkJson, [],
inboxUrl, headers)
if not postResult:
if debug:
print('DEBUG: POST announce failed for c2s to ' + inboxUrl)
return 5
if debug:
print('DEBUG: c2s POST undo bookmark success')
return newUndoBookmarkJson
def outboxBookmark(recentPostsCache: {},
baseDir: str, httpPrefix: str,
nickname: str, domain: str, port: int,

categories.py (new file)

@ -0,0 +1,184 @@
__filename__ = "categories.py"
__author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.1.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__status__ = "Production"
import os
import datetime
def getHashtagCategory(baseDir: str, hashtag: str) -> str:
"""Returns the category for the hashtag
"""
categoryFilename = baseDir + '/tags/' + hashtag + '.category'
if not os.path.isfile(categoryFilename):
categoryFilename = baseDir + '/tags/' + hashtag.title() + '.category'
if not os.path.isfile(categoryFilename):
categoryFilename = \
baseDir + '/tags/' + hashtag.upper() + '.category'
if not os.path.isfile(categoryFilename):
return ''
with open(categoryFilename, 'r') as fp:
categoryStr = fp.read()
if categoryStr:
return categoryStr
return ''
def getHashtagCategories(baseDir: str, recent=False, category=None) -> {}:
"""Returns a dictionary containing hashtag categories
"""
hashtagCategories = {}
if recent:
currTime = datetime.datetime.utcnow()
daysSinceEpoch = (currTime - datetime.datetime(1970, 1, 1)).days
recently = daysSinceEpoch - 1
for subdir, dirs, files in os.walk(baseDir + '/tags'):
for f in files:
if not f.endswith('.category'):
continue
categoryFilename = os.path.join(baseDir + '/tags', f)
if not os.path.isfile(categoryFilename):
continue
hashtag = f.split('.')[0]
with open(categoryFilename, 'r') as fp:
categoryStr = fp.read()
if not categoryStr:
continue
if category:
# only return a dictionary for a specific category
if categoryStr != category:
continue
if recent:
tagsFilename = baseDir + '/tags/' + hashtag + '.txt'
if not os.path.isfile(tagsFilename):
continue
modTimesinceEpoc = \
os.path.getmtime(tagsFilename)
lastModifiedDate = \
datetime.datetime.fromtimestamp(modTimesinceEpoc)
fileDaysSinceEpoch = \
(lastModifiedDate -
datetime.datetime(1970, 1, 1)).days
if fileDaysSinceEpoch < recently:
continue
if not hashtagCategories.get(categoryStr):
hashtagCategories[categoryStr] = [hashtag]
else:
if hashtag not in hashtagCategories[categoryStr]:
hashtagCategories[categoryStr].append(hashtag)
break
return hashtagCategories
def _updateHashtagCategories(baseDir: str) -> None:
"""Regenerates the list of hashtag categories
"""
categoryListFilename = baseDir + '/accounts/categoryList.txt'
hashtagCategories = getHashtagCategories(baseDir)
if not hashtagCategories:
if os.path.isfile(categoryListFilename):
os.remove(categoryListFilename)
return
categoryList = []
for categoryStr, hashtagList in hashtagCategories.items():
categoryList.append(categoryStr)
categoryList.sort()
categoryListStr = ''
for categoryStr in categoryList:
categoryListStr += categoryStr + '\n'
# save a list of available categories for quick lookup
with open(categoryListFilename, 'w+') as fp:
fp.write(categoryListStr)
def _validHashtagCategory(category: str) -> bool:
"""Returns true if the category name is valid
"""
if not category:
return False
invalidChars = (',', ' ', '<', ';', '\\')
for ch in invalidChars:
if ch in category:
return False
# too long
if len(category) > 40:
return False
return True
def setHashtagCategory(baseDir: str, hashtag: str, category: str,
force=False) -> bool:
"""Sets the category for the hashtag
"""
if not _validHashtagCategory(category):
return False
if not force:
hashtagFilename = baseDir + '/tags/' + hashtag + '.txt'
if not os.path.isfile(hashtagFilename):
hashtag = hashtag.title()
hashtagFilename = baseDir + '/tags/' + hashtag + '.txt'
if not os.path.isfile(hashtagFilename):
hashtag = hashtag.upper()
hashtagFilename = baseDir + '/tags/' + hashtag + '.txt'
if not os.path.isfile(hashtagFilename):
return False
if not os.path.isdir(baseDir + '/tags'):
os.mkdir(baseDir + '/tags')
categoryFilename = baseDir + '/tags/' + hashtag + '.category'
if force:
# don't overwrite any existing categories
if os.path.isfile(categoryFilename):
return False
with open(categoryFilename, 'w+') as fp:
fp.write(category)
_updateHashtagCategories(baseDir)
return True
return False
def guessHashtagCategory(tagName: str, hashtagCategories: {}) -> str:
"""Tries to guess a category for the given hashtag.
This works by trying to find the longest similar hashtag
"""
categoryMatched = ''
tagMatchedLen = 0
for categoryStr, hashtagList in hashtagCategories.items():
for hashtag in hashtagList:
if len(hashtag) < 3:
# avoid matching very small strings which often
# lead to spurious categories
continue
if hashtag not in tagName:
if tagName not in hashtag:
continue
if not categoryMatched:
tagMatchedLen = len(hashtag)
categoryMatched = categoryStr
else:
# match the longest tag
if len(hashtag) > tagMatchedLen:
categoryMatched = categoryStr
if not categoryMatched:
return ''
return categoryMatched
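A hypothetical usage sketch for the new categories.py API (the base directory and tag names below are placeholders):

from categories import (setHashtagCategory, getHashtagCategory,
                        getHashtagCategories, guessHashtagCategory)

baseDir = '/var/www/epicyon'  # placeholder base directory

# force=True sets the category even if the hashtag has no posts yet
setHashtagCategory(baseDir, 'football', 'sport', force=True)
print(getHashtagCategory(baseDir, 'football'))   # 'sport'

# build the category -> hashtags dictionary, then guess a category for
# an unseen tag by longest-substring match against the known hashtags
hashtagCategories = getHashtagCategories(baseDir)
print(guessHashtagCategory('footballresults', hashtagCategories))  # 'sport'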


@ -33,7 +33,7 @@ def removeHtmlTag(htmlStr: str, tag: str) -> str:
return htmlStr
def removeQuotesWithinQuotes(content: str) -> str:
def _removeQuotesWithinQuotes(content: str) -> str:
"""Removes any blockquote inside blockquote
"""
if '<blockquote>' not in content:
@ -96,7 +96,7 @@ def htmlReplaceEmailQuote(content: str) -> str:
else:
lineStr = lineStr.replace('&gt;', '<br>')
newContent += '<p>' + lineStr + '</blockquote></p>'
return removeQuotesWithinQuotes(newContent)
return _removeQuotesWithinQuotes(newContent)
def htmlReplaceQuoteMarks(content: str) -> str:
@ -314,7 +314,7 @@ def replaceEmojiFromTags(content: str, tag: [], messageType: str) -> str:
return content
def addMusicTag(content: str, tag: str) -> str:
def _addMusicTag(content: str, tag: str) -> str:
"""If a music link is found then ensure that the post is
tagged appropriately
"""
@ -416,8 +416,8 @@ def validHashTag(hashtag: str) -> bool:
return False
def addHashTags(wordStr: str, httpPrefix: str, domain: str,
replaceHashTags: {}, postHashtags: {}) -> bool:
def _addHashTags(wordStr: str, httpPrefix: str, domain: str,
replaceHashTags: {}, postHashtags: {}) -> bool:
"""Detects hashtags and adds them to the replacements dict
Also updates the hashtags list to be added to the post
"""
@ -438,38 +438,10 @@ def addHashTags(wordStr: str, httpPrefix: str, domain: str,
return True
def loadEmojiDict(emojiDataFilename: str, emojiDict: {}) -> None:
"""Creates an emoji dictionary based on emoji/emoji-data.txt
"""
if not os.path.isfile(emojiDataFilename):
return
with open(emojiDataFilename, "r") as fileHandler:
for line in fileHandler:
if len(line) < 5:
continue
if line.startswith('#'):
continue
if '; Emoji' not in line:
continue
if ')' not in line:
continue
emojiUnicode = line.split(' ')[0]
if len(emojiUnicode) < 4:
continue
if '..' in emojiUnicode:
emojiUnicode = emojiUnicode.split('..')[0]
emojiName = line.split(')', 1)[1].strip()
emojiName = emojiName.replace('\n', '').replace('\r', '')
emojiName = emojiName.replace(' ', '').replace('-', '')
if '..' in emojiName:
emojiName = emojiName.split('..')[0]
emojiDict[emojiName.lower()] = emojiUnicode
def addEmoji(baseDir: str, wordStr: str,
httpPrefix: str, domain: str,
replaceEmoji: {}, postTags: {},
emojiDict: {}) -> bool:
def _addEmoji(baseDir: str, wordStr: str,
httpPrefix: str, domain: str,
replaceEmoji: {}, postTags: {},
emojiDict: {}) -> bool:
"""Detects Emoji and adds them to the replacements dict
Also updates the tags list to be added to the post
"""
@ -517,8 +489,8 @@ def tagExists(tagType: str, tagName: str, tags: {}) -> bool:
return False
def addMention(wordStr: str, httpPrefix: str, following: str,
replaceMentions: {}, recipients: [], tags: {}) -> bool:
def _addMention(wordStr: str, httpPrefix: str, following: str,
replaceMentions: {}, recipients: [], tags: {}) -> bool:
"""Detects mentions and adds them to the replacements dict and
recipients list
"""
@ -700,7 +672,7 @@ def removeLongWords(content: str, maxWordLength: int,
return content
def loadAutoTags(baseDir: str, nickname: str, domain: str) -> []:
def _loadAutoTags(baseDir: str, nickname: str, domain: str) -> []:
"""Loads automatic tags file and returns a list containing
the lines of the file
"""
@ -713,9 +685,9 @@ def loadAutoTags(baseDir: str, nickname: str, domain: str) -> []:
return []
def autoTag(baseDir: str, nickname: str, domain: str,
wordStr: str, autoTagList: [],
appendTags: []):
def _autoTag(baseDir: str, nickname: str, domain: str,
wordStr: str, autoTagList: [],
appendTags: []):
"""Generates a list of tags to be automatically appended to the content
"""
for tagRule in autoTagList:
@ -747,7 +719,7 @@ def addHtmlTags(baseDir: str, httpPrefix: str,
maxWordLength = 40
content = content.replace('\r', '')
content = content.replace('\n', ' --linebreak-- ')
content = addMusicTag(content, 'nowplaying')
content = _addMusicTag(content, 'nowplaying')
contentSimplified = \
content.replace(',', ' ').replace(';', ' ').replace('- ', ' ')
contentSimplified = contentSimplified.replace('. ', ' ').strip()
@ -788,7 +760,7 @@ def addHtmlTags(baseDir: str, httpPrefix: str,
# extract mentions and tags from words
longWordsList = []
prevWordStr = ''
autoTagsList = loadAutoTags(baseDir, nickname, domain)
autoTagsList = _loadAutoTags(baseDir, nickname, domain)
appendTags = []
for wordStr in words:
wordLen = len(wordStr)
@ -797,13 +769,13 @@ def addHtmlTags(baseDir: str, httpPrefix: str,
longWordsList.append(wordStr)
firstChar = wordStr[0]
if firstChar == '@':
if addMention(wordStr, httpPrefix, following,
replaceMentions, recipients, hashtags):
if _addMention(wordStr, httpPrefix, following,
replaceMentions, recipients, hashtags):
prevWordStr = ''
continue
elif firstChar == '#':
if addHashTags(wordStr, httpPrefix, originalDomain,
replaceHashTags, hashtags):
if _addHashTags(wordStr, httpPrefix, originalDomain,
replaceHashTags, hashtags):
prevWordStr = ''
continue
elif ':' in wordStr:
@ -819,18 +791,18 @@ def addHtmlTags(baseDir: str, httpPrefix: str,
emojiDict = loadJson(baseDir + '/emoji/emoji.json')
# print('TAG: looking up emoji for :'+wordStr2+':')
addEmoji(baseDir, ':' + wordStr2 + ':', httpPrefix,
originalDomain, replaceEmoji, hashtags,
emojiDict)
_addEmoji(baseDir, ':' + wordStr2 + ':', httpPrefix,
originalDomain, replaceEmoji, hashtags,
emojiDict)
else:
if autoTag(baseDir, nickname, domain, wordStr,
autoTagsList, appendTags):
if _autoTag(baseDir, nickname, domain, wordStr,
autoTagsList, appendTags):
prevWordStr = ''
continue
if prevWordStr:
if autoTag(baseDir, nickname, domain,
prevWordStr + ' ' + wordStr,
autoTagsList, appendTags):
if _autoTag(baseDir, nickname, domain,
prevWordStr + ' ' + wordStr,
autoTagsList, appendTags):
prevWordStr = ''
continue
prevWordStr = wordStr
@ -838,8 +810,8 @@ def addHtmlTags(baseDir: str, httpPrefix: str,
# add any auto generated tags
for appended in appendTags:
content = content + ' ' + appended
addHashTags(appended, httpPrefix, originalDomain,
replaceHashTags, hashtags)
_addHashTags(appended, httpPrefix, originalDomain,
replaceHashTags, hashtags)
# replace words with their html versions
for wordStr, replaceStr in replaceMentions.items():


@ -21,7 +21,6 @@ import pyqrcode
from hashlib import sha256
from hashlib import sha1
from session import createSession
from webfinger import parseHandle
from webfinger import webfingerMeta
from webfinger import webfingerNodeInfo
from webfinger import webfingerLookup
@ -59,7 +58,7 @@ from person import personBoxJson
from person import createSharedInbox
from person import createNewsInbox
from person import suspendAccount
from person import unsuspendAccount
from person import reenableAccount
from person import removeAccount
from person import canRemovePost
from person import personSnooze
@ -88,7 +87,7 @@ from inbox import populateReplies
from inbox import getPersonPubKey
from follow import getFollowingFeed
from follow import sendFollowRequest
from follow import unfollowPerson
from follow import unfollowAccount
from follow import createInitialLastSeen
from auth import authorize
from auth import createPassword
@ -174,7 +173,7 @@ from shares import removeShare
from shares import expireShares
from utils import getFullDomain
from utils import removeHtml
from utils import setHashtagCategory
from categories import setHashtagCategory
from utils import isEditor
from utils import getImageExtensions
from utils import mediaFileMimeType
@ -275,22 +274,6 @@ def saveDomainQrcode(baseDir: str, httpPrefix: str,
url.png(qrcodeFilename, scale)
def readFollowList(filename: str) -> None:
"""Returns a list of ActivityPub addresses to follow
"""
followlist = []
if not os.path.isfile(filename):
return followlist
followUsers = open(filename, "r")
for u in followUsers:
if u not in followlist:
nickname, domain = parseHandle(u)
if nickname:
followlist.append(nickname + '@' + domain)
followUsers.close()
return followlist
class PubServer(BaseHTTPRequestHandler):
protocol_version = 'HTTP/1.1'
@ -1091,13 +1074,14 @@ class PubServer(BaseHTTPRequestHandler):
beginSaveTime = time.time()
# save the json for later queue processing
messageBytesDecoded = messageBytes.decode('utf-8')
queueFilename = \
savePostToInboxQueue(self.server.baseDir,
self.server.httpPrefix,
nickname,
self.server.domainFull,
messageJson,
messageBytes.decode('utf-8'),
messageBytesDecoded,
headersDict,
self.path,
self.server.debug)
@ -1531,7 +1515,7 @@ class PubServer(BaseHTTPRequestHandler):
if moderationButton == 'suspend':
suspendAccount(baseDir, nickname, domain)
if moderationButton == 'unsuspend':
unsuspendAccount(baseDir, nickname)
reenableAccount(baseDir, nickname)
if moderationButton == 'filter':
addGlobalFilter(baseDir, moderationText)
if moderationButton == 'unfilter':
@ -2132,9 +2116,9 @@ class PubServer(BaseHTTPRequestHandler):
}
pathUsersSection = path.split('/users/')[1]
self.postToNickname = pathUsersSection.split('/')[0]
unfollowPerson(self.server.baseDir, self.postToNickname,
self.server.domain,
followingNickname, followingDomainFull)
unfollowAccount(self.server.baseDir, self.postToNickname,
self.server.domain,
followingNickname, followingDomainFull)
self._postToOutboxThread(unfollowJson)
if callingDomain.endswith('.onion') and onionDomain:
@ -2929,11 +2913,13 @@ class PubServer(BaseHTTPRequestHandler):
if self.postToNickname:
if monthStr and yearStr:
if monthStr.isdigit() and yearStr.isdigit():
yearInt = int(yearStr)
monthInt = int(monthStr)
removeCalendarEvent(baseDir,
self.postToNickname,
domain,
int(yearStr),
int(monthStr),
yearInt,
monthInt,
removeMessageId)
self._postToOutboxThread(deleteJson)
if callingDomain.endswith('.onion') and onionDomain:
@ -6854,6 +6840,7 @@ class PubServer(BaseHTTPRequestHandler):
recentPostsCache,
maxRecentPosts,
translate,
self.server.baseDir,
self.server.session,
cachedWebfingers,
personCache,
@ -7079,6 +7066,7 @@ class PubServer(BaseHTTPRequestHandler):
'show inbox page')
fullWidthTimelineButtonHeader = \
self.server.fullWidthTimelineButtonHeader
minimalNick = self._isMinimal(nickname)
msg = htmlInbox(self.server.cssCache,
defaultTimeline,
recentPostsCache,
@ -7096,7 +7084,7 @@ class PubServer(BaseHTTPRequestHandler):
allowDeletion,
httpPrefix,
projectVersion,
self._isMinimal(nickname),
minimalNick,
YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
@ -7202,6 +7190,7 @@ class PubServer(BaseHTTPRequestHandler):
self.server.votingTimeMins)
fullWidthTimelineButtonHeader = \
self.server.fullWidthTimelineButtonHeader
minimalNick = self._isMinimal(nickname)
msg = \
htmlInboxDMs(self.server.cssCache,
self.server.defaultTimeline,
@ -7220,7 +7209,7 @@ class PubServer(BaseHTTPRequestHandler):
self.server.allowDeletion,
httpPrefix,
self.server.projectVersion,
self._isMinimal(nickname),
minimalNick,
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
@ -7318,6 +7307,7 @@ class PubServer(BaseHTTPRequestHandler):
self.server.votingTimeMins)
fullWidthTimelineButtonHeader = \
self.server.fullWidthTimelineButtonHeader
minimalNick = self._isMinimal(nickname)
msg = \
htmlInboxReplies(self.server.cssCache,
self.server.defaultTimeline,
@ -7336,7 +7326,7 @@ class PubServer(BaseHTTPRequestHandler):
self.server.allowDeletion,
httpPrefix,
self.server.projectVersion,
self._isMinimal(nickname),
minimalNick,
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
@ -7434,6 +7424,7 @@ class PubServer(BaseHTTPRequestHandler):
self.server.votingTimeMins)
fullWidthTimelineButtonHeader = \
self.server.fullWidthTimelineButtonHeader
minimalNick = self._isMinimal(nickname)
msg = \
htmlInboxMedia(self.server.cssCache,
self.server.defaultTimeline,
@ -7452,7 +7443,7 @@ class PubServer(BaseHTTPRequestHandler):
self.server.allowDeletion,
httpPrefix,
self.server.projectVersion,
self._isMinimal(nickname),
minimalNick,
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
@ -7551,6 +7542,7 @@ class PubServer(BaseHTTPRequestHandler):
self.server.votingTimeMins)
fullWidthTimelineButtonHeader = \
self.server.fullWidthTimelineButtonHeader
minimalNick = self._isMinimal(nickname)
msg = \
htmlInboxBlogs(self.server.cssCache,
self.server.defaultTimeline,
@ -7569,7 +7561,7 @@ class PubServer(BaseHTTPRequestHandler):
self.server.allowDeletion,
httpPrefix,
self.server.projectVersion,
self._isMinimal(nickname),
minimalNick,
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
@ -7676,6 +7668,7 @@ class PubServer(BaseHTTPRequestHandler):
editor = isEditor(baseDir, currNickname)
fullWidthTimelineButtonHeader = \
self.server.fullWidthTimelineButtonHeader
minimalNick = self._isMinimal(nickname)
msg = \
htmlInboxNews(self.server.cssCache,
self.server.defaultTimeline,
@ -7694,7 +7687,7 @@ class PubServer(BaseHTTPRequestHandler):
self.server.allowDeletion,
httpPrefix,
self.server.projectVersion,
self._isMinimal(nickname),
minimalNick,
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
@ -7799,6 +7792,7 @@ class PubServer(BaseHTTPRequestHandler):
currNickname = currNickname.split('/')[0]
fullWidthTimelineButtonHeader = \
self.server.fullWidthTimelineButtonHeader
minimalNick = self._isMinimal(nickname)
msg = \
htmlInboxFeatures(self.server.cssCache,
self.server.defaultTimeline,
@ -7817,7 +7811,7 @@ class PubServer(BaseHTTPRequestHandler):
self.server.allowDeletion,
httpPrefix,
self.server.projectVersion,
self._isMinimal(nickname),
minimalNick,
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
@ -7984,6 +7978,7 @@ class PubServer(BaseHTTPRequestHandler):
self.server.votingTimeMins)
fullWidthTimelineButtonHeader = \
self.server.fullWidthTimelineButtonHeader
minimalNick = self._isMinimal(nickname)
msg = \
htmlBookmarks(self.server.cssCache,
self.server.defaultTimeline,
@ -8002,7 +7997,7 @@ class PubServer(BaseHTTPRequestHandler):
self.server.allowDeletion,
httpPrefix,
self.server.projectVersion,
self._isMinimal(nickname),
minimalNick,
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
@ -8104,6 +8099,7 @@ class PubServer(BaseHTTPRequestHandler):
self.server.votingTimeMins)
fullWidthTimelineButtonHeader = \
self.server.fullWidthTimelineButtonHeader
minimalNick = self._isMinimal(nickname)
msg = \
htmlEvents(self.server.cssCache,
self.server.defaultTimeline,
@ -8122,7 +8118,7 @@ class PubServer(BaseHTTPRequestHandler):
self.server.allowDeletion,
httpPrefix,
self.server.projectVersion,
self._isMinimal(nickname),
minimalNick,
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,
@ -8216,6 +8212,7 @@ class PubServer(BaseHTTPRequestHandler):
self.server.votingTimeMins)
fullWidthTimelineButtonHeader = \
self.server.fullWidthTimelineButtonHeader
minimalNick = self._isMinimal(nickname)
msg = \
htmlOutbox(self.server.cssCache,
self.server.defaultTimeline,
@ -8234,7 +8231,7 @@ class PubServer(BaseHTTPRequestHandler):
self.server.allowDeletion,
httpPrefix,
self.server.projectVersion,
self._isMinimal(nickname),
minimalNick,
self.server.YTReplacementDomain,
self.server.showPublishedDateOnly,
self.server.newswire,


@ -6,499 +6,499 @@
<title>sport</title>
<description>billiard darts swim motorsport snooker marathon hockey diving baseball Millwall sailing athletics skating skiing sport football</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>events</title>
<description>neverforget award OONIbday waybackwednesday notifications throwbackthursday adventskalender live Day deepthoughts thursdaythoughts humanrightsday followfriday wednesdaymotivation showerthoughts anarchymonday 100DaysToOffload ff holiday christmas week concert festival dontstarve onthisday livestream sunday screenshotsunday liverpool adayinthelife day InternationalCheetahDay interestingtimes meetup</description>
<link/>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>gafam</title>
<description>zuckerberg caringissharing apple antitrust GoogleDown bankruptBezos youtube ffs facebook interoperability amazon amazonring googleplus Facebook LeiharbeitAbschaffen advertising adtech fuckgoogle microsoft dtm twitter caffeine skype ff chrome hildebrandt youtubedl degoogled youtubers google sharingiscaring gis dt dotcoms deleteyoutube Instagram fascistbook FuckGoogle degoogle fuschia ungoogled ring affordances googledown gafam inspiring fuckoffgoogle deletefacebook office365 instagram MatrixEffect playstore bigtech</description>
<description>zuckerberg caringissharing apple antitrust GoogleDown bankruptBezos youtube ffs facebook interoperability amazon boycottinstagram amazonring googleplus degooglisation Facebook LeiharbeitAbschaffen advertising adtech fuckgoogle microsoft dtm twitter caffeine skype chrome hildebrandt youtubedl degoogled youtubers google sharingiscaring gis dt dotcoms deleteyoutube Instagram fascistbook FuckGoogle degoogle fuschia ungoogled ring affordances googledown gafam inspiring fuckoffgoogle deletefacebook office365 instagram MatrixEffect playstore bigtech</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>activitypub</title>
<description>followerpower Fediverse activitypub pleroma PeerTube webdev fediblock lazyfedi instances fedilab mastotips mastodev friendica misskey siskin followers fediart Pixelfed pixelfed fedidb block Fediseminar monal tusky peertubers imagedescription feditips fedizens Mastodon following epicyon mastomagic dev fediadmin pixeldev instanceblock isolategab fedireads PeertubeMastodonHost Bookwyrm socialhome fediverse smithereen mastodon fedi fediplay peertube lab mobilizon gemifedi</description>
<description>followerpower Fediverse activitypub pleroma PeerTube webdev fediblock lazyfedi federation instances fedilab mastotips mastodev friendica misskey siskin followers fediart Pixelfed pixelfed fediverseplaysjackbox fedidb block Fediseminar monal tusky peertubers imagedescription feditips fedizens Mastodon following epicyon mastomagic dev fediadmin pixeldev instanceblock mastodonmonday isolategab fedireads PeertubeMastodonHost Bookwyrm socialhome MastodonMondays fediverse smithereen mastodon fedi fediplay peertube lab mobilizon gemifedi</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>programming</title>
<description>Easer cpp report programming css Python FrancisBacon2020 mixers release ada schutzstreifen rustlang ocaml program matrix uptronics profiles typescript forums publiccode FreeSoftware rocketchat early adventofcode scripting warn discord spyware git trevornoah copyright daretocare fedidev c sourcecode publiekecode framaforms WendyLPatrick grep django kingparrot Leiharbeit programmer haskell Tarifvertrag frgmntscnr github openrc tuskydev algorithms lisp forge pleaseshare HirsuteHippo resnetting libraries drivers javascript fragment cpm code elisp TeamFerment patterns html terminal rust sauerkraut request spiritbomb r dramasystem clojurescript ruby peppertop contractpatch computers racket python kabelfernsehen OpenSource Scheibenwischer</description>
<description>Easer cpp report programming css tootfic objects Python FrancisBacon2020 mixers release ada schutzstreifen rustlang ocaml program request_reaction uptronics solidarity hypocritcal profiles typescript forums publiccode FreeSoftware vieprivée early adventofcode scripting warn spyware git trevornoah zinccoop daretocare fedidev c sourcecode publiekecode misc framaforms WendyLPatrick grep django gmic sackthelot relevance_P1Y kingparrot Leiharbeit programmer haskell Tarifvertrag frgmntscnr github openrc tuskydev threema algorithms lisp forge pleaseshare HirsuteHippo resnetting fourtwenty libraries drivers javascript fragment cpm code elisp patterns html terminal rust sauerkraut request spiritbomb r dramasystem documentary clojurescript ruby contractpatch computers racket relationships python kabelfernsehen alternatives OpenSource Scheibenwischer</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>writing</title>
<description>blog poem blogs interactivestorytelling WriteFreely goodreads journal poetry</description>
<description>blog authors poem smallstories blogs interactivestorytelling WriteFreely storytelling goodreads journal poetry</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>hardware</title>
<description>plugandplay PersonalComputer cyberdeck PineCUBE keyboards screenless modem analogcomputing TrueDelta keyboard cybredeck solarpunk lenovo fairelectronics ibm 3dprinting MechcanicalKeyboards openhardware raspberrypi barcode pinetime pinebookpro PinebookPro 3dprint arm paperComputer amd thinkpad print</description>
<description>plugandplay PersonalComputer cyberdeck PineCUBE keyboards screenless modem analogcomputing TrueDelta keyboard printmaker cybredeck laptop solarpunk recycling lenovo fairelectronics fuse ibm 3dprinting MechcanicalKeyboards openhardware raspberrypi barcode pinetime pinebookpro PinebookPro 3dprint arm paperComputer amd openpower thinkpad print</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>places</title>
<description>lapaz luanda asunción nouakchott conakry kyiv moscow saipan gibraltar dublin dannibleibt avarua hargeisa delhi niamey chișinău colombo brasília phnompenh mbabane belgrade belmopan pyongyang hannover ulaanbaatar oranjestad gaborone seattle ndjamena raw singapore kingedwardpoint abidjan nuuk pretoria papeete malé zagreb gitega abudhabi flyingfishcove castries georgetown hagåtña basseterre hamburg kinshasa suva valparaíso athens roseau baku charlotteamalie antananarivo domi pristina santiago sukhumi berlin uptronicsberlin funafuti libreville hanoi philipsburg tehran banjul prague andorralavella daw yerevan portauprince dakar paramaribo tifariti capetown tirana klima ankara ipswich managua lisbon bishkek amsterdam portonovo santodomingo bangkok bucharest kathmandu aden madrid sanjuan vienna kingston kabul damascus stockholm douglas willemstad thehague panamacity beirut amman newdelhi tórshavn nouméa oslo alofi gustavia paris video cockburntown ottawa stepanakert portofspain fsberlin honiara asmara florida nicosia helsinki taipei tegucigalpa tokyo tashkent MadeInEU sarajevo algiers nairobi muscat monaco riyadh lusaka wellington bissau juba mariehamn majuro buenosaires ngerulmud dhaka guatemalacity washington vatican kuwaitcity bern mexicocity bratislava bridgetown delhipolice tunis manila stanley matautu copenhagen barcelona lomé budapest ouagadougou mogadishu freetown victoria brazzaville portmoresby ashgabat kampala elaaiún vilnius bloemfontein sucre london marseille pagopago bradesestate oakland vaduz addis nürnberg naypyidaw CassetteNavigation khartoum baghdad bandar moroni portvila kingstown ChrisCrawford reykjavík manama accra windhoek nukualofa ciutatvella tbilisi canberra quito maputo cetinje putrajaya ramallah bogotá dodoma harare havana warsaw münster valletta localberlin ljubljana bamako kualalumpur podgorica rabat cotonou plymouth seoul Portland dushanbe bangui aotearoa westisland tskhinvali palikir caracas jamestown rome munich ass freestuffberlin sãotomé jakarta daressalaam sansalvador apia essex yaren cairo jerusalem brussels kigali southtarawa beijing minsk montevideo vientiane maseru hamilton doha tripoli portlouis lima adamstown abuja lilongwe nassau lobamba heathrow nyc montreal dili riga lesbos monrovia nursultan gab sanjosé marigot islamabad malabo tallinn sahara thimphu yaoundé praia bujumbura sofia skopje</description>
<description>lapaz luanda asunción nouakchott conakry kyiv moscow saipan gibraltar dublin dannibleibt avarua hargeisa delhi niamey chișinău colombo brasília phnompenh mbabane belgrade belmopan pyongyang hannover ulaanbaatar oranjestad gaborone seattle ndjamena raw singapore kingedwardpoint abidjan nuuk pretoria papeete malé zagreb gitega abudhabi flyingfishcove castries georgetown hagåtña borikua basseterre hamburg kinshasa suva valparaíso athens roseau baku charlotteamalie antananarivo domi pristina santiago sukhumi berlin uptronicsberlin funafuti libreville hanoi philipsburg tehran banjul prague andorralavella daw yerevan portauprince dakar paramaribo tifariti capetown tirana klima ankara ipswich managua lisbon bishkek amsterdam portonovo santodomingo bangkok bucharest kathmandu aden madrid sanjuan vienna kingston kabul damascus stockholm douglas willemstad thehague panamacity beirut amman newdelhi tórshavn nouméa oslo alofi gustavia paris video cockburntown ottawa stepanakert portofspain fsberlin honiara asmara florida nicosia helsinki taipei tegucigalpa tokyo tashkent larochelle MadeInEU sarajevo algiers nairobi muscat monaco riyadh lusaka wellington bissau juba mariehamn majuro buenosaires ngerulmud dhaka guatemalacity washington vatican kuwaitcity bern mexicocity bratislava bridgetown delhipolice tunis manila stanley matautu copenhagen barcelona lomé budapest ouagadougou mogadishu freetown victoria brazzaville portmoresby ashgabat kampala elaaiún vilnius bloemfontein sucre london marseille pagopago bradesestate oakland vaduz addis nürnberg naypyidaw CassetteNavigation khartoum baghdad bandar moroni lehavre portvila kingstown ChrisCrawford reykjavík manama accra windhoek nukualofa ciutatvella tbilisi canberra quito maputo cetinje putrajaya ramallah bogotá dodoma harare havana warsaw münster valletta localberlin ljubljana bamako kualalumpur podgorica rabat cotonou plymouth seoul Portland dushanbe bangui aotearoa westisland tskhinvali palikir caracas jamestown rome munich ass freestuffberlin sãotomé jakarta daressalaam sansalvador apia essex yaren cairo jerusalem brussels kigali southtarawa beijing minsk montevideo vientiane maseru hamilton doha tripoli celtic portlouis lima adamstown abuja lilongwe nassau lobamba heathrow nyc montreal dili riga lesbos monrovia nursultan gab sanjosé marigot islamabad malabo tallinn sahara thimphu yaoundé praia bujumbura sofia skopje</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>music</title>
<description>musicprodution punk ourbeats indiemusic streetpunk bandcamp musicians ipod skinheadmusic rap mp3 Music EnvoieStopHashtagAu81212 thecure vaporwave dubstep synthwave oi rave freemusic nowplaying hiphop experimentalmusic fedimusic soundcloud frankiegoestohollywood dj newwave dorkwave producing musicproduction NowPlaying libremusicproduction MusicAdvent coinkydink fedivers arianagrande synth music metal fediversemusic cyberpunkmusic BandcampFriday</description>
<description>musicprodution punk ourbeats indiemusic streetpunk bandcamp musicians jamendo ipod skinheadmusic rap mp3 Music EnvoieStopHashtagAu81212 thecure vaporwave dubstep synthwave oi rave freemusic nowplaying hiphop experimentalmusic fedimusic soundcloud frankiegoestohollywood dj newwave dorkwave producing musicproduction funkwhale NowPlaying libremusicproduction MusicAdvent coinkydink fedivers arianagrande synth music metal fediversemusic cyberpunkmusic BandcampFriday</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>politics</title>
<description>solidarity hypocritcal TakeOurPowerBack cia community wageslavery immigration dissent liberation fascism techtuesday skyofmywindow freedomofspeech rojava humanrights leftists Socialism ukpol FreeKeithLamar copwatch capitalismkills petition BorisJohnson freedom abolitionnow anarchism DefundThePolice technews smalltech oilwars bjp ThirdRunway election sky_of_my_window generalstrike antipolitics digitalfreedom mayday hatespeech fascists burntheprisons cyberlaw peerproduction corporations iww commons corporatewatch wageslave frontex communism RemoveThePolice neoliberalism socialecology MutualAid capitalism technology prisons conspiracytheories KeirStarmer politics inclusivity anarchisme nzpol Bookchin brexit totalitarianism TyskySour Labour green BAME decolonizeyourmind surfaceworldblows ecofascism SocietalChange facialrecognition propaganda decolonization digitalrights polizei xp 18Source redandanarchistskinheads PritiPatel latestagecapitalism racist elections RussellMaroonShoatz white prisoners warrants policebrutality borisjohnson Anarchist press mutuality whitehouse freedomofexpression censorship decolonize decenterwhiteness Biden ChineseAppBan cooperative modi law deathtoamerica manipulation britpol Capitalism surveillancecapitalism leftist Revolution ukpolitics blacklivesmatter FreeAlabamaMovement rentstrike dsa techno migration mutualaid multipleexposure AbolishPrison fascist socialcoop anarchistprisoners polizeiproblem wordpress uselection IDPol ourstreets refugees acab freewestpapua tech</description>
<description>TakeOurPowerBack cia community wageslavery immigration dissent liberation fascism techtuesday skyofmywindow freedomofspeech rojava humanrights leftists Socialism ukpol FreeKeithLamar copwatch capitalismkills petition BorisJohnson freedom abolitionnow anarchism DefundThePolice technews smalltech oilwars bjp ThirdRunway election sky_of_my_window generalstrike antipolitics digitalfreedom mayday hatespeech fascists lowtech burntheprisons cyberlaw peerproduction corporations iww commons corporatewatch wageslave frontex communism RemoveThePolice neoliberalism socialecology MutualAid capitalism technology prisons conspiracytheories KeirStarmer politics inclusivity anarchisme nzpol Bookchin ClemencyNow brexit totalitarianism TyskySour Labour green BAME decolonizeyourmind surfaceworldblows ecofascism SocietalChange facialrecognition anarchy propaganda decolonization digitalrights polizei xp 18Source redandanarchistskinheads PritiPatel latestagecapitalism racist elections RussellMaroonShoatz white prisoners warrants policebrutality borisjohnson Anarchist press mutuality whitehouse freedomofexpression censorship decolonize decenterwhiteness Biden ChineseAppBan cooperative modi law deathtoamerica manipulation britpol Capitalism surveillancecapitalism leftist Revolution ukpolitics blacklivesmatter FreeAlabamaMovement rentstrike dsa techno migration mutualaid multipleexposure AbolishPrison fascist socialcoop anarchistprisoners polizeiproblem uselection IDPol Slavetrade ourstreets refugees acab freewestpapua tech</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>food</title>
<description>vitamind cake margarine dessert salsa caviar theexpanse cookery pietons food skillet liquor milk bolognese recipe foodporn yeast plate waffle biscuit glaze omelette filet pastry wine hamburger juice Amazfish sourdough nuts gras toast broth batter foodie ketchup seasoning mayo soup pan voc teamcapy mayonnaise vegan dish avocado spice bakery cooking yogurt spotify crumble cider butter cook cobbler steak pizza soda aroma oil flour cream nutella pie cuisine tartar tea marinade mushroom entree bread salad beans fresh syrup fermentation mushrooms cookie curd soysauce pudding beer baking fish foodwaste wheat pot stew chocolate paste wok recipes olive burger candy kitchen coffee bagel taste meat noodle raclette caramel rice eggs grill poutine lard croissant pasta foods cheese oregano drink muffin foie sauce soy cocoa sandwich mousse chili vinegar</description>
<description>vitamind cake margarine dessert salsa caviar theexpanse cookery pietons food skillet liquor milk bolognese recipe foodporn yeast plate waffle biscuit glaze omelette filet pastry wine hamburger juice Amazfish sourdough nuts gras toast broth batter foodie ketchup seasoning mayo soup pan voc teamcapy mayonnaise vegan dish avocado spice bakery cooking yogurt spotify crumble cider butter cook pottery cobbler steak pizza soda fedikitchen aroma oil flour cream nutella pie cuisine tartar tea marinade mushroom entree bread salad beans fresh syrup fermentation mushrooms cookie curd soysauce pudding beer baking fish foodwaste wheat pot TeamFerment stew chocolate paste wok recipes olive burger candy kitchen coffee bagel taste meat noodle raclette caramel rice eggs grill poutine lard croissant pasta foods cheese oregano drink muffin foie sauce soy vore cocoa sandwich mousse chili vinegar</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>farming</title>
<description>johndeere</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
</item>
<item>
<title>events</title>
<description>award OONIbday waybackwednesday notifications throwbackthursday adventskalender live Day deepthoughts thursdaythoughts humanrightsday followfriday wednesdaymotivation showerthoughts anarchymonday 100DaysToOffload holiday Introduction christmas week anarchy concert festival dontstarve onthisday livestream sunday screenshotsunday adayinthelife day InternationalCheetahDay interestingtimes meetup</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>countries</title>
<description>romania burma lithuania solomon chile Instanz opensuse fiji tajikistan benin paraguay eeuu senegal ukraine italy brunei nicaragua guyana Pflanzenbestimmung euphoria zambia iceland morocco netherlands swaziland bosnian suriname elsalvador russia samoa european czech belarus hayabusa2 kyrgyzstan uk abuse translation sanmarino catalonia panama japan venezuela gambia freeNukem kuwait barbados papua greece switzerland uae nigeria usa angola honduras djibouti laos sierraleone cambodia ych vietnam dust3d neofeud seychelles marshall kazakhstan estonia tonga stlucia burundi bangladesh egypt mali congo us jordan speedrun grenada israel algeria ghana bosnia russian industrial eritrea bhutan hungary saudi slovenia tig bahamas australia kiribati togo koreanorth poland malawi capeverde run armenia american hautrauswasgeht bahrain mozambique beleuchtung southsudan syria micronesia maldives iran sweden ethiopia cuba liberia canada burkina somalia Chile scotland vaticancity easttimor austria turkey yemen Bolivia denmark trunk madagascar finland philippines ivorycoast haiti ecuador Portugal azerbaijan gasuk spain albania afghanistan europe mauritania dominica thailand belize westpapuauprising macedonia illustration montenegro qatar mongolia costarica boatingeurope birdsofkenya latvia uzbekistan ireland iraq malaysia mexico mauritius oman chad nz georgia zimbabwe france serbia lesotho oddmuse tunisia argentina cameroon namibia sudan indonesia colombia tuvalu beckychambers turkmenistan tanzania germany neuhier norway comoros auteursrecht guatemala kosovo andorra wales servus pakistan belgium china antigua life koreasouth newzealand einzelfall rwanda luxembourg libya italyisntreal nauru Anarchismus moldova palau taiwan kenya trinidad eu botswana CuriosidadesVariadas jamaica vanuatu cyprus aminus3 malta niger westpapua busse unitedstates myanmar saintvincent guinea nepal peru uganda uruguay india lebanon neurodiversity southafrica croatia europeanunion bolivia chinese dominican srilanka bulgaria slovakia speedrunning gabon stkitts liechtenstein brazil shutdowncanada</description>
<description>romania burma lithuania solomon chile Instanz opensuse fiji tajikistan benin paraguay eeuu senegal ukraine italy brunei nicaragua guyana Pflanzenbestimmung euphoria zambia iceland morocco netherlands swaziland bosnian solo suriname elsalvador russia samoa european czech belarus hayabusa2 kyrgyzstan uk abuse translation sanmarino catalonia panama japan buyused venezuela gambia freeNukem kuwait barbados papua greece switzerland uae nigeria usa angola honduras djibouti laos sierraleone cambodia ych vietnam dust3d neofeud seychelles marshall kazakhstan estonia tonga stlucia burundi bangladesh egypt mali congo us jordan speedrun grenada israel algeria ghana bosnia russian industrial eritrea bhutan hungary saudi slovenia uspol tig bahamas australia kiribati togo koreanorth poland malawi capeverde run armenia american hautrauswasgeht bahrain mozambique beleuchtung southsudan syria micronesia maldives iran indigenous sweden ethiopia cuba liberia canada burkina somalia Chile scotland vaticancity easttimor austria turkey yemen Bolivia denmark trunk madagascar finland philippines ivorycoast haiti ecuador Portugal azerbaijan gasuk spain albania afghanistan europe mauritania dominica thailand belize westpapuauprising macedonia illustration montenegro qatar mongolia costarica boatingeurope birdsofkenya latvia uzbekistan ireland iraq malaysia mexico mauritius oman chad nz georgia zimbabwe france serbia lesotho oddmuse tunisia argentina cameroon namibia sudan indonesia colombia tuvalu beckychambers turkmenistan tanzania germany neuhier norway comoros auteursrecht guatemala kosovo andorra wales servus pakistan belgium china antigua life koreasouth newzealand einzelfall rwanda luxembourg libya italyisntreal nauru Anarchismus moldova palau taiwan kenya trinidad eu botswana CuriosidadesVariadas jamaica vanuatu cyprus aminus3 malta niger westpapua busse unitedstates myanmar saintvincent guinea nepal peru uganda uruguay india lebanon neurodiversity southafrica croatia europeanunion bolivia chinese dominican srilanka bulgaria slovakia speedrunning gabon stkitts liechtenstein brazil shutdowncanada</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>cycling</title>
<description>bicycle cycling bike Snowbike</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>phones</title>
<description>mobileapp pine fdroid plasmamobile android smartphone iOS14 BriarProject pinephone mobile fairphone ubuntutouch Android ubports osmand vodafone iphones postmarketos iOS microg mobileKüfA</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>science</title>
<description>math womeninstem supercollider dawkins graphTheory psychology biology gene paleontology</description>
<description>math womeninstem supercollider nextgeneration archaeologist dawkins graphTheory psychology biology gene paleontology</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>pandemic</title>
<description>covid19 corona Coronavirus CoronaWarnApp facemasks vaccines vaccine pandemic contacttracing covid coronavirus Lockdown codid19 COVID19 YesWeWork ContactTracing COVID</description>
<description>covid19 corona Coronavirus CoronaWarnApp facemasks vaccines vaccine pandemic contacttracing covid coronavirus virus Lockdown codid19 COVID19 YesWeWork ContactTracing COVID</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>software</title>
<description>app freedombox windows libre nginx Framasoft invidious drm publicdomain kubernetes fossmendations jami FuckOffZoom quicksy free docker freesoftware gimp foss nextcloud wechat openscad TabOrder ikiwiki Linux outreachy lyft nitter opensource diaspora yunohost littlebigdetails cabal conferencing accessibility devops owncast emacs freiesoftware email chatapps floss plugins deltachat application uifail FOSS bittorrent zoom gpl FriendofGNOME usability obnam snap cryptpad software OwnStream zrythm mumble grsync containers irssi mutt design gameoftrees backup rotonde GNU apps licensing profanity ffmpeg lemmy OSM win10 jitsi ux rsync libreoffice dino plugin OCUPACAOCARLOSMARIGHELLA whatsapp openoffice</description>
<description>app freedombox windows libre nginx Framasoft invidious drm publicdomain kubernetes fossmendations jami FuckOffZoom quicksy free docker freesoftware gimp foss matrix nextcloud wechat openscad TabOrder ikiwiki Linux rocketchat outreachy lyft nitter discord opensource diaspora yunohost littlebigdetails cabal conferencing accessibility devops owncast emacs freiesoftware email chatapps floss plugins deltachat application uifail FOSS bittorrent zoom gpl FriendofGNOME usability obnam snap cryptpad software OwnStream zrythm mumble grsync containers irssi mutt design gameoftrees backup rotonde GNU thunderbird apps licensing profanity ffmpeg lemmy OSM win10 jitsi wordpress ux rsync libreoffice dino plugin OCUPACAOCARLOSMARIGHELLA whatsapp openoffice</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>security</title>
<description>encrypt password history cryptography solarwinds infosec gchq IHaveSomethingToHide IronySec cryptowars supplychainattacks UseAMaskUseTor cyberattack security tor e2e bruceschneier vpn openssh openssl e2ee ed25519 encryption ssh crypto giftofencryption malware opsec keepass torsocks nsa protonvpn yubikey nitrokey openpgp castor9 gpgtools gpg cybersecurity CryptoWars signal noscript np trust openvpn datasecurity tracking cloudflare</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>gardening</title>
<description>sporespondence blockade inde federation deno cabbage bundeswehr onions datenschleuder florespondence thyme DailyFlowers permaculture papuamerdeka flowers gardening de federated fahrräder golden genderQuiz</description>
<description>sporespondence blockade inde independant deno cabbage bundeswehr onions bordeaux datenschleuder florespondence thyme DailyFlowers permaculture papuamerdeka flowers gardening de federated devilslettuce fahrräder golden</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>conferences</title>
<description>debconf talk fossdem FreedomBoxSummit schmoocon summit confidenceTricks minidebconf emacsconf ox defcon flossevent conf rC3 conference flossconf apconf C3 config</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>cats</title>
<description>Cat dailycatpic DailyCatVid Leopard</description>
<description>Cat dailycatpic dxp DailyCatVid CatsOfMastodon Leopard catbellies</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>employment</title>
<description>InterviewQuestions reproductivework bullshitjobs antiwork worklog hire hirefedi work letthenetwork jobs</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>linux</title>
<description>osdev linuxisnotanos elementaryos cli kde Debian11 slackware mobian openwrt distros DebianBullseye shareyourdesktop wireguard linuxaudio gtk debian trisquel gnome linuxposting showyourdesktop ubuntu xubuntu unix fedora centos gentoo usergroup systemd linuxgaming Debian distro destinationlinux qubesos i3wm haiku linuxisnotaplatform linux netbsd termux btrfs reproduciblebuilds artix archlinux rhel debianinstaller linuxisajoke</description>
<description>osdev linuxisnotanos elementaryos cli kde Debian11 slackware mobian openwrt distros DebianBullseye shareyourdesktop wireguard linuxaudio gtk debian trisquel gnome linuxposting showyourdesktop ubuntu xubuntu unix fedora centos gentoo usergroup systemd linuxgaming Debian distro destinationlinux qubesos i3wm haiku linuxisnotaplatform linux netbsd termux btrfs reproduciblebuilds artix gtk4 archlinux rhel debianinstaller linuxisajoke</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>photos</title>
<description>nikon photography photo tokyocameraclub photos photoshop camera picture streetphotography</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>crafts</title>
<description>topic_imadethis upholstery hackerspaces sanding sundiy knitting hack biohacking wip jewelry diy upcycling woodworking origami makers quilting quilt 3dmodel woodwork ceramics</description>
<link/>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>pets</title>
<description>catpics catalunya catofmastodon mastodogs catbehaviour dogsofmastodon gentrification cats kittens pet dog caturday catsofmastodon cute dogs mastocats cat catcontent</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>news</title>
<description>news newsletter doubledownnews journalism SkyNews</description>
<description>news Wikileaks newsletter doubledownnews journalism SkyNews</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>games</title>
<description>minecraft tetris99 TerraNil boardgames gamedesign chess nintendoswitch mud game 0ad ttrpg gamedev guildwars2 TetrisGore gaming Gamesphere rpg tetris dosgaming DnD cyber2077 cyberpunk2077 FreeNukum neopets minetest guildwars dnd</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>retro</title>
<description>A500 atarist commodore teletext floppy 8bit atari trs80 floppydisk retrocomputing C64 plan9 80s microcomputing omm retrogaming z80 8bitdo retro amiga bbcmicro microcomputer bbsing</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>indymedia</title>
<description>visionontv indymediaback pga indymedia omn tv 4opens</description>
<description>visionontv indymediaback pga indymedia omn tv 4opens openmedianetwork</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>activism</title>
<description>protestor g20 riseup sflc DanniVive reuse fsfe softwarefreedom ann activist xr directaction eff openrightsgroup protest actiondirecte kroymann HS2 ngo MarcWittmann fsf StopHS2 grassroots BLM changeisinyourhands conservancy XR freeolabini announcement isolateByoblu annieleonard</description>
<description>protestor grassroot g20 riseup sflc DanniVive reuse fsfe softwarefreedom ann activist xr directaction eff openrightsgroup protest actiondirecte kroymann HS2 ngo MarcWittmann fsf StopHS2 grassroots BLM changeisinyourhands conservancy JefferySaunders XR freeolabini announcement isolateByoblu annieleonard</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>questions</title>
<description>askmastodon askfedi question askmasto askfediverse ask askfosstodon</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>internet</title>
<description>spam firefox redecentralize wikipedia rtmp decentralization decentralize w3c torrent data sitejs internetarchaeology WordPress self router dataretention selfhosting icann discourse PeerToPeer dns openstandards nojs oauth CDNsAreEvil protonmail standards yourdataisyourdata gemini SmallWeb xmpp semanticweb socialnetwork ntp socialnetworks proton icmp videocast jabber decentralized wiki ssb darknet cookies darkweb netcat server browser cloudy p2p social www ilovewikipedia web WebsiteStatus twitch socialmedia domain rss ipns mozilla voicemail mail ipfs browsers decentralizeit openculture cyberspace offthegrid cloud internet decentralisation internetarchive js dark openweb onlineharms dot internetshutdowns fixtheweb socialweb</description>
<description>spam firefox redecentralize wikipedia rtmp decentralization decentralize w3c torrent data sitejs internetarchaeology WordPress self router dataretention selfhosting icann discourse PeerToPeer dns openstandards nojs oauth CDNsAreEvil protonmail standards yourdataisyourdata gemini SmallWeb xmpp semanticweb socialnetwork ntp socialnetworks proton icmp videocast jabber decentralized wiki ssb darknet cookies darkweb netcat server browser cloudy p2p social www ilovewikipedia web WebsiteStatus twitch socialmedia domain rss ipns mozilla voicemail mail ipfs browsers decentralizeit openculture cyberspace offthegrid cloud internet decentralisation internetarchive js dark openweb onlineharms dot ftp internetshutdowns fixtheweb socialweb</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>birds</title>
<description>RainbowBeeEater bird thunderbird</description>
<description>RainbowBeeEater bird</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>ethics</title>
<description>digitalethics ethics ethicallicense ethical</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>economics</title>
<description>theWorkshop WealthConcentration funding shop startups HenryGeorge crowdfunding micropatronage monetize smallbusiness GitPay</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>podcasts</title>
<description>podcasting IntergalacticWasabiHour podcast tilde til tilderadio podcasts smallisbeautiful tilvids</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>years</title>
<description>Year2020 year 1yrago 5yrsago</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>identity</title>
<description>boomer</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>political</title>
<description>copservation linguisticProgramming</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>fashion</title>
<description>brasil fashionistas fashionesta bras fashion patches</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>art</title>
<description>urban artvsartist2020 watercolor autisticartist barrigòtic art krita urbanart queerart deviantart adultcolouring collage streetart coverart MastoArt painting digitalart comic artwork mandala xkcd santa mastoart gnuimagemanipulationprogram webcomic furryart sticker TattoosOfTheFediverse artvsartist artist meme concretepoetry artwithopensource blackartist zine JuliaHartleyBrewer</description>
<description>urban glassart artvsartist2020 watercolor autisticartist barrigòtic art krita urbanart queerart deviantart adultcolouring collage streetart coverart MastoArt culture polArt ink painting digitalart comic artwork mandala xkcd comics santa mastoart gnuimagemanipulationprogram wireart cartoon webcomic furryart sticker artbreeder arttherapy TattoosOfTheFediverse artvsartist sculpture artist meme concretepoetry artwithopensource peppertop blackartist zine furry JuliaHartleyBrewer</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>month</title>
<description>april july march october november august june december september may feburary january month</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>funding</title>
<description>disabledcrowdfund patreon</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>books</title>
<description>justhollythings earthsea ebooks book amreading bookreview theLibrary wayfarers books ebook epub cookbook</description>
<link/>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>comedy</title>
<description>laugh humour satire irony standup funny humor</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
</item>
<item>
<title>crafts</title>
<description>upholstery hackerspaces sanding sundiy knitting hack biohacking wip jewelry diy upcycling woodworking origami makers quilting quilt 3dmodel woodwork</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>health</title>
<description>medical burnout medicine treatment EmotionalFirstAid autistic health meds</description>
<description>medical burnout cannabis medicine treatment EmotionalFirstAid maryjane autistic health meds marijuana</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>seasons</title>
<description>spring autumn winter summer</description>
<description>spring autumn winter summer solstice wintersolstice</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>gender</title>
<description>transwomen transcrowdfund female trans women estradiol queer woman transrights</description>
<description>transwomen transcrowdfund female trans women estradiol queer genderQuiz woman transrights</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>fiction</title>
<description>cyberpunk thehobbit fiction</description>
<description>cyberpunk thehobbit fiction microfiction</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>legal</title>
<description>hfgkarlsruhe amro GameSphere OnlineHarmsBill laipower gdpr intros Anticritique learning energyflow digitalservicesact geekproblem</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>introductions</title>
<description>newhere introductions</description>
<description>newhere firsttoot Introduction Introductions introduction introductions</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>audio</title>
<description>audioproduction audiofeedback audio</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>bots</title>
<description>bot</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>climate</title>
<description>clouds renewableenergy amp climateemergency climate coal globalwarming weather climatecamp sky climatescience climatecrisis</description>
<description>clouds renewableenergy amp climateemergency climate coal globalwarming weather climatecamp science sky climatescience climatecrisis</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>nature</title>
<description>morning trees light birds nature frogs sunrise inaturalist morningcrew australianwildlife capybara amphibians</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
</item>
<item>
<title>books</title>
<description>earthsea ebooks book amreading bookreview theLibrary wayfarers books ebook epub cookbook</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>scifi</title>
<description>startrek starwars babylon5</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>religion</title>
<description>pagan</description>
<description>neopagan pagan</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>media</title>
<description>mainstreaming weAreAllCrazy theatlantic traditionalmedia videos railroads ai realmedia</description>
<description>mainstreaming stream streaming weAreAllCrazy maiabeyrouti theatlantic traditionalmedia videos railroads taina ai realmedia media</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>techbros</title>
<description>hackernews red reddit</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>photography</title>
<description>landscapephotography</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>moderation</title>
<description>fedblock</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>languages</title>
<description>lojban</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>election</title>
<description>voted vote</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>#music</title>
<description>trance</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>facts</title>
<description>didyouknow</description>
<link/>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>radio</title>
<description>radiohost vantascape vantaradio ca radio healthcare hamradio FreeAllPoliticalPrisoners radiobroadcasting 3dcad radioshow california listeningtonow radiobroadcast spazradio anonradio dmca</description>
<description>radiohost vantascape vantaradio ca radio healthcare hamradio FreeAllPoliticalPrisoners card10 radiobroadcasting 3dcad radioshow local california listeningtonow radiobroadcast spazradio anonradio dmca</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>licenses</title>
<description>creative creativecommons</description>
<description>copyright creative creativecommons</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>education</title>
<description>education teach tutorial</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>microcontroller</title>
<description>microcontroller arduino</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>scotland</title>
<description>glasgow highlands edinburgh loch</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>astronomy</title>
<description>space jupiter moon saturn milkyway</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>entertainment</title>
<description>watching Thundercat entertainment themandalorian</description>
<description>watching Thundercat thisisthetypeofmemethatilikecauseitcontainsreptiles entertainment me meow themandalorian</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>privacy</title>
<description>privacymatters surveillance dataprivacy privacy</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>environment</title>
<description>climatechange climatechaos</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>#software</title>
<description>flatpak</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>microcontrollers</title>
<description>esp8266 esp32</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
</item>
<item>
<title>people</title>
<description>introduction</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>help</title>
<description>helpful help</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>war</title>
<description>weapons</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>philosophy</title>
<description>stoic postmodernism</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>france</title>
<description>Macronavirus</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
<item>
<title>travel</title>
<description>travel taxi</description>
<link/>
<pubDate>Sun, 20 Dec 2020 20:57:26 UT</pubDate>
<pubDate>Tue, 22 Dec 2020 09:45:58 UT</pubDate>
</item>
</channel>
</rss>

111
delete.py
View File

@ -10,82 +10,17 @@ import os
from datetime import datetime
from utils import getFullDomain
from utils import removeIdEnding
from utils import getStatusNumber
from utils import urlPermitted
from utils import getNicknameFromActor
from utils import getDomainFromActor
from utils import locatePost
from utils import deletePost
from utils import removeModerationPostFromIndex
from posts import sendSignedJson
from session import postJson
from webfinger import webfingerHandle
from auth import createBasicAuthHeader
from posts import getPersonBox
def createDelete(session, baseDir: str, federationList: [],
nickname: str, domain: str, port: int,
toUrl: str, ccUrl: str, httpPrefix: str,
objectUrl: str, clientToServer: bool,
sendThreads: [], postLog: [],
personCache: {}, cachedWebfingers: {},
debug: bool) -> {}:
"""Creates a delete message
Typically toUrl will be https://www.w3.org/ns/activitystreams#Public
and ccUrl might be a specific person whose post is to be deleted
objectUrl is typically the url of the message, corresponding to url
or atomUri in createPostBase
"""
if not urlPermitted(objectUrl, federationList):
return None
if ':' in domain:
domain = domain.split(':')[0]
fullDomain = domain
fullDomain = getFullDomain(domain, port)
statusNumber, published = getStatusNumber()
newDeleteId = \
httpPrefix + '://' + fullDomain + '/users/' + \
nickname + '/statuses/' + statusNumber
newDelete = {
"@context": "https://www.w3.org/ns/activitystreams",
'actor': httpPrefix+'://'+fullDomain+'/users/'+nickname,
'atomUri': newDeleteId,
'cc': [],
'id': newDeleteId + '/activity',
'object': objectUrl,
'published': published,
'to': [toUrl],
'type': 'Delete'
}
if ccUrl:
if len(ccUrl) > 0:
newDelete['cc'] = [ccUrl]
deleteNickname = None
deleteDomain = None
deletePort = None
if '/users/' in objectUrl or \
'/accounts/' in objectUrl or \
'/channel/' in objectUrl or \
'/profile/' in objectUrl:
deleteNickname = getNicknameFromActor(objectUrl)
deleteDomain, deletePort = getDomainFromActor(objectUrl)
if deleteNickname and deleteDomain:
sendSignedJson(newDelete, session, baseDir,
nickname, domain, port,
deleteNickname, deleteDomain, deletePort,
'https://www.w3.org/ns/activitystreams#Public',
httpPrefix, True, clientToServer, federationList,
sendThreads, postLog, cachedWebfingers,
personCache, debug)
return newDelete
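For reference, a minimal sketch of the Delete activity which this removed helper assembled, with cc filled in the way deletePublic passed it; the actor, status ids and timestamp below are invented:
exampleDelete = {
    "@context": "https://www.w3.org/ns/activitystreams",
    'actor': 'https://example.net/users/alice',
    'atomUri': 'https://example.net/users/alice/statuses/1608630000000000',
    'cc': ['https://example.net/users/alice/followers'],
    'id': 'https://example.net/users/alice/statuses/1608630000000000/activity',
    'object': 'https://example.net/users/alice/statuses/1608540000000000',
    'published': '2020-12-22T09:45:58Z',
    'to': ['https://www.w3.org/ns/activitystreams#Public'],
    'type': 'Delete'
}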
def sendDeleteViaServer(baseDir: str, session,
fromNickname: str, password: str,
fromDomain: str, fromPort: int,
@ -167,52 +102,6 @@ def sendDeleteViaServer(baseDir: str, session,
return newDeleteJson
def deletePublic(session, baseDir: str, federationList: [],
nickname: str, domain: str, port: int, httpPrefix: str,
objectUrl: str, clientToServer: bool,
sendThreads: [], postLog: [],
personCache: {}, cachedWebfingers: {},
debug: bool) -> {}:
"""Makes a public delete activity
"""
fromDomain = getFullDomain(domain, port)
toUrl = 'https://www.w3.org/ns/activitystreams#Public'
ccUrl = httpPrefix + '://' + fromDomain + \
'/users/' + nickname + '/followers'
return createDelete(session, baseDir, federationList,
nickname, domain, port,
toUrl, ccUrl, httpPrefix,
objectUrl, clientToServer,
sendThreads, postLog,
personCache, cachedWebfingers,
debug)
def deletePostPub(session, baseDir: str, federationList: [],
nickname: str, domain: str, port: int, httpPrefix: str,
deleteNickname: str, deleteDomain: str,
deletePort: int, deleteHttpsPrefix: str,
deleteStatusNumber: int, clientToServer: bool,
sendThreads: [], postLog: [],
personCache: {}, cachedWebfingers: {},
debug: bool) -> {}:
"""Deletes a given status post
"""
deletedDomain = getFullDomain(deleteDomain, deletePort)
objectUrl = \
deleteHttpsPrefix + '://' + deletedDomain + '/users/' + \
deleteNickname + '/statuses/' + str(deleteStatusNumber)
return deletePublic(session, baseDir, federationList,
nickname, domain, port, httpPrefix,
objectUrl, clientToServer,
sendThreads, postLog,
personCache, cachedWebfingers,
debug)
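With invented values, the objectUrl assembled by this removed helper would look like:
# deleteHttpsPrefix='https', deleteDomain='example.net', deletePort=443,
# deleteNickname='alice', deleteStatusNumber=1608540000000000 gives:
objectUrl = 'https://example.net/users/alice/statuses/1608540000000000'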
def outboxDelete(baseDir: str, httpPrefix: str,
nickname: str, domain: str,
messageJson: {}, debug: bool,

View File

@ -7,7 +7,7 @@ __email__ = "bob@freedombone.net"
__status__ = "Production"
def getDonationTypes() -> str:
def _getDonationTypes() -> str:
return ('patreon', 'paypal', 'gofundme', 'liberapay',
'kickstarter', 'indiegogo', 'crowdsupply',
'subscribestar')
@ -18,7 +18,7 @@ def getDonationUrl(actorJson: {}) -> str:
"""
if not actorJson.get('attachment'):
return ''
donationType = getDonationTypes()
donationType = _getDonationTypes()
for propertyValue in actorJson['attachment']:
if not propertyValue.get('name'):
continue
@ -54,7 +54,7 @@ def setDonationUrl(actorJson: {}, donateUrl: str) -> None:
if not actorJson.get('attachment'):
actorJson['attachment'] = []
donationType = getDonationTypes()
donationType = _getDonationTypes()
donateName = None
for paymentService in donationType:
if paymentService in donateUrl:
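The loops in getDonationUrl and setDonationUrl (shown truncated above) simply substring-match a donation url against that tuple; a small sketch with a hypothetical url:
donateUrl = 'https://www.patreon.com/somecreator'    # hypothetical url
donateName = None
for paymentService in _getDonationTypes():
    if paymentService in donateUrl:
        donateName = paymentService    # matches 'patreon' here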

View File

@ -1124,7 +1124,7 @@ if args.undoItemName:
cachedWebfingers = {}
print('Sending undo of shared item: ' + args.undoItemName)
sendUndoShareViaServer(session,
sendUndoShareViaServer(baseDir, session,
args.nickname, args.password,
domain, port,
httpPrefix,
@ -1931,33 +1931,88 @@ if args.testdata:
deleteAllPosts(baseDir, nickname, domain, 'inbox')
deleteAllPosts(baseDir, nickname, domain, 'outbox')
testFollowersOnly = False
testSaveToFile = True
testClientToServer = False
testCommentsEnabled = True
testAttachImageFilename = None
testMediaType = None
testImageDescription = None
createPublicPost(baseDir, nickname, domain, port, httpPrefix,
"like, this is totally just a #test, man",
False, True, False, True, None, None, useBlurhash)
"like this is totally just a #test man",
testFollowersOnly,
testSaveToFile,
testClientToServer,
testCommentsEnabled,
testAttachImageFilename,
testMediaType, testImageDescription,
useBlurhash)
createPublicPost(baseDir, nickname, domain, port, httpPrefix,
"Zoiks!!!",
False, True, False, True, None, None, useBlurhash)
testFollowersOnly,
testSaveToFile,
testClientToServer,
testCommentsEnabled,
testAttachImageFilename,
testMediaType, testImageDescription,
useBlurhash)
createPublicPost(baseDir, nickname, domain, port, httpPrefix,
"Hey scoob we need like a hundred more #milkshakes",
False, True, False, True, None, None, useBlurhash)
testFollowersOnly,
testSaveToFile,
testClientToServer,
testCommentsEnabled,
testAttachImageFilename,
testMediaType, testImageDescription,
useBlurhash)
createPublicPost(baseDir, nickname, domain, port, httpPrefix,
"Getting kinda spooky around here",
False, True, False, True, None, None,
testFollowersOnly,
testSaveToFile,
testClientToServer,
testCommentsEnabled,
testAttachImageFilename,
testMediaType, testImageDescription,
useBlurhash, 'someone')
createPublicPost(baseDir, nickname, domain, port, httpPrefix,
"And they would have gotten away with it too" +
"if it wasn't for those pesky hackers",
False, True, False, True, 'img/logo.png',
'Description of image', useBlurhash)
testFollowersOnly,
testSaveToFile,
testClientToServer,
testCommentsEnabled,
'img/logo.png', 'image/png',
'Description of image',
useBlurhash)
createPublicPost(baseDir, nickname, domain, port, httpPrefix,
"man, these centralized sites are, like, the worst!",
False, True, False, True, None, None, useBlurhash)
"man these centralized sites are like the worst!",
testFollowersOnly,
testSaveToFile,
testClientToServer,
testCommentsEnabled,
testAttachImageFilename,
testMediaType, testImageDescription,
useBlurhash)
createPublicPost(baseDir, nickname, domain, port, httpPrefix,
"another mystery solved #test",
False, True, False, True, None, None, useBlurhash)
testFollowersOnly,
testSaveToFile,
testClientToServer,
testCommentsEnabled,
testAttachImageFilename,
testMediaType, testImageDescription,
useBlurhash)
createPublicPost(baseDir, nickname, domain, port, httpPrefix,
"let's go bowling",
False, True, False, True, None, None, useBlurhash)
testFollowersOnly,
testSaveToFile,
testClientToServer,
testCommentsEnabled,
testAttachImageFilename,
testMediaType, testImageDescription,
useBlurhash)
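# The positional arguments above presumably map onto createPublicPost
# parameters in this order (inferred from the test variable names rather
# than from the function signature, so treat the mapping as an assumption):
#   followersOnly, saveToFile, clientToServer, commentsEnabled,
#   attachImageFilename, mediaType, imageDescription, useBlurhash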
domainFull = domain + ':' + str(port)
clearFollows(baseDir, nickname, domain)

View File

@ -79,7 +79,7 @@ def removeGlobalFilter(baseDir: str, words: str) -> bool:
return False
def isTwitterPost(content: str) -> bool:
def _isTwitterPost(content: str) -> bool:
"""Returns true if the given post content is a retweet or twitter crosspost
"""
if '/twitter.' in content or '@twitter.' in content:
@ -89,7 +89,7 @@ def isTwitterPost(content: str) -> bool:
return False
def isFilteredBase(filename: str, content: str) -> bool:
def _isFilteredBase(filename: str, content: str) -> bool:
"""Uses the given file containing filtered words to check
the given content
"""
@ -122,7 +122,7 @@ def isFiltered(baseDir: str, nickname: str, domain: str, content: str) -> bool:
words must be present although not necessarily adjacent
"""
globalFiltersFilename = baseDir + '/accounts/filters.txt'
if isFilteredBase(globalFiltersFilename, content):
if _isFilteredBase(globalFiltersFilename, content):
return True
if not nickname or not domain:
@ -132,9 +132,9 @@ def isFiltered(baseDir: str, nickname: str, domain: str, content: str) -> bool:
removeTwitter = baseDir + '/accounts/' + \
nickname + '@' + domain + '/.removeTwitter'
if os.path.isfile(removeTwitter):
if isTwitterPost(content):
if _isTwitterPost(content):
return True
accountFiltersFilename = baseDir + '/accounts/' + \
nickname + '@' + domain + '/filters.txt'
return isFilteredBase(accountFiltersFilename, content)
return _isFilteredBase(accountFiltersFilename, content)
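A minimal sketch of the public entry point which now calls the renamed private helpers; the base directory, account and content are placeholders:
baseDir = '/path/to/basedir'    # placeholder
if isFiltered(baseDir, 'alice', 'example.net', 'some post content #test'):
    print('this content would be dropped for alice@example.net')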

164
follow.py
View File

@ -65,10 +65,10 @@ def createInitialLastSeen(baseDir: str, httpPrefix: str) -> None:
break
def preApprovedFollower(baseDir: str,
nickname: str, domain: str,
approveHandle: str,
allowNewsFollowers: bool) -> bool:
def _preApprovedFollower(baseDir: str,
nickname: str, domain: str,
approveHandle: str,
allowNewsFollowers: bool) -> bool:
"""Is the given handle an already manually approved follower?
"""
# optionally allow the news account to be followed
@ -84,10 +84,10 @@ def preApprovedFollower(baseDir: str,
return False
def removeFromFollowBase(baseDir: str,
nickname: str, domain: str,
acceptOrDenyHandle: str, followFile: str,
debug: bool) -> None:
def _removeFromFollowBase(baseDir: str,
nickname: str, domain: str,
acceptOrDenyHandle: str, followFile: str,
debug: bool) -> None:
"""Removes a handle from follow requests or rejects file
"""
handle = nickname + '@' + domain
@ -114,17 +114,17 @@ def removeFromFollowRequests(baseDir: str,
denyHandle: str, debug: bool) -> None:
"""Removes a handle from follow requests
"""
removeFromFollowBase(baseDir, nickname, domain,
denyHandle, 'followrequests', debug)
_removeFromFollowBase(baseDir, nickname, domain,
denyHandle, 'followrequests', debug)
def removeFromFollowRejects(baseDir: str,
nickname: str, domain: str,
acceptHandle: str, debug: bool) -> None:
def _removeFromFollowRejects(baseDir: str,
nickname: str, domain: str,
acceptHandle: str, debug: bool) -> None:
"""Removes a handle from follow rejects
"""
removeFromFollowBase(baseDir, nickname, domain,
acceptHandle, 'followrejects', debug)
_removeFromFollowBase(baseDir, nickname, domain,
acceptHandle, 'followrejects', debug)
def isFollowingActor(baseDir: str,
@ -179,8 +179,8 @@ def followerOfPerson(baseDir: str, nickname: str, domain: str,
federationList, debug, 'followers.txt')
def isFollowerOfPerson(baseDir: str, nickname: str, domain: str,
followerNickname: str, followerDomain: str) -> bool:
def _isFollowerOfPerson(baseDir: str, nickname: str, domain: str,
followerNickname: str, followerDomain: str) -> bool:
"""is the given nickname a follower of followerNickname?
"""
if ':' in domain:
@ -212,10 +212,10 @@ def isFollowerOfPerson(baseDir: str, nickname: str, domain: str,
return alreadyFollowing
def unfollowPerson(baseDir: str, nickname: str, domain: str,
followNickname: str, followDomain: str,
followFile='following.txt',
debug=False) -> bool:
def unfollowAccount(baseDir: str, nickname: str, domain: str,
followNickname: str, followDomain: str,
followFile='following.txt',
debug=False) -> bool:
"""Removes a person to the follow list
"""
if ':' in domain:
@ -261,14 +261,14 @@ def unfollowPerson(baseDir: str, nickname: str, domain: str,
return True
def unfollowerOfPerson(baseDir: str, nickname: str, domain: str,
followerNickname: str, followerDomain: str,
debug=False) -> bool:
def unfollowerOfAccount(baseDir: str, nickname: str, domain: str,
followerNickname: str, followerDomain: str,
debug=False) -> bool:
"""Remove a follower of a person
"""
return unfollowPerson(baseDir, nickname, domain,
followerNickname, followerDomain,
'followers.txt', debug)
return unfollowAccount(baseDir, nickname, domain,
followerNickname, followerDomain,
'followers.txt', debug)
def clearFollows(baseDir: str, nickname: str, domain: str,
@ -291,9 +291,9 @@ def clearFollowers(baseDir: str, nickname: str, domain: str) -> None:
clearFollows(baseDir, nickname, domain, 'followers.txt')
def getNoOfFollows(baseDir: str, nickname: str, domain: str,
authenticated: bool,
followFile='following.txt') -> int:
def _getNoOfFollows(baseDir: str, nickname: str, domain: str,
authenticated: bool,
followFile='following.txt') -> int:
"""Returns the number of follows or followers
"""
# only show number of followers to authenticated
@ -324,12 +324,12 @@ def getNoOfFollows(baseDir: str, nickname: str, domain: str,
return ctr
def getNoOfFollowers(baseDir: str,
nickname: str, domain: str, authenticated: bool) -> int:
def _getNoOfFollowers(baseDir: str,
nickname: str, domain: str, authenticated: bool) -> int:
"""Returns the number of followers of the given person
"""
return getNoOfFollows(baseDir, nickname, domain,
authenticated, 'followers.txt')
return _getNoOfFollows(baseDir, nickname, domain,
authenticated, 'followers.txt')
def getFollowingFeed(baseDir: str, domain: str, port: int, path: str,
@ -382,7 +382,7 @@ def getFollowingFeed(baseDir: str, domain: str, port: int, path: str,
httpPrefix + '://' + domain + '/users/' + \
nickname + '/' + followFile
totalStr = \
getNoOfFollows(baseDir, nickname, domain, authenticated)
_getNoOfFollows(baseDir, nickname, domain, authenticated)
following = {
'@context': 'https://www.w3.org/ns/activitystreams',
'first': firstStr,
@ -463,15 +463,15 @@ def getFollowingFeed(baseDir: str, domain: str, port: int, path: str,
return following
def followApprovalRequired(baseDir: str, nicknameToFollow: str,
domainToFollow: str, debug: bool,
followRequestHandle: str,
allowNewsFollowers: bool) -> bool:
def _followApprovalRequired(baseDir: str, nicknameToFollow: str,
domainToFollow: str, debug: bool,
followRequestHandle: str,
allowNewsFollowers: bool) -> bool:
""" Returns the policy for follower approvals
"""
# has this handle already been manually approved?
if preApprovedFollower(baseDir, nicknameToFollow, domainToFollow,
followRequestHandle, allowNewsFollowers):
if _preApprovedFollower(baseDir, nicknameToFollow, domainToFollow,
followRequestHandle, allowNewsFollowers):
return False
manuallyApproveFollows = False
@ -494,10 +494,10 @@ def followApprovalRequired(baseDir: str, nicknameToFollow: str,
return manuallyApproveFollows
def noOfFollowRequests(baseDir: str,
nicknameToFollow: str, domainToFollow: str,
nickname: str, domain: str, fromPort: int,
followType: str) -> int:
def _noOfFollowRequests(baseDir: str,
nicknameToFollow: str, domainToFollow: str,
nickname: str, domain: str, fromPort: int,
followType: str) -> int:
"""Returns the current number of follow requests
"""
accountsDir = baseDir + '/accounts/' + \
@ -521,11 +521,11 @@ def noOfFollowRequests(baseDir: str,
return ctr
def storeFollowRequest(baseDir: str,
nicknameToFollow: str, domainToFollow: str, port: int,
nickname: str, domain: str, fromPort: int,
followJson: {},
debug: bool, personUrl: str) -> bool:
def _storeFollowRequest(baseDir: str,
nicknameToFollow: str, domainToFollow: str, port: int,
nickname: str, domain: str, fromPort: int,
followJson: {},
debug: bool, personUrl: str) -> bool:
"""Stores the follow request for later use
"""
accountsDir = baseDir + '/accounts/' + \
@ -668,9 +668,9 @@ def receiveFollowRequest(session, baseDir: str, httpPrefix: str,
nicknameToFollow)
return True
if maxFollowers > 0:
if getNoOfFollowers(baseDir,
nicknameToFollow, domainToFollow,
True) > maxFollowers:
if _getNoOfFollowers(baseDir,
nicknameToFollow, domainToFollow,
True) > maxFollowers:
print('WARN: ' + nicknameToFollow +
' has reached their maximum number of followers')
return True
@ -682,9 +682,9 @@ def receiveFollowRequest(session, baseDir: str, httpPrefix: str,
baseDir + '/accounts/' + handleToFollow)
return True
if isFollowerOfPerson(baseDir,
nicknameToFollow, domainToFollowFull,
nickname, domainFull):
if _isFollowerOfPerson(baseDir,
nicknameToFollow, domainToFollowFull,
nickname, domainFull):
if debug:
print('DEBUG: ' + nickname + '@' + domain +
' is already a follower of ' +
@ -693,37 +693,37 @@ def receiveFollowRequest(session, baseDir: str, httpPrefix: str,
# what is the followers policy?
approveHandle = nickname + '@' + domainFull
if followApprovalRequired(baseDir, nicknameToFollow,
domainToFollow, debug, approveHandle,
allowNewsFollowers):
if _followApprovalRequired(baseDir, nicknameToFollow,
domainToFollow, debug, approveHandle,
allowNewsFollowers):
print('Follow approval is required')
if domain.endswith('.onion'):
if noOfFollowRequests(baseDir,
nicknameToFollow, domainToFollow,
nickname, domain, fromPort,
'onion') > 5:
if _noOfFollowRequests(baseDir,
nicknameToFollow, domainToFollow,
nickname, domain, fromPort,
'onion') > 5:
print('Too many follow requests from onion addresses')
return False
elif domain.endswith('.i2p'):
if noOfFollowRequests(baseDir,
nicknameToFollow, domainToFollow,
nickname, domain, fromPort,
'i2p') > 5:
if _noOfFollowRequests(baseDir,
nicknameToFollow, domainToFollow,
nickname, domain, fromPort,
'i2p') > 5:
print('Too many follow requests from i2p addresses')
return False
else:
if noOfFollowRequests(baseDir,
nicknameToFollow, domainToFollow,
nickname, domain, fromPort,
'') > 10:
if _noOfFollowRequests(baseDir,
nicknameToFollow, domainToFollow,
nickname, domain, fromPort,
'') > 10:
print('Too many follow requests')
return False
print('Storing follow request for approval')
return storeFollowRequest(baseDir,
nicknameToFollow, domainToFollow, port,
nickname, domain, fromPort,
messageJson, debug, messageJson['actor'])
return _storeFollowRequest(baseDir,
nicknameToFollow, domainToFollow, port,
nickname, domain, fromPort,
messageJson, debug, messageJson['actor'])
else:
print('Follow request does not require approval')
# update the followers
@ -920,15 +920,15 @@ def sendFollowRequest(session, baseDir: str,
'object': followedId
}
if followApprovalRequired(baseDir, nickname, domain, debug,
followHandle, allowNewsFollowers):
if _followApprovalRequired(baseDir, nickname, domain, debug,
followHandle, allowNewsFollowers):
# Remove any follow requests rejected for the account being followed.
# It's assumed that if you are following someone then you are
# ok with them following back. If this isn't the case then a rejected
# follow request will block them again.
removeFromFollowRejects(baseDir,
nickname, domain,
followHandle, debug)
_removeFromFollowRejects(baseDir,
nickname, domain,
followHandle, debug)
sendSignedJson(newFollowJson, session, baseDir, nickname, domain, port,
followNickname, followDomain, followPort,
@ -1208,8 +1208,8 @@ def outboxUndoFollow(baseDir: str, messageJson: {}, debug: bool) -> None:
getDomainFromActor(messageJson['object']['object'])
domainFollowingFull = getFullDomain(domainFollowing, portFollowing)
if unfollowPerson(baseDir, nicknameFollower, domainFollowerFull,
nicknameFollowing, domainFollowingFull):
if unfollowAccount(baseDir, nicknameFollower, domainFollowerFull,
nicknameFollowing, domainFollowingFull):
if debug:
print('DEBUG: ' + nicknameFollower + ' unfollowed ' +
nicknameFollowing + '@' + domainFollowingFull)
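
All of the renames in this commit follow the same Python convention: helpers that are only called from inside their own module gain a single leading underscore, while the module's public entry points keep their original names. A minimal, illustrative sketch of the pattern (module and function names here are invented, not taken from Epicyon):

# naming_example.py - illustrative only
import os


def _countPendingRequests(requestsDir: str) -> int:
    """Module-private helper: the leading underscore signals that other
    modules should not import or call this directly"""
    if not os.path.isdir(requestsDir):
        return 0
    return len(os.listdir(requestsDir))


def followRequestsWaiting(requestsDir: str) -> bool:
    """Public entry point: part of the module's intended interface"""
    return _countPendingRequests(requestsDir) > 0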

View File

@ -34,10 +34,10 @@ def receivingCalendarEvents(baseDir: str, nickname: str, domain: str,
return handle + '\n' in open(calendarFilename).read()
def receiveCalendarEvents(baseDir: str, nickname: str, domain: str,
followingNickname: str,
followingDomain: str,
add: bool) -> None:
def _receiveCalendarEvents(baseDir: str, nickname: str, domain: str,
followingNickname: str,
followingDomain: str,
add: bool) -> None:
"""Adds or removes a handle from the following.txt list into a list
indicating whether to receive calendar events from that account
"""
@ -100,12 +100,12 @@ def receiveCalendarEvents(baseDir: str, nickname: str, domain: str,
def addPersonToCalendar(baseDir: str, nickname: str, domain: str,
followingNickname: str,
followingDomain: str) -> None:
receiveCalendarEvents(baseDir, nickname, domain,
followingNickname, followingDomain, True)
_receiveCalendarEvents(baseDir, nickname, domain,
followingNickname, followingDomain, True)
def removePersonFromCalendar(baseDir: str, nickname: str, domain: str,
followingNickname: str,
followingDomain: str) -> None:
receiveCalendarEvents(baseDir, nickname, domain,
followingNickname, followingDomain, False)
_receiveCalendarEvents(baseDir, nickname, domain,
followingNickname, followingDomain, False)
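
Both public wrappers above delegate to one private worker with an add flag. A compact sketch of that wrapper-plus-flag pattern over a one-handle-per-line list file (the file layout matches the `handle + '\n' in open(...).read()` checks above; the function names are illustrative):

import os


def _toggleHandleInList(listFilename: str, handle: str, add: bool) -> None:
    """Adds or removes a handle from a newline-delimited list file"""
    lines = []
    if os.path.isfile(listFilename):
        with open(listFilename, 'r') as listFile:
            lines = listFile.read().splitlines()
    if add and handle not in lines:
        lines.append(handle)
    elif not add and handle in lines:
        lines.remove(handle)
    with open(listFilename, 'w+') as listFile:
        for line in lines:
            listFile.write(line + '\n')


def addPersonToList(listFilename: str, handle: str) -> None:
    _toggleHandleInList(listFilename, handle, True)


def removePersonFromList(listFilename: str, handle: str) -> None:
    _toggleHandleInList(listFilename, handle, False)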

26
git.py
View File

@ -10,7 +10,7 @@ import os
import html
def gitFormatContent(content: str) -> str:
def _gitFormatContent(content: str) -> str:
""" replace html formatting, so that it's more
like the original patch file
"""
@ -22,8 +22,8 @@ def gitFormatContent(content: str) -> str:
return patchStr
def getGitProjectName(baseDir: str, nickname: str, domain: str,
subject: str) -> str:
def _getGitProjectName(baseDir: str, nickname: str, domain: str,
subject: str) -> str:
"""Returns the project name for a git patch
The project name should be contained within the subject line
and should match against a list of projects which the account
@ -71,13 +71,13 @@ def isGitPatch(baseDir: str, nickname: str, domain: str,
return False
if checkProjectName:
projectName = \
getGitProjectName(baseDir, nickname, domain, subject)
_getGitProjectName(baseDir, nickname, domain, subject)
if not projectName:
return False
return True
def getGitHash(patchStr: str) -> str:
def _getGitHash(patchStr: str) -> str:
"""Returns the commit hash from a given patch
"""
patchLines = patchStr.split('\n')
@ -91,7 +91,7 @@ def getGitHash(patchStr: str) -> str:
return None
def getPatchDescription(patchStr: str) -> str:
def _getPatchDescription(patchStr: str) -> str:
"""Returns the description from a given patch
"""
patchLines = patchStr.split('\n')
@ -134,8 +134,8 @@ def convertPostToPatch(baseDir: str, nickname: str, domain: str,
postJsonObject['object']['content'],
False):
return False
patchStr = gitFormatContent(postJsonObject['object']['content'])
commitHash = getGitHash(patchStr)
patchStr = _gitFormatContent(postJsonObject['object']['content'])
commitHash = _getGitHash(patchStr)
if not commitHash:
return False
postJsonObject['object']['type'] = 'Patch'
@ -146,7 +146,7 @@ def convertPostToPatch(baseDir: str, nickname: str, domain: str,
postJsonObject['object']['hash'] = commitHash
postJsonObject['object']['description'] = {
"mediaType": "text/plain",
"content": getPatchDescription(patchStr)
"content": _getPatchDescription(patchStr)
}
# remove content map
if postJsonObject['object'].get('contentMap'):
@ -155,7 +155,7 @@ def convertPostToPatch(baseDir: str, nickname: str, domain: str,
return True
def gitAddFromHandle(patchStr: str, handle: str) -> str:
def _gitAddFromHandle(patchStr: str, handle: str) -> str:
"""Adds the activitypub handle of the sender to the patch
"""
fromStr = 'AP-signed-off-by: '
@ -181,7 +181,7 @@ def receiveGitPatch(baseDir: str, nickname: str, domain: str,
messageType, subject, content):
return False
patchStr = gitFormatContent(content)
patchStr = _gitFormatContent(content)
patchLines = patchStr.split('\n')
patchFilename = None
@ -197,7 +197,7 @@ def receiveGitPatch(baseDir: str, nickname: str, domain: str,
patchSubject = patchSubject.replace('[PATCH]', '').strip()
patchSubject = patchSubject.replace(' ', '_')
projectName = \
getGitProjectName(baseDir, nickname, domain, subject)
_getGitProjectName(baseDir, nickname, domain, subject)
if not os.path.isdir(patchesDir):
os.mkdir(patchesDir)
projectDir = patchesDir + '/' + projectName
@ -209,7 +209,7 @@ def receiveGitPatch(baseDir: str, nickname: str, domain: str,
if not patchFilename:
return False
patchStr = \
gitAddFromHandle(patchStr, '@' + fromNickname + '@' + fromDomain)
_gitAddFromHandle(patchStr, '@' + fromNickname + '@' + fromDomain)
with open(patchFilename, 'w+') as patchFile:
patchFile.write(patchStr)
patchNotifyFilename = \
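
The _getGitHash docstring above says it returns the commit hash from a patch; since that hunk is not shown, here is a hedged sketch of how such a hash could be pulled from `git format-patch` output, whose first line has the form 'From <40-hex-sha1> <date>' (not necessarily the project's exact logic):

def getCommitHashFromPatch(patchStr: str) -> str:
    """Returns the 40 character commit hash from a format-patch string,
    or None if no 'From <sha1>' line is present"""
    for line in patchStr.split('\n'):
        if not line.startswith('From '):
            continue
        words = line.split(' ')
        if len(words) > 1 and len(words[1]) == 40:
            candidate = words[1].lower()
            if all(ch in '0123456789abcdef' for ch in candidate):
                return words[1]
    return None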

View File

@ -17,7 +17,7 @@ from utils import daysInMonth
from utils import mergeDicts
def validUuid(testUuid: str, version=4):
def _validUuid(testUuid: str, version=4):
"""Check if uuid_to_test is a valid UUID
"""
try:
@ -28,7 +28,7 @@ def validUuid(testUuid: str, version=4):
return str(uuid_obj) == testUuid
def removeEventFromTimeline(eventId: str, tlEventsFilename: str) -> None:
def _removeEventFromTimeline(eventId: str, tlEventsFilename: str) -> None:
"""Removes the given event Id from the timeline
"""
if eventId + '\n' not in open(tlEventsFilename).read():
@ -71,7 +71,7 @@ def saveEventPost(baseDir: str, handle: str, postId: str,
if eventJson.get('name') and eventJson.get('actor') and \
eventJson.get('uuid') and eventJson.get('content'):
if not validUuid(eventJson['uuid']):
if not _validUuid(eventJson['uuid']):
return False
print('Mobilizon type event')
# if this is a full description of an event then save it
@ -92,7 +92,7 @@ def saveEventPost(baseDir: str, handle: str, postId: str,
tlEventsFilename = baseDir + '/accounts/' + handle + '/events.txt'
if os.path.isfile(tlEventsFilename):
removeEventFromTimeline(eventId, tlEventsFilename)
_removeEventFromTimeline(eventId, tlEventsFilename)
try:
with open(tlEventsFilename, 'r+') as tlEventsFile:
content = tlEventsFile.read()
@ -146,7 +146,7 @@ def saveEventPost(baseDir: str, handle: str, postId: str,
return True
def isHappeningEvent(tag: {}) -> bool:
def _isHappeningEvent(tag: {}) -> bool:
"""Is this tag an Event or Place ActivityStreams type?
"""
if not tag.get('type'):
@ -156,7 +156,7 @@ def isHappeningEvent(tag: {}) -> bool:
return True
def isHappeningPost(postJsonObject: {}) -> bool:
def _isHappeningPost(postJsonObject: {}) -> bool:
"""Is this a post with tags?
"""
if not postJsonObject:
@ -208,13 +208,13 @@ def getTodaysEvents(baseDir: str, nickname: str, domain: str,
continue
postJsonObject = loadJson(postFilename)
if not isHappeningPost(postJsonObject):
if not _isHappeningPost(postJsonObject):
continue
postEvent = []
dayOfMonth = None
for tag in postJsonObject['object']['tag']:
if not isHappeningEvent(tag):
if not _isHappeningEvent(tag):
continue
# this tag is an event or a place
if tag['type'] == 'Event':
@ -275,11 +275,11 @@ def todaysEventsCheck(baseDir: str, nickname: str, domain: str) -> bool:
continue
postJsonObject = loadJson(postFilename)
if not isHappeningPost(postJsonObject):
if not _isHappeningPost(postJsonObject):
continue
for tag in postJsonObject['object']['tag']:
if not isHappeningEvent(tag):
if not _isHappeningEvent(tag):
continue
# this tag is an event or a place
if tag['type'] != 'Event':
@ -322,11 +322,11 @@ def thisWeeksEventsCheck(baseDir: str, nickname: str, domain: str) -> bool:
continue
postJsonObject = loadJson(postFilename)
if not isHappeningPost(postJsonObject):
if not _isHappeningPost(postJsonObject):
continue
for tag in postJsonObject['object']['tag']:
if not isHappeningEvent(tag):
if not _isHappeningEvent(tag):
continue
# this tag is an event or a place
if tag['type'] != 'Event':
@ -377,14 +377,14 @@ def getThisWeeksEvents(baseDir: str, nickname: str, domain: str) -> {}:
continue
postJsonObject = loadJson(postFilename)
if not isHappeningPost(postJsonObject):
if not _isHappeningPost(postJsonObject):
continue
postEvent = []
dayOfMonth = None
weekDayIndex = None
for tag in postJsonObject['object']['tag']:
if not isHappeningEvent(tag):
if not _isHappeningEvent(tag):
continue
# this tag is an event or a place
if tag['type'] == 'Event':
@ -462,13 +462,13 @@ def getCalendarEvents(baseDir: str, nickname: str, domain: str,
continue
postJsonObject = loadJson(postFilename)
if not isHappeningPost(postJsonObject):
if not _isHappeningPost(postJsonObject):
continue
postEvent = []
dayOfMonth = None
for tag in postJsonObject['object']['tag']:
if not isHappeningEvent(tag):
if not _isHappeningEvent(tag):
continue
# this tag is an event or a place
if tag['type'] == 'Event':
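
The _validUuid helper near the top of this file compares the canonical form of the parsed UUID back against the input. A self-contained version of the same check (the exception list is an assumption, since the try block is truncated in the hunk):

import uuid


def validUuid(testUuid: str, version=4) -> bool:
    """True if testUuid is a canonical UUID string of the given version"""
    try:
        uuid_obj = uuid.UUID(testUuid, version=version)
    except (ValueError, AttributeError, TypeError):
        return False
    # uuid.UUID() tolerates braces and mixed case, so round-trip the
    # canonical form to make sure the original string was already canonical
    return str(uuid_obj) == testUuid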

View File

@ -135,7 +135,7 @@ def createSignedHeader(privateKeyPem: str, nickname: str,
return headers
def verifyRecentSignature(signedDateStr: str) -> bool:
def _verifyRecentSignature(signedDateStr: str) -> bool:
"""Checks whether the given time taken from the header is within
12 hours of the current time
"""
@ -219,7 +219,7 @@ def verifyPostHeaders(httpPrefix: str, publicKeyPem: str, headers: dict,
else:
if headers.get(signedHeader):
if signedHeader == 'date':
if not verifyRecentSignature(headers[signedHeader]):
if not _verifyRecentSignature(headers[signedHeader]):
if debug:
print('DEBUG: ' +
'verifyPostHeaders date is not recent ' +
@ -230,7 +230,7 @@ def verifyPostHeaders(httpPrefix: str, publicKeyPem: str, headers: dict,
else:
signedHeaderCap = signedHeader.capitalize()
if signedHeaderCap == 'Date':
if not verifyRecentSignature(headers[signedHeaderCap]):
if not _verifyRecentSignature(headers[signedHeaderCap]):
if debug:
print('DEBUG: ' +
'verifyPostHeaders date is not recent ' +
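
_verifyRecentSignature only needs to decide whether the signed Date header falls within 12 hours of now. A standard-library sketch of that check (the parsing approach and timezone handling are assumptions; only the 12 hour window comes from the docstring above):

from datetime import datetime, timedelta, timezone
from email.utils import parsedate_to_datetime


def signatureDateIsRecent(signedDateStr: str, maxHours=12) -> bool:
    """True if the signed Date header is within maxHours of the current time"""
    try:
        signedTime = parsedate_to_datetime(signedDateStr)
    except (TypeError, ValueError):
        return False
    if signedTime.tzinfo is None:
        # assume UTC when the header carries no timezone
        signedTime = signedTime.replace(tzinfo=timezone.utc)
    now = datetime.now(timezone.utc)
    return abs(now - signedTime) <= timedelta(hours=maxHours)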

750
inbox.py

File diff suppressed because it is too large

View File

@ -28,21 +28,21 @@ import base64
import json
def b64safeEncode(payload: {}) -> str:
def _b64safeEncode(payload: {}) -> str:
"""
b64 url safe encoding with the padding removed.
"""
return base64.urlsafe_b64encode(payload).rstrip(b'=')
def b64safeDecode(payload: {}) -> str:
def _b64safeDecode(payload: {}) -> str:
"""
b64 url safe decoding with the padding added.
"""
return base64.urlsafe_b64decode(payload + b'=' * (4 - len(payload) % 4))
def normalizeJson(payload: {}) -> str:
def _normalizeJson(payload: {}) -> str:
"""
Normalize with URDNA2015
"""
@ -50,7 +50,7 @@ def normalizeJson(payload: {}) -> str:
sort_keys=True).encode('utf-8')
def signRs256(payload: {}, privateKeyPem: str) -> str:
def _signRs256(payload: {}, privateKeyPem: str) -> str:
"""
Produce a RS256 signature of the payload
"""
@ -60,7 +60,7 @@ def signRs256(payload: {}, privateKeyPem: str) -> str:
return signature
def verifyRs256(payload: {}, signature: str, publicKeyPem: str) -> bool:
def _verifyRs256(payload: {}, signature: str, publicKeyPem: str) -> bool:
"""
Verifies a RS256 signature
"""
@ -69,7 +69,7 @@ def verifyRs256(payload: {}, signature: str, publicKeyPem: str) -> bool:
return verifier.verify(SHA256.new(payload), signature)
def signJws(payload: {}, privateKeyPem: str) -> str:
def _signJws(payload: {}, privateKeyPem: str) -> str:
"""
Prepare payload to sign
"""
@ -78,28 +78,28 @@ def signJws(payload: {}, privateKeyPem: str) -> str:
'b64': False,
'crit': ['b64']
}
normalizedJson = normalizeJson(header)
encodedHeader = b64safeEncode(normalizedJson)
normalizedJson = _normalizeJson(header)
encodedHeader = _b64safeEncode(normalizedJson)
preparedPayload = b'.'.join([encodedHeader, payload])
signature = signRs256(preparedPayload, privateKeyPem)
encodedSignature = b64safeEncode(signature)
signature = _signRs256(preparedPayload, privateKeyPem)
encodedSignature = _b64safeEncode(signature)
jwsSignature = b'..'.join([encodedHeader, encodedSignature])
return jwsSignature
def verifyJws(payload: {}, jwsSignature: str, publicKeyPem: str) -> bool:
def _verifyJws(payload: {}, jwsSignature: str, publicKeyPem: str) -> bool:
"""
Verifies a signature using the given public key
"""
encodedHeader, encodedSignature = jwsSignature.split(b'..')
signature = b64safeDecode(encodedSignature)
signature = _b64safeDecode(encodedSignature)
payload = b'.'.join([encodedHeader, payload])
return verifyRs256(payload, signature, publicKeyPem)
return _verifyRs256(payload, signature, publicKeyPem)
def jsonldNormalize(jldDocument: str):
def _jsonldNormalize(jldDocument: str):
"""
Normalize and hash the json-ld document
"""
@ -117,8 +117,8 @@ def jsonldSign(jldDocument: {}, privateKeyPem: str) -> {}:
Produces a signed JSON-LD document with a Json Web Signature
"""
jldDocument = deepcopy(jldDocument)
normalizedJldHash = jsonldNormalize(jldDocument)
jwsSignature = signJws(normalizedJldHash, privateKeyPem)
normalizedJldHash = _jsonldNormalize(jldDocument)
jwsSignature = _signJws(normalizedJldHash, privateKeyPem)
# construct the signature document and add it to jsonld
signature = {
@ -138,9 +138,9 @@ def jsonldVerify(signedJldDocument: {}, publicKeyPem: str) -> bool:
signedJldDocument = deepcopy(signedJldDocument)
signature = signedJldDocument.pop('signature')
jwsSignature = signature['signatureValue'].encode('utf-8')
normalizedJldHash = jsonldNormalize(signedJldDocument)
normalizedJldHash = _jsonldNormalize(signedJldDocument)
return verifyJws(normalizedJldHash, jwsSignature, publicKeyPem)
return _verifyJws(normalizedJldHash, jwsSignature, publicKeyPem)
def testSignJsonld(jldDocument: {}, privateKeyPem: str) -> {}:
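
For reference, the padding-free URL-safe base64 round trip performed by _b64safeEncode and _b64safeDecode above can be exercised on its own with just the standard library (wrapper names here are illustrative):

import base64


def b64safeEncode(payload: bytes) -> bytes:
    """URL-safe base64 with the trailing '=' padding stripped"""
    return base64.urlsafe_b64encode(payload).rstrip(b'=')


def b64safeDecode(payload: bytes) -> bytes:
    """Inverse of b64safeEncode: restore the padding, then decode"""
    return base64.urlsafe_b64decode(payload + b'=' * (-len(payload) % 4))


assert b64safeDecode(b64safeEncode(b'epicyon')) == b'epicyon'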

98
like.py
View File

@ -50,15 +50,15 @@ def noOfLikes(postJsonObject: {}) -> int:
return len(postJsonObject['object']['likes']['items'])
def like(recentPostsCache: {},
session, baseDir: str, federationList: [],
nickname: str, domain: str, port: int,
ccList: [], httpPrefix: str,
objectUrl: str, actorLiked: str,
clientToServer: bool,
sendThreads: [], postLog: [],
personCache: {}, cachedWebfingers: {},
debug: bool, projectVersion: str) -> {}:
def _like(recentPostsCache: {},
session, baseDir: str, federationList: [],
nickname: str, domain: str, port: int,
ccList: [], httpPrefix: str,
objectUrl: str, actorLiked: str,
clientToServer: bool,
sendThreads: [], postLog: [],
personCache: {}, cachedWebfingers: {},
debug: bool, projectVersion: str) -> {}:
"""Creates a like
actor is the person doing the liking
'to' might be a specific person (actor) whose post was liked
@ -134,81 +134,11 @@ def likePost(recentPostsCache: {},
actorLiked = httpPrefix + '://' + likeDomain + '/users/' + likeNickname
objectUrl = actorLiked + '/statuses/' + str(likeStatusNumber)
return like(recentPostsCache,
session, baseDir, federationList, nickname, domain, port,
ccList, httpPrefix, objectUrl, actorLiked, clientToServer,
sendThreads, postLog, personCache, cachedWebfingers,
debug, projectVersion)
def undolike(recentPostsCache: {},
session, baseDir: str, federationList: [],
nickname: str, domain: str, port: int,
ccList: [], httpPrefix: str,
objectUrl: str, actorLiked: str,
clientToServer: bool,
sendThreads: [], postLog: [],
personCache: {}, cachedWebfingers: {},
debug: bool, projectVersion: str) -> {}:
"""Removes a like
actor is the person doing the liking
'to' might be a specific person (actor) whose post was liked
object is typically the url of the message which was liked
"""
if not urlPermitted(objectUrl, federationList):
return None
fullDomain = getFullDomain(domain, port)
newUndoLikeJson = {
"@context": "https://www.w3.org/ns/activitystreams",
'type': 'Undo',
'actor': httpPrefix + '://' + fullDomain + '/users/' + nickname,
'object': {
'type': 'Like',
'actor': httpPrefix + '://' + fullDomain + '/users/' + nickname,
'object': objectUrl
}
}
if ccList:
if len(ccList) > 0:
newUndoLikeJson['cc'] = ccList
newUndoLikeJson['object']['cc'] = ccList
# Extract the domain and nickname from a statuses link
likedPostNickname = None
likedPostDomain = None
likedPostPort = None
if actorLiked:
likedPostNickname = getNicknameFromActor(actorLiked)
likedPostDomain, likedPostPort = getDomainFromActor(actorLiked)
else:
if '/users/' in objectUrl or \
'/accounts/' in objectUrl or \
'/channel/' in objectUrl or \
'/profile/' in objectUrl:
likedPostNickname = getNicknameFromActor(objectUrl)
likedPostDomain, likedPostPort = getDomainFromActor(objectUrl)
if likedPostNickname:
postFilename = locatePost(baseDir, nickname, domain, objectUrl)
if not postFilename:
return None
undoLikesCollectionEntry(baseDir, postFilename, objectUrl,
newUndoLikeJson['actor'], domain, debug)
sendSignedJson(newUndoLikeJson, session, baseDir,
nickname, domain, port,
likedPostNickname, likedPostDomain, likedPostPort,
'https://www.w3.org/ns/activitystreams#Public',
httpPrefix, True, clientToServer, federationList,
sendThreads, postLog, cachedWebfingers, personCache,
debug, projectVersion)
else:
return None
return newUndoLikeJson
return _like(recentPostsCache,
session, baseDir, federationList, nickname, domain, port,
ccList, httpPrefix, objectUrl, actorLiked, clientToServer,
sendThreads, postLog, personCache, cachedWebfingers,
debug, projectVersion)
def sendLikeViaServer(baseDir: str, session,
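
The undolike function removed above built an Undo activity wrapping a Like. Its JSON shape, extracted into a small builder for reference (the helper name is invented; the structure is taken from the removed code):

def buildUndoLike(httpPrefix: str, fullDomain: str,
                  nickname: str, objectUrl: str) -> dict:
    """Constructs the Undo-of-Like activity used when a like is retracted"""
    actor = httpPrefix + '://' + fullDomain + '/users/' + nickname
    return {
        "@context": "https://www.w3.org/ns/activitystreams",
        'type': 'Undo',
        'actor': actor,
        'object': {
            'type': 'Like',
            'actor': actor,
            'object': objectUrl
        }
    }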

View File

@ -62,7 +62,7 @@ def manualDenyFollowRequest(session, baseDir: str,
print('Follow request from ' + denyHandle + ' was denied.')
def approveFollowerHandle(accountDir: str, approveHandle: str) -> None:
def _approveFollowerHandle(accountDir: str, approveHandle: str) -> None:
""" Record manually approved handles so that if they unfollow and then
re-follow later they don't need to be manually approved again
"""
@ -203,7 +203,7 @@ def manualApproveFollowRequest(session, baseDir: str,
# in followers.txt
if approveHandleFull in open(followersFilename).read():
# mark this handle as approved for following
approveFollowerHandle(accountDir, approveHandle)
_approveFollowerHandle(accountDir, approveHandle)
# update the follow requests with the handles not yet approved
os.rename(approveFollowsFilename + '.new', approveFollowsFilename)
# remove the .follow file

View File

@ -6,7 +6,7 @@ __maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__status__ = "Production"
from blurhash import blurhash_encode as blurencode
from blurhash import blurhash_encode
from PIL import Image
import numpy
import os
@ -56,11 +56,12 @@ def removeMetaData(imageFilename: str, outputFilename: str) -> None:
os.system('/usr/bin/mogrify -strip ' + outputFilename) # nosec
def getImageHash(imageFilename: str) -> str:
return blurencode(numpy.array(Image.open(imageFilename).convert("RGB")))
def _getImageHash(imageFilename: str) -> str:
value = numpy.array(Image.open(imageFilename).convert("RGB"))
return blurhash_encode(value)
def isMedia(imageFilename: str) -> bool:
def _isMedia(imageFilename: str) -> bool:
permittedMedia = getMediaExtensions()
for m in permittedMedia:
if imageFilename.endswith('.' + m):
@ -102,7 +103,7 @@ def getAttachmentMediaType(filename: str) -> str:
return mediaType
def updateEtag(mediaFilename: str) -> None:
def _updateEtag(mediaFilename: str) -> None:
""" calculate the etag, which is a sha1 of the data
"""
# only create etags for media
@ -142,7 +143,7 @@ def attachMedia(baseDir: str, httpPrefix: str, domain: str, port: int,
Blurhash is optional, since low power systems may take a long
time to calculate it
"""
if not isMedia(imageFilename):
if not _isMedia(imageFilename):
return postJson
fileExtension = None
@ -181,7 +182,7 @@ def attachMedia(baseDir: str, httpPrefix: str, domain: str, port: int,
if mediaType.startswith('image/'):
attachmentJson['focalPoint'] = [0.0, 0.0]
if useBlurhash:
attachmentJson['blurhash'] = getImageHash(imageFilename)
attachmentJson['blurhash'] = _getImageHash(imageFilename)
postJson['attachment'] = [attachmentJson]
if baseDir:
@ -189,7 +190,7 @@ def attachMedia(baseDir: str, httpPrefix: str, domain: str, port: int,
removeMetaData(imageFilename, mediaFilename)
else:
copyfile(imageFilename, mediaFilename)
updateEtag(mediaFilename)
_updateEtag(mediaFilename)
return postJson
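
_updateEtag is documented above as 'a sha1 of the data'. A minimal sketch of that bookkeeping, assuming the hash is written to a sidecar file next to the media (the '.etag' suffix is an assumption, not taken from the hunk):

import hashlib


def updateEtag(mediaFilename: str) -> None:
    """Writes a sha1 digest of the media file's bytes to a sidecar file"""
    try:
        with open(mediaFilename, 'rb') as mediaFile:
            data = mediaFile.read()
    except OSError:
        return
    # sha1 is used here as a cache validator, not for security
    etag = hashlib.sha1(data).hexdigest()  # nosec
    with open(mediaFilename + '.etag', 'w+') as etagFile:
        etagFile.write(etag)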

View File

@ -9,8 +9,8 @@ __status__ = "Production"
import os
def migrateFollows(followFilename: str, oldHandle: str,
newHandle: str) -> None:
def _migrateFollows(followFilename: str, oldHandle: str,
newHandle: str) -> None:
"""Changes a handle within following or followers list
"""
if not os.path.isfile(followFilename):
@ -48,7 +48,7 @@ def migrateAccount(baseDir: str, oldHandle: str, newHandle: str) -> None:
if '@' in handle:
accountDir = baseDir + '/accounts/' + handle
followFilename = accountDir + '/following.txt'
migrateFollows(followFilename, oldHandle, newHandle)
_migrateFollows(followFilename, oldHandle, newHandle)
followFilename = accountDir + '/followers.txt'
migrateFollows(followFilename, oldHandle, newHandle)
_migrateFollows(followFilename, oldHandle, newHandle)
break
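
_migrateFollows rewrites a one-handle-per-line following or followers file. A self-contained sketch of that substitution, under the same file-format assumption used throughout these modules:

import os


def migrateFollowsFile(followFilename: str, oldHandle: str,
                       newHandle: str) -> None:
    """Replaces oldHandle with newHandle in a newline-delimited list file"""
    if not os.path.isfile(followFilename):
        return
    with open(followFilename, 'r') as followFile:
        followsStr = followFile.read()
    if oldHandle + '\n' not in followsStr:
        return
    followsStr = followsStr.replace(oldHandle + '\n', newHandle + '\n')
    with open(followFilename, 'w+') as followFile:
        followFile.write(followsStr)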

View File

@ -35,7 +35,7 @@ from inbox import storeHashTags
from session import createSession
def updateFeedsOutboxIndex(baseDir: str, domain: str, postId: str) -> None:
def _updateFeedsOutboxIndex(baseDir: str, domain: str, postId: str) -> None:
"""Updates the index used for imported RSS feeds
"""
basePath = baseDir + '/accounts/news@' + domain
@ -59,7 +59,7 @@ def updateFeedsOutboxIndex(baseDir: str, domain: str, postId: str) -> None:
feedsFile.close()
def saveArrivedTime(baseDir: str, postFilename: str, arrived: str) -> None:
def _saveArrivedTime(baseDir: str, postFilename: str, arrived: str) -> None:
"""Saves the time when an rss post arrived to a file
"""
arrivedFile = open(postFilename + '.arrived', 'w+')
@ -68,7 +68,7 @@ def saveArrivedTime(baseDir: str, postFilename: str, arrived: str) -> None:
arrivedFile.close()
def removeControlCharacters(content: str) -> str:
def _removeControlCharacters(content: str) -> str:
"""Remove escaped html
"""
if '&' in content:
@ -227,14 +227,14 @@ def hashtagRuleTree(operators: [],
return tree
def newswireHashtagProcessing(session, baseDir: str, postJsonObject: {},
hashtags: [], httpPrefix: str,
domain: str, port: int,
personCache: {},
cachedWebfingers: {},
federationList: [],
sendThreads: [], postLog: [],
moderated: bool, url: str) -> bool:
def _newswireHashtagProcessing(session, baseDir: str, postJsonObject: {},
hashtags: [], httpPrefix: str,
domain: str, port: int,
personCache: {},
cachedWebfingers: {},
federationList: [],
sendThreads: [], postLog: [],
moderated: bool, url: str) -> bool:
"""Applies hashtag rules to a news post.
Returns true if the post should be saved to the news timeline
of this instance
@ -356,9 +356,9 @@ def newswireHashtagProcessing(session, baseDir: str, postJsonObject: {},
return True
def createNewsMirror(baseDir: str, domain: str,
postIdNumber: str, url: str,
maxMirroredArticles: int) -> bool:
def _createNewsMirror(baseDir: str, domain: str,
postIdNumber: str, url: str,
maxMirroredArticles: int) -> bool:
"""Creates a local mirror of a news article
"""
if '|' in url or '>' in url:
@ -446,17 +446,17 @@ def createNewsMirror(baseDir: str, domain: str,
return True
def convertRSStoActivityPub(baseDir: str, httpPrefix: str,
domain: str, port: int,
newswire: {},
translate: {},
recentPostsCache: {}, maxRecentPosts: int,
session, cachedWebfingers: {},
personCache: {},
federationList: [],
sendThreads: [], postLog: [],
maxMirroredArticles: int,
allowLocalNetworkAccess: bool) -> None:
def _convertRSStoActivityPub(baseDir: str, httpPrefix: str,
domain: str, port: int,
newswire: {},
translate: {},
recentPostsCache: {}, maxRecentPosts: int,
session, cachedWebfingers: {},
personCache: {},
federationList: [],
sendThreads: [], postLog: [],
maxMirroredArticles: int,
allowLocalNetworkAccess: bool) -> None:
"""Converts rss items in a newswire into posts
"""
if not newswire:
@ -497,7 +497,7 @@ def convertRSStoActivityPub(baseDir: str, httpPrefix: str,
newswire[originalDateStr][3] = filename
continue
rssTitle = removeControlCharacters(item[0])
rssTitle = _removeControlCharacters(item[0])
url = item[1]
if dangerousMarkup(url, allowLocalNetworkAccess) or \
dangerousMarkup(rssTitle, allowLocalNetworkAccess):
@ -505,7 +505,7 @@ def convertRSStoActivityPub(baseDir: str, httpPrefix: str,
rssDescription = ''
# get the rss description if it exists
rssDescription = removeControlCharacters(item[4])
rssDescription = _removeControlCharacters(item[4])
if rssDescription.startswith('<![CDATA['):
rssDescription = rssDescription.replace('<![CDATA[', '')
rssDescription = rssDescription.replace(']]>', '')
@ -555,8 +555,8 @@ def convertRSStoActivityPub(baseDir: str, httpPrefix: str,
continue
if mirrored:
if not createNewsMirror(baseDir, domain, statusNumber,
url, maxMirroredArticles):
if not _createNewsMirror(baseDir, domain, statusNumber,
url, maxMirroredArticles):
continue
idStr = \
@ -590,12 +590,12 @@ def convertRSStoActivityPub(baseDir: str, httpPrefix: str,
moderated = item[5]
savePost = newswireHashtagProcessing(session, baseDir, blog, hashtags,
httpPrefix, domain, port,
personCache, cachedWebfingers,
federationList,
sendThreads, postLog,
moderated, url)
savePost = _newswireHashtagProcessing(session, baseDir, blog, hashtags,
httpPrefix, domain, port,
personCache, cachedWebfingers,
federationList,
sendThreads, postLog,
moderated, url)
# save the post and update the index
if savePost:
@ -628,7 +628,7 @@ def convertRSStoActivityPub(baseDir: str, httpPrefix: str,
blog['object']['content'] = content
# update the newswire tags if new ones have been found by
# newswireHashtagProcessing
# _newswireHashtagProcessing
for tag in hashtags:
if tag not in newswire[originalDateStr][6]:
newswire[originalDateStr][6].append(tag)
@ -637,14 +637,14 @@ def convertRSStoActivityPub(baseDir: str, httpPrefix: str,
clearFromPostCaches(baseDir, recentPostsCache, postId)
if saveJson(blog, filename):
updateFeedsOutboxIndex(baseDir, domain, postId + '.json')
_updateFeedsOutboxIndex(baseDir, domain, postId + '.json')
# Save a file containing the time when the post arrived
# this can then later be used to construct the news timeline
# excluding items during the voting period
if moderated:
saveArrivedTime(baseDir, filename,
blog['object']['arrived'])
_saveArrivedTime(baseDir, filename,
blog['object']['arrived'])
else:
if os.path.isfile(filename + '.arrived'):
os.remove(filename + '.arrived')
@ -658,7 +658,7 @@ def convertRSStoActivityPub(baseDir: str, httpPrefix: str,
newswire[originalDateStr][3] = filename
def mergeWithPreviousNewswire(oldNewswire: {}, newNewswire: {}) -> None:
def _mergeWithPreviousNewswire(oldNewswire: {}, newNewswire: {}) -> None:
"""Preserve any votes or generated activitypub post filename
as rss feeds are updated
"""
@ -707,26 +707,26 @@ def runNewswireDaemon(baseDir: str, httpd,
if os.path.isfile(newswireStateFilename):
httpd.newswire = loadJson(newswireStateFilename)
mergeWithPreviousNewswire(httpd.newswire, newNewswire)
_mergeWithPreviousNewswire(httpd.newswire, newNewswire)
httpd.newswire = newNewswire
if newNewswire:
saveJson(httpd.newswire, newswireStateFilename)
print('Newswire updated')
convertRSStoActivityPub(baseDir,
httpPrefix, domain, port,
newNewswire, translate,
httpd.recentPostsCache,
httpd.maxRecentPosts,
httpd.session,
httpd.cachedWebfingers,
httpd.personCache,
httpd.federationList,
httpd.sendThreads,
httpd.postLog,
httpd.maxMirroredArticles,
httpd.allowLocalNetworkAccess)
_convertRSStoActivityPub(baseDir,
httpPrefix, domain, port,
newNewswire, translate,
httpd.recentPostsCache,
httpd.maxRecentPosts,
httpd.session,
httpd.cachedWebfingers,
httpd.personCache,
httpd.federationList,
httpd.sendThreads,
httpd.postLog,
httpd.maxMirroredArticles,
httpd.allowLocalNetworkAccess)
print('Newswire feed converted to ActivityPub')
if httpd.maxNewsPosts > 0:
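
_saveArrivedTime records when a moderated newswire item arrived so that it can be held back during the voting period; the '.arrived' sidecar is removed again once moderation no longer applies. A minimal sketch of that marker file (the timestamp format is an assumption):

from datetime import datetime, timezone


def saveArrivedTime(postFilename: str, arrived=None) -> None:
    """Writes the arrival time of an rss post to a '.arrived' sidecar file"""
    if not arrived:
        arrived = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
    with open(postFilename + '.arrived', 'w+') as arrivedFile:
        arrivedFile.write(arrived)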

View File

@ -15,7 +15,7 @@ from datetime import timedelta
from datetime import timezone
from collections import OrderedDict
from utils import validPostDate
from utils import setHashtagCategory
from categories import setHashtagCategory
from utils import firstParagraphFromString
from utils import isPublicPost
from utils import locatePost
@ -29,7 +29,7 @@ from blocking import isBlockedHashtag
from filters import isFiltered
def removeCDATA(text: str) -> str:
def _removeCDATA(text: str) -> str:
"""Removes any CDATA from the given text
"""
if 'CDATA[' in text:
@ -95,13 +95,13 @@ def getNewswireTags(text: str, maxTags: int) -> []:
return tags
def addNewswireDictEntry(baseDir: str, domain: str,
newswire: {}, dateStr: str,
title: str, link: str,
votesStatus: str, postFilename: str,
description: str, moderated: bool,
mirrored: bool,
tags=[], maxTags=32) -> None:
def _addNewswireDictEntry(baseDir: str, domain: str,
newswire: {}, dateStr: str,
title: str, link: str,
votesStatus: str, postFilename: str,
description: str, moderated: bool,
mirrored: bool,
tags=[], maxTags=32) -> None:
"""Update the newswire dictionary
"""
# remove any markup
@ -143,7 +143,7 @@ def addNewswireDictEntry(baseDir: str, domain: str,
]
def validFeedDate(pubDate: str) -> bool:
def _validFeedDate(pubDate: str) -> bool:
# convert from YYYY-MM-DD HH:MM:SS+00:00 to
# YYYY-MM-DDTHH:MM:SSZ
postDate = pubDate.replace(' ', 'T').replace('+00:00', 'Z')
@ -219,12 +219,12 @@ def loadHashtagCategories(baseDir: str, language: str) -> None:
with open(hashtagCategoriesFilename, 'r') as fp:
xmlStr = fp.read()
xml2StrToHashtagCategories(baseDir, xmlStr, 1024, True)
_xml2StrToHashtagCategories(baseDir, xmlStr, 1024, True)
def xml2StrToHashtagCategories(baseDir: str, xmlStr: str,
maxCategoriesFeedItemSizeKb: int,
force=False) -> None:
def _xml2StrToHashtagCategories(baseDir: str, xmlStr: str,
maxCategoriesFeedItemSizeKb: int,
force=False) -> None:
"""Updates hashtag categories based upon an rss feed
"""
rssItems = xmlStr.split('<item>')
@ -261,11 +261,11 @@ def xml2StrToHashtagCategories(baseDir: str, xmlStr: str,
setHashtagCategory(baseDir, hashtag, categoryStr, force)
def xml2StrToDict(baseDir: str, domain: str, xmlStr: str,
moderated: bool, mirrored: bool,
maxPostsPerSource: int,
maxFeedItemSizeKb: int,
maxCategoriesFeedItemSizeKb: int) -> {}:
def _xml2StrToDict(baseDir: str, domain: str, xmlStr: str,
moderated: bool, mirrored: bool,
maxPostsPerSource: int,
maxFeedItemSizeKb: int,
maxCategoriesFeedItemSizeKb: int) -> {}:
"""Converts an xml RSS 2.0 string to a dictionary
"""
if '<item>' not in xmlStr:
@ -274,8 +274,8 @@ def xml2StrToDict(baseDir: str, domain: str, xmlStr: str,
# is this an rss feed containing hashtag categories?
if '<title>#categories</title>' in xmlStr:
xml2StrToHashtagCategories(baseDir, xmlStr,
maxCategoriesFeedItemSizeKb)
_xml2StrToHashtagCategories(baseDir, xmlStr,
maxCategoriesFeedItemSizeKb)
return {}
rssItems = xmlStr.split('<item>')
@ -300,17 +300,17 @@ def xml2StrToDict(baseDir: str, domain: str, xmlStr: str,
if '</pubDate>' not in rssItem:
continue
title = rssItem.split('<title>')[1]
title = removeCDATA(title.split('</title>')[0])
title = _removeCDATA(title.split('</title>')[0])
description = ''
if '<description>' in rssItem and '</description>' in rssItem:
description = rssItem.split('<description>')[1]
description = removeCDATA(description.split('</description>')[0])
description = _removeCDATA(description.split('</description>')[0])
else:
if '<media:description>' in rssItem and \
'</media:description>' in rssItem:
description = rssItem.split('<media:description>')[1]
description = description.split('</media:description>')[0]
description = removeCDATA(description)
description = _removeCDATA(description)
link = rssItem.split('<link>')[1]
link = link.split('</link>')[0]
if '://' not in link:
@ -325,14 +325,14 @@ def xml2StrToDict(baseDir: str, domain: str, xmlStr: str,
pubDateStr = parseFeedDate(pubDate)
if pubDateStr:
if validFeedDate(pubDateStr):
if _validFeedDate(pubDateStr):
postFilename = ''
votesStatus = []
addNewswireDictEntry(baseDir, domain,
result, pubDateStr,
title, link,
votesStatus, postFilename,
description, moderated, mirrored)
_addNewswireDictEntry(baseDir, domain,
result, pubDateStr,
title, link,
votesStatus, postFilename,
description, moderated, mirrored)
postCtr += 1
if postCtr >= maxPostsPerSource:
break
@ -341,11 +341,11 @@ def xml2StrToDict(baseDir: str, domain: str, xmlStr: str,
return result
def xml1StrToDict(baseDir: str, domain: str, xmlStr: str,
moderated: bool, mirrored: bool,
maxPostsPerSource: int,
maxFeedItemSizeKb: int,
maxCategoriesFeedItemSizeKb: int) -> {}:
def _xml1StrToDict(baseDir: str, domain: str, xmlStr: str,
moderated: bool, mirrored: bool,
maxPostsPerSource: int,
maxFeedItemSizeKb: int,
maxCategoriesFeedItemSizeKb: int) -> {}:
"""Converts an xml RSS 1.0 string to a dictionary
https://validator.w3.org/feed/docs/rss1.html
"""
@ -356,8 +356,8 @@ def xml1StrToDict(baseDir: str, domain: str, xmlStr: str,
# is this an rss feed containing hashtag categories?
if '<title>#categories</title>' in xmlStr:
xml2StrToHashtagCategories(baseDir, xmlStr,
maxCategoriesFeedItemSizeKb)
_xml2StrToHashtagCategories(baseDir, xmlStr,
maxCategoriesFeedItemSizeKb)
return {}
rssItems = xmlStr.split(itemStr)
@ -384,17 +384,17 @@ def xml1StrToDict(baseDir: str, domain: str, xmlStr: str,
if '</dc:date>' not in rssItem:
continue
title = rssItem.split('<title>')[1]
title = removeCDATA(title.split('</title>')[0])
title = _removeCDATA(title.split('</title>')[0])
description = ''
if '<description>' in rssItem and '</description>' in rssItem:
description = rssItem.split('<description>')[1]
description = removeCDATA(description.split('</description>')[0])
description = _removeCDATA(description.split('</description>')[0])
else:
if '<media:description>' in rssItem and \
'</media:description>' in rssItem:
description = rssItem.split('<media:description>')[1]
description = description.split('</media:description>')[0]
description = removeCDATA(description)
description = _removeCDATA(description)
link = rssItem.split('<link>')[1]
link = link.split('</link>')[0]
if '://' not in link:
@ -409,14 +409,14 @@ def xml1StrToDict(baseDir: str, domain: str, xmlStr: str,
pubDateStr = parseFeedDate(pubDate)
if pubDateStr:
if validFeedDate(pubDateStr):
if _validFeedDate(pubDateStr):
postFilename = ''
votesStatus = []
addNewswireDictEntry(baseDir, domain,
result, pubDateStr,
title, link,
votesStatus, postFilename,
description, moderated, mirrored)
_addNewswireDictEntry(baseDir, domain,
result, pubDateStr,
title, link,
votesStatus, postFilename,
description, moderated, mirrored)
postCtr += 1
if postCtr >= maxPostsPerSource:
break
@ -425,10 +425,10 @@ def xml1StrToDict(baseDir: str, domain: str, xmlStr: str,
return result
def atomFeedToDict(baseDir: str, domain: str, xmlStr: str,
moderated: bool, mirrored: bool,
maxPostsPerSource: int,
maxFeedItemSizeKb: int) -> {}:
def _atomFeedToDict(baseDir: str, domain: str, xmlStr: str,
moderated: bool, mirrored: bool,
maxPostsPerSource: int,
maxFeedItemSizeKb: int) -> {}:
"""Converts an atom feed string to a dictionary
"""
if '<entry>' not in xmlStr:
@ -456,17 +456,17 @@ def atomFeedToDict(baseDir: str, domain: str, xmlStr: str,
if '</updated>' not in atomItem:
continue
title = atomItem.split('<title>')[1]
title = removeCDATA(title.split('</title>')[0])
title = _removeCDATA(title.split('</title>')[0])
description = ''
if '<summary>' in atomItem and '</summary>' in atomItem:
description = atomItem.split('<summary>')[1]
description = removeCDATA(description.split('</summary>')[0])
description = _removeCDATA(description.split('</summary>')[0])
else:
if '<media:description>' in atomItem and \
'</media:description>' in atomItem:
description = atomItem.split('<media:description>')[1]
description = description.split('</media:description>')[0]
description = removeCDATA(description)
description = _removeCDATA(description)
link = atomItem.split('<link>')[1]
link = link.split('</link>')[0]
if '://' not in link:
@ -481,14 +481,14 @@ def atomFeedToDict(baseDir: str, domain: str, xmlStr: str,
pubDateStr = parseFeedDate(pubDate)
if pubDateStr:
if validFeedDate(pubDateStr):
if _validFeedDate(pubDateStr):
postFilename = ''
votesStatus = []
addNewswireDictEntry(baseDir, domain,
result, pubDateStr,
title, link,
votesStatus, postFilename,
description, moderated, mirrored)
_addNewswireDictEntry(baseDir, domain,
result, pubDateStr,
title, link,
votesStatus, postFilename,
description, moderated, mirrored)
postCtr += 1
if postCtr >= maxPostsPerSource:
break
@ -497,10 +497,10 @@ def atomFeedToDict(baseDir: str, domain: str, xmlStr: str,
return result
def atomFeedYTToDict(baseDir: str, domain: str, xmlStr: str,
moderated: bool, mirrored: bool,
maxPostsPerSource: int,
maxFeedItemSizeKb: int) -> {}:
def _atomFeedYTToDict(baseDir: str, domain: str, xmlStr: str,
moderated: bool, mirrored: bool,
maxPostsPerSource: int,
maxFeedItemSizeKb: int) -> {}:
"""Converts an atom-style YouTube feed string to a dictionary
"""
if '<entry>' not in xmlStr:
@ -532,17 +532,17 @@ def atomFeedYTToDict(baseDir: str, domain: str, xmlStr: str,
if '</yt:videoId>' not in atomItem:
continue
title = atomItem.split('<title>')[1]
title = removeCDATA(title.split('</title>')[0])
title = _removeCDATA(title.split('</title>')[0])
description = ''
if '<media:description>' in atomItem and \
'</media:description>' in atomItem:
description = atomItem.split('<media:description>')[1]
description = description.split('</media:description>')[0]
description = removeCDATA(description)
description = _removeCDATA(description)
elif '<summary>' in atomItem and '</summary>' in atomItem:
description = atomItem.split('<summary>')[1]
description = description.split('</summary>')[0]
description = removeCDATA(description)
description = _removeCDATA(description)
link = atomItem.split('<yt:videoId>')[1]
link = link.split('</yt:videoId>')[0]
link = 'https://www.youtube.com/watch?v=' + link.strip()
@ -551,14 +551,14 @@ def atomFeedYTToDict(baseDir: str, domain: str, xmlStr: str,
pubDateStr = parseFeedDate(pubDate)
if pubDateStr:
if validFeedDate(pubDateStr):
if _validFeedDate(pubDateStr):
postFilename = ''
votesStatus = []
addNewswireDictEntry(baseDir, domain,
result, pubDateStr,
title, link,
votesStatus, postFilename,
description, moderated, mirrored)
_addNewswireDictEntry(baseDir, domain,
result, pubDateStr,
title, link,
votesStatus, postFilename,
description, moderated, mirrored)
postCtr += 1
if postCtr >= maxPostsPerSource:
break
@ -567,36 +567,36 @@ def atomFeedYTToDict(baseDir: str, domain: str, xmlStr: str,
return result
def xmlStrToDict(baseDir: str, domain: str, xmlStr: str,
moderated: bool, mirrored: bool,
maxPostsPerSource: int,
maxFeedItemSizeKb: int,
maxCategoriesFeedItemSizeKb: int) -> {}:
def _xmlStrToDict(baseDir: str, domain: str, xmlStr: str,
moderated: bool, mirrored: bool,
maxPostsPerSource: int,
maxFeedItemSizeKb: int,
maxCategoriesFeedItemSizeKb: int) -> {}:
"""Converts an xml string to a dictionary
"""
if '<yt:videoId>' in xmlStr and '<yt:channelId>' in xmlStr:
print('YouTube feed: reading')
return atomFeedYTToDict(baseDir, domain,
xmlStr, moderated, mirrored,
maxPostsPerSource, maxFeedItemSizeKb)
return _atomFeedYTToDict(baseDir, domain,
xmlStr, moderated, mirrored,
maxPostsPerSource, maxFeedItemSizeKb)
elif 'rss version="2.0"' in xmlStr:
return xml2StrToDict(baseDir, domain,
xmlStr, moderated, mirrored,
maxPostsPerSource, maxFeedItemSizeKb,
maxCategoriesFeedItemSizeKb)
elif '<?xml version="1.0"' in xmlStr:
return xml1StrToDict(baseDir, domain,
xmlStr, moderated, mirrored,
maxPostsPerSource, maxFeedItemSizeKb,
maxCategoriesFeedItemSizeKb)
elif 'xmlns="http://www.w3.org/2005/Atom"' in xmlStr:
return atomFeedToDict(baseDir, domain,
return _xml2StrToDict(baseDir, domain,
xmlStr, moderated, mirrored,
maxPostsPerSource, maxFeedItemSizeKb)
maxPostsPerSource, maxFeedItemSizeKb,
maxCategoriesFeedItemSizeKb)
elif '<?xml version="1.0"' in xmlStr:
return _xml1StrToDict(baseDir, domain,
xmlStr, moderated, mirrored,
maxPostsPerSource, maxFeedItemSizeKb,
maxCategoriesFeedItemSizeKb)
elif 'xmlns="http://www.w3.org/2005/Atom"' in xmlStr:
return _atomFeedToDict(baseDir, domain,
xmlStr, moderated, mirrored,
maxPostsPerSource, maxFeedItemSizeKb)
return {}
def YTchannelToAtomFeed(url: str) -> str:
def _YTchannelToAtomFeed(url: str) -> str:
"""Converts a YouTube channel url into an atom feed url
"""
if 'youtube.com/channel/' not in url:
@ -633,17 +633,17 @@ def getRSS(baseDir: str, domain: str, session, url: str,
'Mozilla/5.0 (X11; Linux x86_64; rv:81.0) Gecko/20100101 Firefox/81.0'
if not session:
print('WARN: no session specified for getRSS')
url = YTchannelToAtomFeed(url)
url = _YTchannelToAtomFeed(url)
try:
result = session.get(url, headers=sessionHeaders, params=sessionParams)
if result:
if int(len(result.text) / 1024) < maxFeedSizeKb and \
not containsInvalidChars(result.text):
return xmlStrToDict(baseDir, domain, result.text,
moderated, mirrored,
maxPostsPerSource,
maxFeedItemSizeKb,
maxCategoriesFeedItemSizeKb)
return _xmlStrToDict(baseDir, domain, result.text,
moderated, mirrored,
maxPostsPerSource,
maxFeedItemSizeKb,
maxCategoriesFeedItemSizeKb)
else:
print('WARN: feed is too large, ' +
'or contains invalid characters: ' + url)
@ -692,7 +692,7 @@ def getRSSfromDict(baseDir: str, newswire: {},
continue
rssStr += '<item>\n'
rssStr += ' <title>' + fields[0] + '</title>\n'
description = removeCDATA(firstParagraphFromString(fields[4]))
description = _removeCDATA(firstParagraphFromString(fields[4]))
rssStr += ' <description>' + description + '</description>\n'
url = fields[1]
if '://' not in url:
@ -707,7 +707,7 @@ def getRSSfromDict(baseDir: str, newswire: {},
return rssStr
def isNewswireBlogPost(postJsonObject: {}) -> bool:
def _isNewswireBlogPost(postJsonObject: {}) -> bool:
"""Is the given object a blog post?
There isn't any difference between a blog post and a newswire blog post
but we may here need to check for different properties than
@ -727,7 +727,7 @@ def isNewswireBlogPost(postJsonObject: {}) -> bool:
return False
def getHashtagsFromPost(postJsonObject: {}) -> []:
def _getHashtagsFromPost(postJsonObject: {}) -> []:
"""Returns a list of any hashtags within a post
"""
if not postJsonObject.get('object'):
@ -753,11 +753,11 @@ def getHashtagsFromPost(postJsonObject: {}) -> []:
return tags
def addAccountBlogsToNewswire(baseDir: str, nickname: str, domain: str,
newswire: {},
maxBlogsPerAccount: int,
indexFilename: str,
maxTags: int) -> None:
def _addAccountBlogsToNewswire(baseDir: str, nickname: str, domain: str,
newswire: {},
maxBlogsPerAccount: int,
indexFilename: str,
maxTags: int) -> None:
"""Adds blogs for the given account to the newswire
"""
if not os.path.isfile(indexFilename):
@ -803,7 +803,7 @@ def addAccountBlogsToNewswire(baseDir: str, nickname: str, domain: str,
postJsonObject = None
if fullPostFilename:
postJsonObject = loadJson(fullPostFilename)
if isNewswireBlogPost(postJsonObject):
if _isNewswireBlogPost(postJsonObject):
published = postJsonObject['object']['published']
published = published.replace('T', ' ')
published = published.replace('Z', '+00:00')
@ -812,24 +812,25 @@ def addAccountBlogsToNewswire(baseDir: str, nickname: str, domain: str,
votes = loadJson(fullPostFilename + '.votes')
content = postJsonObject['object']['content']
description = firstParagraphFromString(content)
description = removeCDATA(description)
addNewswireDictEntry(baseDir, domain,
newswire, published,
postJsonObject['object']['summary'],
postJsonObject['object']['url'],
votes, fullPostFilename,
description, moderated, False,
getHashtagsFromPost(postJsonObject),
maxTags)
description = _removeCDATA(description)
tagsFromPost = _getHashtagsFromPost(postJsonObject)
_addNewswireDictEntry(baseDir, domain,
newswire, published,
postJsonObject['object']['summary'],
postJsonObject['object']['url'],
votes, fullPostFilename,
description, moderated, False,
tagsFromPost,
maxTags)
ctr += 1
if ctr >= maxBlogsPerAccount:
break
def addBlogsToNewswire(baseDir: str, domain: str, newswire: {},
maxBlogsPerAccount: int,
maxTags: int) -> None:
def _addBlogsToNewswire(baseDir: str, domain: str, newswire: {},
maxBlogsPerAccount: int,
maxTags: int) -> None:
"""Adds blogs from each user account into the newswire
"""
moderationDict = {}
@ -857,9 +858,9 @@ def addBlogsToNewswire(baseDir: str, domain: str, newswire: {},
blogsIndex = accountDir + '/tlblogs.index'
if os.path.isfile(blogsIndex):
domain = handle.split('@')[1]
addAccountBlogsToNewswire(baseDir, nickname, domain,
newswire, maxBlogsPerAccount,
blogsIndex, maxTags)
_addAccountBlogsToNewswire(baseDir, nickname, domain,
newswire, maxBlogsPerAccount,
blogsIndex, maxTags)
break
# sort the moderation dict into chronological order, latest first
@ -926,8 +927,8 @@ def getDictFromNewswire(session, baseDir: str, domain: str,
result[dateStr] = item
# add blogs from each user account
addBlogsToNewswire(baseDir, domain, result,
maxPostsPerSource, maxTags)
_addBlogsToNewswire(baseDir, domain, result,
maxPostsPerSource, maxTags)
# sort into chronological order, latest first
sortedResult = OrderedDict(sorted(result.items(), reverse=True))
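
_removeCDATA is applied to every feed title and description above. One plausible implementation of that stripping, shown here only as a sketch of the idea rather than the module's exact code:

def removeCDATA(text: str) -> str:
    """Strips a <![CDATA[ ... ]]> wrapper from feed text, if present"""
    if 'CDATA[' in text:
        text = text.split('CDATA[')[1]
        if ']]' in text:
            text = text.split(']]')[0]
    return text.strip()


assert removeCDATA('<![CDATA[Breaking news]]>') == 'Breaking news'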

View File

@ -134,7 +134,7 @@ def setOrganizationScheme(baseDir: str, nickname: str, domain: str,
return True
def accountExists(baseDir: str, nickname: str, domain: str) -> bool:
def _accountExists(baseDir: str, nickname: str, domain: str) -> bool:
"""Returns true if the given account exists
"""
if ':' in domain:
@ -201,10 +201,10 @@ def getDefaultPersonContext() -> str:
}
def createPersonBase(baseDir: str, nickname: str, domain: str, port: int,
httpPrefix: str, saveToFile: bool,
manualFollowerApproval: bool,
password=None) -> (str, str, {}, {}):
def _createPersonBase(baseDir: str, nickname: str, domain: str, port: int,
httpPrefix: str, saveToFile: bool,
manualFollowerApproval: bool,
password=None) -> (str, str, {}, {}):
"""Returns the private key, public key, actor and webfinger endpoint
"""
privateKeyPem, publicKeyPem = generateRSAKey()
@ -377,7 +377,7 @@ def registerAccount(baseDir: str, httpPrefix: str, domain: str, port: int,
manualFollowerApproval: bool) -> bool:
"""Registers a new account from the web interface
"""
if accountExists(baseDir, nickname, domain):
if _accountExists(baseDir, nickname, domain):
return False
if not validNickname(domain, nickname):
print('REGISTER: Nickname ' + nickname + ' is invalid')
@ -449,12 +449,12 @@ def createPerson(baseDir: str, nickname: str, domain: str, port: int,
return None, None, None, None
(privateKeyPem, publicKeyPem,
newPerson, webfingerEndpoint) = createPersonBase(baseDir, nickname,
domain, port,
httpPrefix,
saveToFile,
manualFollowerApproval,
password)
newPerson, webfingerEndpoint) = _createPersonBase(baseDir, nickname,
domain, port,
httpPrefix,
saveToFile,
manualFollowerApproval,
password)
if not getConfigParam(baseDir, 'admin'):
if nickname != 'news':
# print(nickname+' becomes the instance admin and a moderator')
@ -525,8 +525,8 @@ def createSharedInbox(baseDir: str, nickname: str, domain: str, port: int,
httpPrefix: str) -> (str, str, {}, {}):
"""Generates the shared inbox
"""
return createPersonBase(baseDir, nickname, domain, port, httpPrefix,
True, True, None)
return _createPersonBase(baseDir, nickname, domain, port, httpPrefix,
True, True, None)
def createNewsInbox(baseDir: str, domain: str, port: int,
@ -722,48 +722,6 @@ def personBoxJson(recentPostsCache: {},
return None
def personInboxJson(recentPostsCache: {},
baseDir: str, domain: str, port: int, path: str,
httpPrefix: str, noOfItems: int) -> []:
"""Obtain the inbox feed for the given person
Authentication is expected to have already happened
"""
if '/inbox' not in path:
return None
# Only show the header by default
headerOnly = True
# handle page numbers
pageNumber = None
if '?page=' in path:
pageNumber = path.split('?page=')[1]
if pageNumber == 'true':
pageNumber = 1
else:
try:
pageNumber = int(pageNumber)
except BaseException:
pass
path = path.split('?page=')[0]
headerOnly = False
if not path.endswith('/inbox'):
return None
nickname = None
if path.startswith('/users/'):
nickname = path.replace('/users/', '', 1).replace('/inbox', '')
if path.startswith('/@'):
nickname = path.replace('/@', '', 1).replace('/inbox', '')
if not nickname:
return None
if not validNickname(domain, nickname):
return None
return createInbox(recentPostsCache, baseDir, nickname,
domain, port, httpPrefix,
noOfItems, headerOnly, pageNumber)
def setDisplayNickname(baseDir: str, nickname: str, domain: str,
displayName: str) -> bool:
if len(displayName) > 32:
@ -800,7 +758,7 @@ def setBio(baseDir: str, nickname: str, domain: str, bio: str) -> bool:
return True
def unsuspendAccount(baseDir: str, nickname: str) -> None:
def reenableAccount(baseDir: str, nickname: str) -> None:
"""Removes an account suspention
"""
suspendedFilename = baseDir + '/accounts/suspended.txt'
@ -887,8 +845,8 @@ def canRemovePost(baseDir: str, nickname: str,
return True
def removeTagsForNickname(baseDir: str, nickname: str,
domain: str, port: int) -> None:
def _removeTagsForNickname(baseDir: str, nickname: str,
domain: str, port: int) -> None:
"""Removes tags for a nickname
"""
if not os.path.isdir(baseDir + '/tags'):
@ -939,10 +897,10 @@ def removeAccount(baseDir: str, nickname: str,
if moderator.strip('\n') == nickname:
return False
unsuspendAccount(baseDir, nickname)
reenableAccount(baseDir, nickname)
handle = nickname + '@' + domain
removePassword(baseDir, nickname)
removeTagsForNickname(baseDir, nickname, domain, port)
_removeTagsForNickname(baseDir, nickname, domain, port)
if os.path.isdir(baseDir + '/deactivated/' + handle):
shutil.rmtree(baseDir + '/deactivated/' + handle)
if os.path.isdir(baseDir + '/accounts/' + handle):
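
reenableAccount (renamed from unsuspendAccount) filters a nickname out of the instance-wide suspended list. A sketch of that bookkeeping, using the accounts/suspended.txt path and the strip('\n') comparison visible in the surrounding hunks (the helper name is invented):

import os


def reenableSuspendedAccount(baseDir: str, nickname: str) -> None:
    """Removes a nickname from accounts/suspended.txt, ending its suspension"""
    suspendedFilename = baseDir + '/accounts/suspended.txt'
    if not os.path.isfile(suspendedFilename):
        return
    with open(suspendedFilename, 'r') as suspendedFile:
        lines = suspendedFile.readlines()
    with open(suspendedFilename, 'w+') as suspendedFile:
        for suspended in lines:
            if suspended.strip('\n') != nickname:
                suspendedFile.write(suspended)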

View File

@ -77,8 +77,8 @@ def getPetName(baseDir: str, nickname: str, domain: str,
return ''
def getPetNameHandle(baseDir: str, nickname: str, domain: str,
petname: str) -> str:
def _getPetNameHandle(baseDir: str, nickname: str, domain: str,
petname: str) -> str:
"""Given a petname returns the handle
"""
if petname.startswith('@'):
@ -113,7 +113,7 @@ def resolvePetnames(baseDir: str, nickname: str, domain: str,
if not wrd.startswith('@'):
break
# does a petname handle exist for this?
handle = getPetNameHandle(baseDir, nickname, domain, wrd)
handle = _getPetNameHandle(baseDir, nickname, domain, wrd)
if not handle:
continue
# replace the petname with the handle

661
posts.py

File diff suppressed because it is too large

View File

@ -63,7 +63,7 @@ def clearEditorStatus(baseDir: str) -> None:
saveJson(actorJson, filename)
def addModerator(baseDir: str, nickname: str, domain: str) -> None:
def _addModerator(baseDir: str, nickname: str, domain: str) -> None:
"""Adds a moderator nickname to the file
"""
if ':' in domain:
@ -92,7 +92,7 @@ def addModerator(baseDir: str, nickname: str, domain: str) -> None:
f.write(nickname + '\n')
def removeModerator(baseDir: str, nickname: str):
def _removeModerator(baseDir: str, nickname: str):
"""Removes a moderator nickname from the file
"""
moderatorsFile = baseDir + '/accounts/moderators.txt'
@ -125,7 +125,7 @@ def setRole(baseDir: str, nickname: str, domain: str,
if role:
# add the role
if project == 'instance' and role == 'moderator':
addModerator(baseDir, nickname, domain)
_addModerator(baseDir, nickname, domain)
if actorJson['roles'].get(project):
if role not in actorJson['roles'][project]:
actorJson['roles'][project].append(role)
@ -134,7 +134,7 @@ def setRole(baseDir: str, nickname: str, domain: str,
else:
# remove the role
if project == 'instance':
removeModerator(baseDir, nickname)
_removeModerator(baseDir, nickname)
if actorJson['roles'].get(project):
actorJson['roles'][project].remove(role)
# if the project contains no roles then remove it
@ -144,8 +144,8 @@ def setRole(baseDir: str, nickname: str, domain: str,
return True
def getRoles(baseDir: str, nickname: str, domain: str,
project: str) -> []:
def _getRoles(baseDir: str, nickname: str, domain: str,
project: str) -> []:
"""Returns the roles for a given person on a given project
"""
actorFilename = baseDir + '/accounts/' + \
@ -198,8 +198,8 @@ def outboxDelegate(baseDir: str, authenticatedNickname: str,
# instance delegators can delegate to other projects
# than their own
canDelegate = False
delegatorRoles = getRoles(baseDir, delegatorNickname,
domain, 'instance')
delegatorRoles = _getRoles(baseDir, delegatorNickname,
domain, 'instance')
if delegatorRoles:
if 'delegator' in delegatorRoles:
canDelegate = True
@ -207,8 +207,8 @@ def outboxDelegate(baseDir: str, authenticatedNickname: str,
if not canDelegate:
canDelegate = True
# non-instance delegators can only delegate within their project
delegatorRoles = getRoles(baseDir, delegatorNickname,
domain, project)
delegatorRoles = _getRoles(baseDir, delegatorNickname,
domain, project)
if delegatorRoles:
if 'delegator' not in delegatorRoles:
return False
@ -230,7 +230,7 @@ def outboxDelegate(baseDir: str, authenticatedNickname: str,
return True
# what roles is this person already assigned to?
existingRoles = getRoles(baseDir, nickname, domain, project)
existingRoles = _getRoles(baseDir, nickname, domain, project)
if existingRoles:
if role in existingRoles:
if debug:
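
setRole and _getRoles operate on a roles dictionary stored on the actor JSON, keyed by project and holding a list of role names, with empty projects pruned. A small illustrative helper capturing that structure (the function name is invented):

def setActorRole(actorJson: dict, project: str, role: str,
                 add=True) -> None:
    """Adds or removes a role under actorJson['roles'][project]"""
    roles = actorJson.setdefault('roles', {})
    if add:
        roles.setdefault(project, [])
        if role not in roles[project]:
            roles[project].append(role)
    else:
        if roles.get(project) and role in roles[project]:
            roles[project].remove(role)
            # drop the project once it has no roles left
            if not roles[project]:
                del roles[project]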

View File

@ -14,8 +14,8 @@ from utils import loadJson
from outbox import postMessageToOutbox
def updatePostSchedule(baseDir: str, handle: str, httpd,
maxScheduledPosts: int) -> None:
def _updatePostSchedule(baseDir: str, handle: str, httpd,
maxScheduledPosts: int) -> None:
"""Checks if posts are due to be delivered and if so moves them to the outbox
"""
scheduleIndexFilename = baseDir + '/accounts/' + handle + '/schedule.index'
@ -104,7 +104,9 @@ def updatePostSchedule(baseDir: str, handle: str, httpd,
httpd.proxyType,
httpd.projectVersion,
httpd.debug,
httpd.YTReplacementDomain):
httpd.YTReplacementDomain,
httpd.showPublishedDateOnly,
httpd.allowLocalNetworkAccess):
indexLines.remove(line)
os.remove(postFilename)
continue
@ -145,7 +147,7 @@ def runPostSchedule(baseDir: str, httpd, maxScheduledPosts: int):
baseDir + '/accounts/' + account + '/schedule.index'
if not os.path.isfile(scheduleIndexFilename):
continue
updatePostSchedule(baseDir, account, httpd, maxScheduledPosts)
_updatePostSchedule(baseDir, account, httpd, maxScheduledPosts)
break

View File

@ -176,11 +176,11 @@ def expireShares(baseDir: str) -> None:
continue
nickname = account.split('@')[0]
domain = account.split('@')[1]
expireSharesForAccount(baseDir, nickname, domain)
_expireSharesForAccount(baseDir, nickname, domain)
break
def expireSharesForAccount(baseDir: str, nickname: str, domain: str) -> None:
def _expireSharesForAccount(baseDir: str, nickname: str, domain: str) -> None:
"""Removes expired items from shares for a particular account
"""
handleDomain = domain

View File

@ -41,17 +41,6 @@ def setSkillLevel(baseDir: str, nickname: str, domain: str,
return True
def setSkills(baseDir: str, nickname: str, domain: str, skills: {}) -> None:
actorFilename = baseDir + '/accounts/' + nickname + '@' + domain + '.json'
if not os.path.isfile(actorFilename):
return False
actorJson = loadJson(actorFilename)
if actorJson:
actorJson['skills'] = skills
saveJson(actorJson, actorFilename)
def getSkills(baseDir: str, nickname: str, domain: str) -> []:
"""Returns the skills for a given person
"""

373
tests.py
View File

@ -49,8 +49,8 @@ from utils import getStatusNumber
from utils import getFollowersOfPerson
from utils import removeHtml
from follow import followerOfPerson
from follow import unfollowPerson
from follow import unfollowerOfPerson
from follow import unfollowAccount
from follow import unfollowerOfAccount
from follow import sendFollowRequest
from person import createPerson
from person import setDisplayNickname
@ -73,7 +73,7 @@ from delete import sendDeleteViaServer
from inbox import jsonPostAllowsComments
from inbox import validInbox
from inbox import validInboxFilenames
from inbox import guessHashtagCategory
from categories import guessHashtagCategory
from content import htmlReplaceEmailQuote
from content import htmlReplaceQuoteMarks
from content import dangerousMarkup
@ -101,7 +101,7 @@ thrBob = None
thrEve = None
def testHttpsigBase(withDigest):
def _testHttpsigBase(withDigest):
print('testHttpsig(' + str(withDigest) + ')')
baseDir = os.getcwd()
@ -206,8 +206,8 @@ def testHttpsigBase(withDigest):
def testHttpsig():
testHttpsigBase(True)
testHttpsigBase(False)
_testHttpsigBase(True)
_testHttpsigBase(False)
def testCache():
@ -274,19 +274,44 @@ def createServerAlice(path: str, domain: str, port: int,
followerOfPerson(path, nickname, domain, 'bob', bobAddress,
federationList, False)
if hasPosts:
testFollowersOnly = False
testSaveToFile = True
clientToServer = False
testCommentsEnabled = True
testAttachImageFilename = None
testMediaType = None
testImageDescription = None
createPublicPost(path, nickname, domain, port, httpPrefix,
"No wise fish would go anywhere without a porpoise",
False, True, clientToServer, True,
None, None, useBlurhash)
testFollowersOnly,
testSaveToFile,
clientToServer,
testCommentsEnabled,
testAttachImageFilename,
testMediaType,
testImageDescription,
useBlurhash)
createPublicPost(path, nickname, domain, port, httpPrefix,
"Curiouser and curiouser!", False, True,
clientToServer, True, None, None, useBlurhash)
"Curiouser and curiouser!",
testFollowersOnly,
testSaveToFile,
clientToServer,
testCommentsEnabled,
testAttachImageFilename,
testMediaType,
testImageDescription,
useBlurhash)
createPublicPost(path, nickname, domain, port, httpPrefix,
"In the gardens of memory, in the palace " +
"of dreams, that is where you and I shall meet",
False, True, clientToServer, True,
None, None, useBlurhash)
testFollowersOnly,
testSaveToFile,
clientToServer,
testCommentsEnabled,
testAttachImageFilename,
testMediaType,
testImageDescription,
useBlurhash)
global testServerAliceRunning
testServerAliceRunning = True
maxMentions = 10
@ -346,19 +371,43 @@ def createServerBob(path: str, domain: str, port: int,
followerOfPerson(path, nickname, domain,
'alice', aliceAddress, federationList, False)
if hasPosts:
testFollowersOnly = False
testSaveToFile = True
testCommentsEnabled = True
testAttachImageFilename = None
testImageDescription = None
testMediaType = None
createPublicPost(path, nickname, domain, port, httpPrefix,
"It's your life, live it your way.",
False, True, clientToServer, True,
None, None, useBlurhash)
testFollowersOnly,
testSaveToFile,
clientToServer,
testCommentsEnabled,
testAttachImageFilename,
testMediaType,
testImageDescription,
useBlurhash)
createPublicPost(path, nickname, domain, port, httpPrefix,
"One of the things I've realised is that " +
"I am very simple",
False, True, clientToServer, True,
None, None, useBlurhash)
testFollowersOnly,
testSaveToFile,
clientToServer,
testCommentsEnabled,
testAttachImageFilename,
testMediaType,
testImageDescription,
useBlurhash)
createPublicPost(path, nickname, domain, port, httpPrefix,
"Quantum physics is a bit of a passion of mine",
False, True, clientToServer, True,
None, None, useBlurhash)
testFollowersOnly,
testSaveToFile,
clientToServer,
testCommentsEnabled,
testAttachImageFilename,
testMediaType,
testImageDescription,
useBlurhash)
global testServerBobRunning
testServerBobRunning = True
maxMentions = 10
@ -601,10 +650,12 @@ def testPostMessageBetweenServers():
print('\n\n*******************************************************')
print("Bob likes Alice's post")
aliceDomainStr = aliceDomain + ':' + str(alicePort)
followerOfPerson(bobDir, 'bob', bobDomain, 'alice',
aliceDomain + ':' + str(alicePort), federationList, False)
aliceDomainStr, federationList, False)
bobDomainStr = bobDomain + ':' + str(bobPort)
followPerson(aliceDir, 'alice', aliceDomain, 'bob',
bobDomain + ':' + str(bobPort), federationList, False)
bobDomainStr, federationList, False)
sessionBob = createSession(proxyType)
bobPostLog = []
@ -978,7 +1029,7 @@ def testNoOfFollowersOnDomain():
noOfFollowersOnDomain(baseDir, nickname + '@' + domain, otherdomain)
assert followersOnOtherDomain == 3
unfollowerOfPerson(baseDir, nickname, domain, 'sausagedog', otherdomain)
unfollowerOfAccount(baseDir, nickname, domain, 'sausagedog', otherdomain)
followersOnOtherDomain = \
noOfFollowersOnDomain(baseDir, nickname + '@' + domain, otherdomain)
assert followersOnOtherDomain == 2
@ -1074,7 +1125,7 @@ def testFollows():
assert(False)
assert(domainFound)
unfollowPerson(baseDir, nickname, domain, 'batman', 'mesh.com')
unfollowAccount(baseDir, nickname, domain, 'batman', 'mesh.com')
domainFound = False
for followingDomain in f:
@ -1753,7 +1804,8 @@ def testWebLinks():
'they prefer to cling to their customs, beliefs, and traditions ' + \
'rather than to accept the teachings of a war of each ' + \
'against all"\n\n--Peter Kropotkin'
resultText = removeLongWords(addWebLinks(exampleText), 40, [])
testFnStr = addWebLinks(exampleText)
resultText = removeLongWords(testFnStr, 40, [])
assert resultText == exampleText
assert 'ellipsis' not in resultText
@ -1767,7 +1819,8 @@ def testWebLinks():
exampleText = \
'<p>Test1 test2 #YetAnotherExcessivelyLongwindedAndBoringHashtag</p>'
resultText = removeLongWords(addWebLinks(exampleText), 40, [])
testFnStr = addWebLinks(exampleText)
resultText = removeLongWords(testFnStr, 40, [])
assert(resultText ==
'<p>Test1 test2 '
'#YetAnotherExcessivelyLongwindedAndBorin\ngHashtag</p>')
@ -1776,7 +1829,8 @@ def testWebLinks():
"<p>Don't remove a p2p link " + \
"rad:git:hwd1yrerc3mcgn8ga9rho3dqi4w33nep7kxmqezss4topyfgmexihp" + \
"33xcw</p>"
resultText = removeLongWords(addWebLinks(exampleText), 40, [])
testFnStr = addWebLinks(exampleText)
resultText = removeLongWords(testFnStr, 40, [])
assert resultText == exampleText
@ -2533,8 +2587,277 @@ def testReplyToPublicPost() -> None:
httpPrefix + '://rat.site/users/ninjarodent'
def getFunctionCallArgs(name: str, lines: [], startLineCtr: int) -> []:
"""Returns the arguments of a function call given lines
of source code and a starting line number
"""
argsStr = lines[startLineCtr].split(name + '(')[1]
if ')' in argsStr:
argsStr = argsStr.split(')')[0].replace(' ', '').split(',')
return argsStr
for lineCtr in range(startLineCtr + 1, len(lines)):
if ')' not in lines[lineCtr]:
argsStr += lines[lineCtr]
continue
else:
argsStr += lines[lineCtr].split(')')[0]
break
return argsStr.replace('\n', '').replace(' ', '').split(',')
def getFunctionCalls(name: str, lines: [], startLineCtr: int,
functionProperties: {}) -> []:
"""Returns the functions called by the given one,
starting from the given line of the given source code
"""
callsFunctions = []
functionContentStr = ''
for lineCtr in range(startLineCtr + 1, len(lines)):
if lines[lineCtr].startswith('def '):
break
if lines[lineCtr].startswith('class '):
break
functionContentStr += lines[lineCtr]
for funcName, properties in functionProperties.items():
if funcName + '(' in functionContentStr:
callsFunctions.append(funcName)
return callsFunctions
def functionArgsMatch(callArgs: [], funcArgs: []):
"""Do the function artuments match the function call arguments
"""
if len(callArgs) == len(funcArgs):
return True
# count non-optional arguments
callArgsCtr = 0
for a in callArgs:
if a == 'self':
continue
if '=' not in a:
callArgsCtr += 1
funcArgsCtr = 0
for a in funcArgs:
if a == 'self':
continue
if '=' not in a:
funcArgsCtr += 1
return callArgsCtr >= funcArgsCtr
def testFunctions():
print('testFunctions')
function = {}
functionProperties = {}
modules = {}
for subdir, dirs, files in os.walk('.'):
for sourceFile in files:
if not sourceFile.endswith('.py'):
continue
modName = sourceFile.replace('.py', '')
modules[modName] = {
'functions': []
}
sourceStr = ''
with open(sourceFile, "r") as f:
sourceStr = f.read()
modules[modName]['source'] = sourceStr
with open(sourceFile, "r") as f:
lines = f.readlines()
modules[modName]['lines'] = lines
for line in lines:
if not line.startswith('def '):
continue
methodName = line.split('def ', 1)[1].split('(')[0]
methodArgs = \
sourceStr.split('def ' + methodName + '(')[1]
methodArgs = methodArgs.split(')')[0]
methodArgs = methodArgs.replace(' ', '').split(',')
if function.get(modName):
function[modName].append(methodName)
else:
function[modName] = [methodName]
if methodName not in modules[modName]['functions']:
modules[modName]['functions'].append(methodName)
functionProperties[methodName] = {
"args": methodArgs,
"module": modName,
"calledInModule": []
}
break
excludeFuncArgs = [
'pyjsonld'
]
excludeFuncs = [
'link'
]
# which modules is each function used within?
for modName, modProperties in modules.items():
print('Module: ' + modName + '')
for name, properties in functionProperties.items():
lineCtr = 0
for line in modules[modName]['lines']:
if line.startswith('def '):
lineCtr += 1
continue
if name + '(' in line:
modList = \
functionProperties[name]['calledInModule']
if modName not in modList:
modList.append(modName)
if modName in excludeFuncArgs:
lineCtr += 1
continue
if name in excludeFuncs:
lineCtr += 1
continue
callArgs = \
getFunctionCallArgs(name,
modules[modName]['lines'],
lineCtr)
if not functionArgsMatch(callArgs,
functionProperties[name]['args']):
print('Call to function ' + name +
' does not match its arguments')
print('def args: ' +
str(len(functionProperties[name]['args'])) +
'\n' + str(functionProperties[name]['args']))
print('Call args: ' + str(len(callArgs)) + '\n' +
str(callArgs))
print('module ' + modName + ' line ' + str(lineCtr))
assert False
lineCtr += 1
# don't check these functions, because they are procedurally called
exclusions = [
'set_document_loader',
'normalize',
'get_document_loader',
'runInboxQueueWatchdog',
'runInboxQueue',
'runPostSchedule',
'runPostScheduleWatchdog',
'str2bool',
'runNewswireDaemon',
'runNewswireWatchdog',
'threadSendPost',
'sendToFollowers',
'expireCache',
'migrateAccount',
'getMutualsOfPerson',
'runPostsQueue',
'runSharesExpire',
'runPostsWatchdog',
'runSharesExpireWatchdog',
'getThisWeeksEvents',
'getAvailability',
'testThreadsFunction',
'createServerAlice',
'createServerBob',
'createServerEve',
'E2EEremoveDevice',
'setOrganizationScheme'
]
excludeImports = [
'link'
]
excludeLocal = [
'pyjsonld',
'daemon',
'tests'
]
# check that functions are called somewhere
for name, properties in functionProperties.items():
if name in exclusions:
continue
isLocalFunction = False
if not properties['calledInModule']:
print('function ' + name +
' in module ' + properties['module'] +
' is not called anywhere')
assert properties['calledInModule']
if len(properties['calledInModule']) == 1:
modName = properties['calledInModule'][0]
if modName not in excludeLocal and \
modName == properties['module']:
isLocalFunction = True
if not name.startswith('_'):
print('Local function ' + name +
' in ' + modName + '.py does not begin with _')
assert False
if name not in excludeImports:
for modName in properties['calledInModule']:
if modName == properties['module']:
continue
importStr = 'from ' + properties['module'] + ' import ' + name
if importStr not in modules[modName]['source']:
print(importStr + ' not found in ' + modName + '.py')
assert False
if not isLocalFunction:
if name.startswith('_'):
excludePublic = [
'pyjsonld',
'daemon',
'tests'
]
modName = properties['module']
if modName not in excludePublic:
print('Public function ' + name + ' in ' +
modName + '.py begins with _')
assert False
print('Function: ' + name + '')
print('Constructing call graph')
for modName, modProperties in modules.items():
lineCtr = 0
for line in modules[modName]['lines']:
if line.startswith('def '):
name = line.split('def ')[1].split('(')[0]
callsList = \
getFunctionCalls(name, modules[modName]['lines'],
lineCtr, functionProperties)
functionProperties[name]['calls'] = callsList.copy()
lineCtr += 1
callGraphStr = 'digraph Epicyon {\n\n'
callGraphStr += ' graph [fontsize=10 fontname="Verdana" compound=true];\n'
callGraphStr += ' node [shape=record fontsize=10 fontname="Verdana"];\n\n'
for modName, modProperties in modules.items():
callGraphStr += ' subgraph cluster_' + modName + ' {\n'
callGraphStr += ' label = "' + modName + '";\n'
callGraphStr += ' node [style=filled];\n'
callGraphStr += ' '
for name in modProperties['functions']:
callGraphStr += '"' + name + '" '
callGraphStr += ';\n'
callGraphStr += ' color=blue;\n'
callGraphStr += ' }\n\n'
for name, properties in functionProperties.items():
if not properties['calls']:
continue
for calledFunc in properties['calls']:
callGraphStr += ' "' + name + '" -> "' + calledFunc + '";\n'
callGraphStr += '\n}\n'
with open('epicyon.dot', 'w+') as fp:
fp.write(callGraphStr)
print('Call graph saved to epicyon.dot')
print('Convert to image with: ' +
'dot -Tjpg epicyon.dot -o epicyon_diagram.jpg')
def runAllTests():
print('Running tests...')
testFunctions()
testReplyToPublicPost()
testGetMentionedPeople()
testGuessHashtagCategory()
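
A minimal sketch of how the argument checking above behaves (illustrative
only; 'examplePost' and its arguments are hypothetical names, and tests.py
from this repository is assumed to be importable): a call which omits an
argument that has a default value still counts as a match.

from tests import getFunctionCallArgs, functionArgsMatch

# one line of source containing a call to a hypothetical function
sourceLines = ["result = examplePost(baseDir, nickname, domain)\n"]
callArgs = getFunctionCallArgs('examplePost', sourceLines, 0)
print(callArgs)  # ['baseDir', 'nickname', 'domain']

# 'debug=False' has a default, so the three argument call still matches
defArgs = ['baseDir', 'nickname', 'domain', 'debug=False']
print(functionArgsMatch(callArgs, defArgs))  # True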

theme.py

@ -14,7 +14,7 @@ from shutil import copyfile
from content import dangerousCSS
def getThemeFiles() -> []:
def _getThemeFiles() -> []:
return ('epicyon.css', 'login.css', 'follow.css',
'suspended.css', 'calendar.css', 'blog.css',
'options.css', 'search.css', 'links.css')
@ -38,7 +38,7 @@ def getThemesList(baseDir: str) -> []:
return themes
def setThemeInConfig(baseDir: str, name: str) -> bool:
def _setThemeInConfig(baseDir: str, name: str) -> bool:
configFilename = baseDir + '/config.json'
if not os.path.isfile(configFilename):
return False
@ -49,7 +49,7 @@ def setThemeInConfig(baseDir: str, name: str) -> bool:
return saveJson(configJson, configFilename)
def setNewswirePublishAsIcon(baseDir: str, useIcon: bool) -> bool:
def _setNewswirePublishAsIcon(baseDir: str, useIcon: bool) -> bool:
"""Shows the newswire publish action as an icon or a button
"""
configFilename = baseDir + '/config.json'
@ -62,7 +62,7 @@ def setNewswirePublishAsIcon(baseDir: str, useIcon: bool) -> bool:
return saveJson(configJson, configFilename)
def setIconsAsButtons(baseDir: str, useButtons: bool) -> bool:
def _setIconsAsButtons(baseDir: str, useButtons: bool) -> bool:
"""Whether to show icons in the header (inbox, outbox, etc)
as buttons
"""
@ -76,7 +76,7 @@ def setIconsAsButtons(baseDir: str, useButtons: bool) -> bool:
return saveJson(configJson, configFilename)
def setRssIconAtTop(baseDir: str, atTop: bool) -> bool:
def _setRssIconAtTop(baseDir: str, atTop: bool) -> bool:
"""Whether to show RSS icon at the top of the timeline
"""
configFilename = baseDir + '/config.json'
@ -89,7 +89,7 @@ def setRssIconAtTop(baseDir: str, atTop: bool) -> bool:
return saveJson(configJson, configFilename)
def setPublishButtonAtTop(baseDir: str, atTop: bool) -> bool:
def _setPublishButtonAtTop(baseDir: str, atTop: bool) -> bool:
"""Whether to show the publish button above the title image
in the newswire column
"""
@ -103,7 +103,7 @@ def setPublishButtonAtTop(baseDir: str, atTop: bool) -> bool:
return saveJson(configJson, configFilename)
def setFullWidthTimelineButtonHeader(baseDir: str, fullWidth: bool) -> bool:
def _setFullWidthTimelineButtonHeader(baseDir: str, fullWidth: bool) -> bool:
"""Shows the timeline button header containing inbox, outbox,
calendar, etc as full width
"""
@ -127,8 +127,8 @@ def getTheme(baseDir: str) -> str:
return 'default'
def removeTheme(baseDir: str):
themeFiles = getThemeFiles()
def _removeTheme(baseDir: str):
themeFiles = _getThemeFiles()
for filename in themeFiles:
if os.path.isfile(baseDir + '/' + filename):
os.remove(baseDir + '/' + filename)
@ -183,14 +183,14 @@ def setCSSparam(css: str, param: str, value: str) -> str:
return newcss.strip()
def setThemeFromDict(baseDir: str, name: str,
themeParams: {}, bgParams: {},
allowLocalNetworkAccess: bool) -> None:
def _setThemeFromDict(baseDir: str, name: str,
themeParams: {}, bgParams: {},
allowLocalNetworkAccess: bool) -> None:
"""Uses a dictionary to set a theme
"""
if name:
setThemeInConfig(baseDir, name)
themeFiles = getThemeFiles()
_setThemeInConfig(baseDir, name)
themeFiles = _getThemeFiles()
for filename in themeFiles:
# check for custom css within the theme directory
templateFilename = baseDir + '/theme/' + name + '/epicyon-' + filename
@ -215,33 +215,33 @@ def setThemeFromDict(baseDir: str, name: str,
for paramName, paramValue in themeParams.items():
if paramName == 'newswire-publish-icon':
if paramValue.lower() == 'true':
setNewswirePublishAsIcon(baseDir, True)
_setNewswirePublishAsIcon(baseDir, True)
else:
setNewswirePublishAsIcon(baseDir, False)
_setNewswirePublishAsIcon(baseDir, False)
continue
elif paramName == 'full-width-timeline-buttons':
if paramValue.lower() == 'true':
setFullWidthTimelineButtonHeader(baseDir, True)
_setFullWidthTimelineButtonHeader(baseDir, True)
else:
setFullWidthTimelineButtonHeader(baseDir, False)
_setFullWidthTimelineButtonHeader(baseDir, False)
continue
elif paramName == 'icons-as-buttons':
if paramValue.lower() == 'true':
setIconsAsButtons(baseDir, True)
_setIconsAsButtons(baseDir, True)
else:
setIconsAsButtons(baseDir, False)
_setIconsAsButtons(baseDir, False)
continue
elif paramName == 'rss-icon-at-top':
if paramValue.lower() == 'true':
setRssIconAtTop(baseDir, True)
_setRssIconAtTop(baseDir, True)
else:
setRssIconAtTop(baseDir, False)
_setRssIconAtTop(baseDir, False)
continue
elif paramName == 'publish-button-at-top':
if paramValue.lower() == 'true':
setPublishButtonAtTop(baseDir, True)
_setPublishButtonAtTop(baseDir, True)
else:
setPublishButtonAtTop(baseDir, False)
_setPublishButtonAtTop(baseDir, False)
continue
css = setCSSparam(css, paramName, paramValue)
filename = baseDir + '/' + filename
@ -249,17 +249,17 @@ def setThemeFromDict(baseDir: str, name: str,
cssfile.write(css)
if bgParams.get('login'):
setBackgroundFormat(baseDir, name, 'login', bgParams['login'])
_setBackgroundFormat(baseDir, name, 'login', bgParams['login'])
if bgParams.get('follow'):
setBackgroundFormat(baseDir, name, 'follow', bgParams['follow'])
_setBackgroundFormat(baseDir, name, 'follow', bgParams['follow'])
if bgParams.get('options'):
setBackgroundFormat(baseDir, name, 'options', bgParams['options'])
_setBackgroundFormat(baseDir, name, 'options', bgParams['options'])
if bgParams.get('search'):
setBackgroundFormat(baseDir, name, 'search', bgParams['search'])
_setBackgroundFormat(baseDir, name, 'search', bgParams['search'])
def setBackgroundFormat(baseDir: str, name: str,
backgroundType: str, extension: str) -> None:
def _setBackgroundFormat(baseDir: str, name: str,
backgroundType: str, extension: str) -> None:
"""Sets the background file extension
"""
if extension == 'jpg':
@ -277,7 +277,7 @@ def setBackgroundFormat(baseDir: str, name: str,
def enableGrayscale(baseDir: str) -> None:
"""Enables grayscale for the current theme
"""
themeFiles = getThemeFiles()
themeFiles = _getThemeFiles()
for filename in themeFiles:
templateFilename = baseDir + '/' + filename
if not os.path.isfile(templateFilename):
@ -300,7 +300,7 @@ def enableGrayscale(baseDir: str) -> None:
def disableGrayscale(baseDir: str) -> None:
"""Disables grayscale for the current theme
"""
themeFiles = getThemeFiles()
themeFiles = _getThemeFiles()
for filename in themeFiles:
templateFilename = baseDir + '/' + filename
if not os.path.isfile(templateFilename):
@ -318,7 +318,7 @@ def disableGrayscale(baseDir: str) -> None:
os.remove(grayscaleFilename)
def setCustomFont(baseDir: str):
def _setCustomFont(baseDir: str):
"""Uses a dictionary to set a theme
"""
customFontExt = None
@ -337,7 +337,7 @@ def setCustomFont(baseDir: str):
if not customFontExt:
return
themeFiles = getThemeFiles()
themeFiles = _getThemeFiles()
for filename in themeFiles:
templateFilename = baseDir + '/' + filename
if not os.path.isfile(templateFilename):
@ -356,9 +356,9 @@ def setCustomFont(baseDir: str):
cssfile.write(css)
def readVariablesFile(baseDir: str, themeName: str,
variablesFile: str,
allowLocalNetworkAccess: bool) -> None:
def _readVariablesFile(baseDir: str, themeName: str,
variablesFile: str,
allowLocalNetworkAccess: bool) -> None:
"""Reads variables from a file in the theme directory
"""
themeParams = loadJson(variablesFile, 0)
@ -370,14 +370,14 @@ def readVariablesFile(baseDir: str, themeName: str,
"options": "jpg",
"search": "jpg"
}
setThemeFromDict(baseDir, themeName, themeParams, bgParams,
allowLocalNetworkAccess)
_setThemeFromDict(baseDir, themeName, themeParams, bgParams,
allowLocalNetworkAccess)
def setThemeDefault(baseDir: str, allowLocalNetworkAccess: bool):
def _setThemeDefault(baseDir: str, allowLocalNetworkAccess: bool):
name = 'default'
removeTheme(baseDir)
setThemeInConfig(baseDir, name)
_removeTheme(baseDir)
_setThemeInConfig(baseDir, name)
bgParams = {
"login": "jpg",
"follow": "jpg",
@ -394,44 +394,11 @@ def setThemeDefault(baseDir: str, allowLocalNetworkAccess: bool):
"banner-height-mobile": "10vh",
"search-banner-height-mobile": "15vh"
}
setThemeFromDict(baseDir, name, themeParams, bgParams,
allowLocalNetworkAccess)
_setThemeFromDict(baseDir, name, themeParams, bgParams,
allowLocalNetworkAccess)
def setThemeHighVis(baseDir: str, allowLocalNetworkAccess: bool):
name = 'highvis'
themeParams = {
"newswire-publish-icon": True,
"full-width-timeline-buttons": False,
"icons-as-buttons": False,
"rss-icon-at-top": True,
"publish-button-at-top": False,
"font-size-header": "22px",
"font-size-header-mobile": "32px",
"font-size": "45px",
"font-size2": "45px",
"font-size3": "45px",
"font-size4": "35px",
"font-size5": "29px",
"gallery-font-size": "35px",
"gallery-font-size-mobile": "55px",
"hashtag-vertical-spacing3": "100px",
"hashtag-vertical-spacing4": "150px",
"time-vertical-align": "-10px",
"*font-family": "'LinBiolinum_Rah'",
"*src": "url('./fonts/LinBiolinum_Rah.woff2') format('woff2')"
}
bgParams = {
"login": "jpg",
"follow": "jpg",
"options": "jpg",
"search": "jpg"
}
setThemeFromDict(baseDir, name, themeParams, bgParams,
allowLocalNetworkAccess)
def setThemeFonts(baseDir: str, themeName: str) -> None:
def _setThemeFonts(baseDir: str, themeName: str) -> None:
"""Adds custom theme fonts
"""
themeNameLower = themeName.lower()
@ -455,7 +422,7 @@ def setThemeFonts(baseDir: str, themeName: str) -> None:
break
def setThemeImages(baseDir: str, name: str) -> None:
def _setThemeImages(baseDir: str, name: str) -> None:
"""Changes the profile background image
and banner to the defaults
"""
@ -590,7 +557,7 @@ def setTheme(baseDir: str, name: str, domain: str,
result = False
prevThemeName = getTheme(baseDir)
removeTheme(baseDir)
_removeTheme(baseDir)
themes = getThemesList(baseDir)
for themeName in themes:
@ -606,21 +573,21 @@ def setTheme(baseDir: str, name: str, domain: str,
if prevThemeName.lower() != themeNameLower:
# change the banner and profile image
# to the default for the theme
setThemeImages(baseDir, name)
setThemeFonts(baseDir, name)
_setThemeImages(baseDir, name)
_setThemeFonts(baseDir, name)
result = True
if not result:
# default
setThemeDefault(baseDir)
_setThemeDefault(baseDir, allowLocalNetworkAccess)
result = True
variablesFile = baseDir + '/theme/' + name + '/theme.json'
if os.path.isfile(variablesFile):
readVariablesFile(baseDir, name, variablesFile,
allowLocalNetworkAccess)
_readVariablesFile(baseDir, name, variablesFile,
allowLocalNetworkAccess)
setCustomFont(baseDir)
_setCustomFont(baseDir)
# set the news avatar
newsAvatarThemeFilename = \
@ -637,5 +604,5 @@ def setTheme(baseDir: str, name: str, domain: str,
else:
disableGrayscale(baseDir)
setThemeInConfig(baseDir, name)
_setThemeInConfig(baseDir, name)
return result

utils.py

@ -78,152 +78,6 @@ def isDormant(baseDir: str, nickname: str, domain: str, actor: str,
return False
def getHashtagCategory(baseDir: str, hashtag: str) -> str:
"""Returns the category for the hashtag
"""
categoryFilename = baseDir + '/tags/' + hashtag + '.category'
if not os.path.isfile(categoryFilename):
categoryFilename = baseDir + '/tags/' + hashtag.title() + '.category'
if not os.path.isfile(categoryFilename):
categoryFilename = \
baseDir + '/tags/' + hashtag.upper() + '.category'
if not os.path.isfile(categoryFilename):
return ''
with open(categoryFilename, 'r') as fp:
categoryStr = fp.read()
if categoryStr:
return categoryStr
return ''
def getHashtagCategories(baseDir: str, recent=False, category=None) -> None:
"""Returns a dictionary containing hashtag categories
"""
hashtagCategories = {}
if recent:
currTime = datetime.datetime.utcnow()
daysSinceEpoch = (currTime - datetime.datetime(1970, 1, 1)).days
recently = daysSinceEpoch - 1
for subdir, dirs, files in os.walk(baseDir + '/tags'):
for f in files:
if not f.endswith('.category'):
continue
categoryFilename = os.path.join(baseDir + '/tags', f)
if not os.path.isfile(categoryFilename):
continue
hashtag = f.split('.')[0]
with open(categoryFilename, 'r') as fp:
categoryStr = fp.read()
if not categoryStr:
continue
if category:
# only return a dictionary for a specific category
if categoryStr != category:
continue
if recent:
tagsFilename = baseDir + '/tags/' + hashtag + '.txt'
if not os.path.isfile(tagsFilename):
continue
modTimesinceEpoc = \
os.path.getmtime(tagsFilename)
lastModifiedDate = \
datetime.datetime.fromtimestamp(modTimesinceEpoc)
fileDaysSinceEpoch = \
(lastModifiedDate -
datetime.datetime(1970, 1, 1)).days
if fileDaysSinceEpoch < recently:
continue
if not hashtagCategories.get(categoryStr):
hashtagCategories[categoryStr] = [hashtag]
else:
if hashtag not in hashtagCategories[categoryStr]:
hashtagCategories[categoryStr].append(hashtag)
break
return hashtagCategories
def updateHashtagCategories(baseDir: str) -> None:
"""Regenerates the list of hashtag categories
"""
categoryListFilename = baseDir + '/accounts/categoryList.txt'
hashtagCategories = getHashtagCategories(baseDir)
if not hashtagCategories:
if os.path.isfile(categoryListFilename):
os.remove(categoryListFilename)
return
categoryList = []
for categoryStr, hashtagList in hashtagCategories.items():
categoryList.append(categoryStr)
categoryList.sort()
categoryListStr = ''
for categoryStr in categoryList:
categoryListStr += categoryStr + '\n'
# save a list of available categories for quick lookup
with open(categoryListFilename, 'w+') as fp:
fp.write(categoryListStr)
def validHashtagCategory(category: str) -> bool:
"""Returns true if the category name is valid
"""
if not category:
return False
invalidChars = (',', ' ', '<', ';', '\\')
for ch in invalidChars:
if ch in category:
return False
# too long
if len(category) > 40:
return False
return True
def setHashtagCategory(baseDir: str, hashtag: str, category: str,
force=False) -> bool:
"""Sets the category for the hashtag
"""
if not validHashtagCategory(category):
return False
if not force:
hashtagFilename = baseDir + '/tags/' + hashtag + '.txt'
if not os.path.isfile(hashtagFilename):
hashtag = hashtag.title()
hashtagFilename = baseDir + '/tags/' + hashtag + '.txt'
if not os.path.isfile(hashtagFilename):
hashtag = hashtag.upper()
hashtagFilename = baseDir + '/tags/' + hashtag + '.txt'
if not os.path.isfile(hashtagFilename):
return False
if not os.path.isdir(baseDir + '/tags'):
os.mkdir(baseDir + '/tags')
categoryFilename = baseDir + '/tags/' + hashtag + '.category'
if force:
# don't overwrite any existing categories
if os.path.isfile(categoryFilename):
return False
with open(categoryFilename, 'w+') as fp:
fp.write(category)
updateHashtagCategories(baseDir)
return True
return False
def isEditor(baseDir: str, nickname: str) -> bool:
"""Returns true if the given nickname is an editor
"""
@ -343,7 +197,7 @@ def isSystemAccount(nickname: str) -> bool:
return False
def createConfig(baseDir: str) -> None:
def _createConfig(baseDir: str) -> None:
"""Creates a configuration file
"""
configFilename = baseDir + '/config.json'
@ -357,7 +211,7 @@ def createConfig(baseDir: str) -> None:
def setConfigParam(baseDir: str, variableName: str, variableValue) -> None:
"""Sets a configuration value
"""
createConfig(baseDir)
_createConfig(baseDir)
configFilename = baseDir + '/config.json'
configJson = {}
if os.path.isfile(configFilename):
@ -369,7 +223,7 @@ def setConfigParam(baseDir: str, variableName: str, variableValue) -> None:
def getConfigParam(baseDir: str, variableName: str):
"""Gets a configuration value
"""
createConfig(baseDir)
_createConfig(baseDir)
configFilename = baseDir + '/config.json'
configJson = loadJson(configFilename)
if configJson:
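
With _createConfig now called internally by both the setter and the getter,
callers only need the public pair. A minimal sketch (illustrative only;
'exampleSetting' is a hypothetical key, and config.json is created under the
working directory if it does not already exist):

import os
from utils import setConfigParam, getConfigParam

baseDir = os.getcwd()
# writes the value into config.json, creating the file if necessary
setConfigParam(baseDir, 'exampleSetting', 'some value')
print(getConfigParam(baseDir, 'exampleSetting'))  # 'some value'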
@ -756,8 +610,8 @@ def getDomainFromActor(actor: str) -> (str, int):
return domain, port
def setDefaultPetName(baseDir: str, nickname: str, domain: str,
followNickname: str, followDomain: str) -> None:
def _setDefaultPetName(baseDir: str, nickname: str, domain: str,
followNickname: str, followDomain: str) -> None:
"""Sets a default petname
This helps especially when using onion or i2p address
"""
@ -793,7 +647,8 @@ def followPerson(baseDir: str, nickname: str, domain: str,
followFile='following.txt') -> bool:
"""Adds a person to the follow list
"""
if not domainPermitted(followDomain.lower().replace('\n', ''),
followDomainStrLower = followDomain.lower().replace('\n', '')
if not domainPermitted(followDomainStrLower,
federationList):
if debug:
print('DEBUG: follow of domain ' +
@ -869,8 +724,8 @@ def followPerson(baseDir: str, nickname: str, domain: str,
addPersonToCalendar(baseDir, nickname, domain,
followNickname, followDomain)
# add a default petname
setDefaultPetName(baseDir, nickname, domain,
followNickname, followDomain)
_setDefaultPetName(baseDir, nickname, domain,
followNickname, followDomain)
return True
@ -1010,7 +865,8 @@ def locatePost(baseDir: str, nickname: str, domain: str,
return None
def removeAttachment(baseDir: str, httpPrefix: str, domain: str, postJson: {}):
def _removeAttachment(baseDir: str, httpPrefix: str, domain: str,
postJson: {}):
if not postJson.get('attachment'):
return
if not postJson['attachment'][0].get('url'):
@ -1053,8 +909,8 @@ def removeModerationPostFromIndex(baseDir: str, postUrl: str,
' from moderation index')
def isReplyToBlogPost(baseDir: str, nickname: str, domain: str,
postJsonObject: str):
def _isReplyToBlogPost(baseDir: str, nickname: str, domain: str,
postJsonObject: str):
"""Is the given post a reply to a blog post?
"""
if not postJsonObject.get('object'):
@ -1093,8 +949,8 @@ def deletePost(baseDir: str, httpPrefix: str,
return
# don't remove replies to blog posts
if isReplyToBlogPost(baseDir, nickname, domain,
postJsonObject):
if _isReplyToBlogPost(baseDir, nickname, domain,
postJsonObject):
return
# remove from recent posts cache in memory
@ -1112,7 +968,7 @@ def deletePost(baseDir: str, httpPrefix: str,
del recentPostsCache['html'][postId]
# remove any attachment
removeAttachment(baseDir, httpPrefix, domain, postJsonObject)
_removeAttachment(baseDir, httpPrefix, domain, postJsonObject)
extensions = ('votes', 'arrived', 'muted')
for ext in extensions:


@ -89,11 +89,11 @@ def htmlCalendarDeleteConfirm(cssCache: {}, translate: {}, baseDir: str,
return deletePostStr
def htmlCalendarDay(cssCache: {}, translate: {},
baseDir: str, path: str,
year: int, monthNumber: int, dayNumber: int,
nickname: str, domain: str, dayEvents: [],
monthName: str, actor: str) -> str:
def _htmlCalendarDay(cssCache: {}, translate: {},
baseDir: str, path: str,
year: int, monthNumber: int, dayNumber: int,
nickname: str, domain: str, dayEvents: [],
monthName: str, actor: str) -> str:
"""Show a day within the calendar
"""
accountDir = baseDir + '/accounts/' + nickname + '@' + domain
@ -251,10 +251,10 @@ def htmlCalendar(cssCache: {}, translate: {},
if events:
if events.get(str(dayNumber)):
dayEvents = events[str(dayNumber)]
return htmlCalendarDay(cssCache, translate, baseDir, path,
year, monthNumber, dayNumber,
nickname, domain, dayEvents,
monthName, actor)
return _htmlCalendarDay(cssCache, translate, baseDir, path,
year, monthNumber, dayNumber,
nickname, domain, dayEvents,
monthName, actor)
events = \
getCalendarEvents(baseDir, nickname, domain, year, monthNumber)


@ -19,18 +19,18 @@ from webapp_utils import htmlFooter
from webapp_utils import getBannerFile
def linksExist(baseDir: str) -> bool:
def _linksExist(baseDir: str) -> bool:
"""Returns true if links have been created
"""
linksFilename = baseDir + '/accounts/links.txt'
return os.path.isfile(linksFilename)
def getLeftColumnShares(baseDir: str,
httpPrefix: str, domainFull: str,
nickname: str,
maxSharesInLeftColumn: int,
translate: {}) -> []:
def _getLeftColumnShares(baseDir: str,
httpPrefix: str, domainFull: str,
nickname: str,
maxSharesInLeftColumn: int,
translate: {}) -> []:
"""get any shares and turn them into the left column links format
"""
pageNumber = 1
@ -164,9 +164,9 @@ def getLeftColumnContent(baseDir: str, nickname: str, domainFull: str,
# show a number of shares
maxSharesInLeftColumn = 3
sharesList = \
getLeftColumnShares(baseDir,
httpPrefix, domainFull, nickname,
maxSharesInLeftColumn, translate)
_getLeftColumnShares(baseDir,
httpPrefix, domainFull, nickname,
maxSharesInLeftColumn, translate)
if linksList and sharesList:
linksList = sharesList + linksList
@ -271,7 +271,7 @@ def htmlLinksMobile(cssCache: {}, baseDir: str,
headerButtonsFrontScreen(translate, nickname,
'links', authorized,
iconsAsButtons) + '</center>'
if linksExist(baseDir):
if _linksExist(baseDir):
htmlStr += \
getLeftColumnContent(baseDir, nickname, domainFull,
httpPrefix, translate,


@ -24,7 +24,7 @@ from webapp_utils import htmlPostSeparator
from webapp_utils import headerButtonsFrontScreen
def votesIndicator(totalVotes: int, positiveVoting: bool) -> str:
def _votesIndicator(totalVotes: int, positiveVoting: bool) -> str:
"""Returns an indicator of the number of votes on a newswire item
"""
if totalVotes <= 0:
@ -177,8 +177,8 @@ def getRightColumnContent(baseDir: str, nickname: str, domainFull: str,
# show the newswire lines
newswireContentStr = \
htmlNewswire(baseDir, newswire, nickname, moderator, translate,
positiveVoting)
_htmlNewswire(baseDir, newswire, nickname, moderator, translate,
positiveVoting)
htmlStr += newswireContentStr
# show the rss icon at the bottom, typically on the right hand side
@ -187,8 +187,8 @@ def getRightColumnContent(baseDir: str, nickname: str, domainFull: str,
return htmlStr
def htmlNewswire(baseDir: str, newswire: {}, nickname: str, moderator: bool,
translate: {}, positiveVoting: bool) -> str:
def _htmlNewswire(baseDir: str, newswire: {}, nickname: str, moderator: bool,
translate: {}, positiveVoting: bool) -> str:
"""Converts a newswire dict into html
"""
separatorStr = htmlPostSeparator(baseDir, 'right')
@ -220,7 +220,7 @@ def htmlNewswire(baseDir: str, newswire: {}, nickname: str, moderator: bool,
if moderator:
totalVotes = votesOnNewswireItem(item[2])
totalVotesStr = \
votesIndicator(totalVotes, positiveVoting)
_votesIndicator(totalVotes, positiveVoting)
title = removeLongWords(item[0], 16, []).replace('\n', '<br>')
htmlStr += '<p class="newswireItemVotedOn">' + \
@ -247,7 +247,7 @@ def htmlNewswire(baseDir: str, newswire: {}, nickname: str, moderator: bool,
# show a number of ticks or crosses for how many
# votes for or against
totalVotesStr = \
votesIndicator(totalVotes, positiveVoting)
_votesIndicator(totalVotes, positiveVoting)
title = removeLongWords(item[0], 16, []).replace('\n', '<br>')
if moderator and moderatedItem:


@ -17,8 +17,8 @@ from webapp_utils import htmlHeaderWithExternalStyle
from webapp_utils import htmlFooter
def htmlFollowingDataList(baseDir: str, nickname: str,
domain: str, domainFull: str) -> str:
def _htmlFollowingDataList(baseDir: str, nickname: str,
domain: str, domainFull: str) -> str:
"""Returns a datalist of handles being followed
"""
listStr = '<datalist id="followingHandles">\n'
@ -57,20 +57,20 @@ def htmlFollowingDataList(baseDir: str, nickname: str,
return listStr
def htmlNewPostDropDown(scopeIcon: str, scopeDescription: str,
replyStr: str,
translate: {},
showPublicOnDropdown: bool,
defaultTimeline: str,
pathBase: str,
dropdownNewPostSuffix: str,
dropdownNewBlogSuffix: str,
dropdownUnlistedSuffix: str,
dropdownFollowersSuffix: str,
dropdownDMSuffix: str,
dropdownReminderSuffix: str,
dropdownEventSuffix: str,
dropdownReportSuffix: str) -> str:
def _htmlNewPostDropDown(scopeIcon: str, scopeDescription: str,
replyStr: str,
translate: {},
showPublicOnDropdown: bool,
defaultTimeline: str,
pathBase: str,
dropdownNewPostSuffix: str,
dropdownNewBlogSuffix: str,
dropdownUnlistedSuffix: str,
dropdownFollowersSuffix: str,
dropdownDMSuffix: str,
dropdownReminderSuffix: str,
dropdownEventSuffix: str,
dropdownReportSuffix: str) -> str:
"""Returns the html for a drop down list of new post types
"""
dropDownContent = '<div class="newPostDropdown">\n'
@ -617,20 +617,20 @@ def htmlNewPost(cssCache: {}, mediaInstance: bool, translate: {},
dropDownContent = ''
if not reportUrl and not shareDescription:
dropDownContent = \
htmlNewPostDropDown(scopeIcon, scopeDescription,
replyStr,
translate,
showPublicOnDropdown,
defaultTimeline,
pathBase,
dropdownNewPostSuffix,
dropdownNewBlogSuffix,
dropdownUnlistedSuffix,
dropdownFollowersSuffix,
dropdownDMSuffix,
dropdownReminderSuffix,
dropdownEventSuffix,
dropdownReportSuffix)
_htmlNewPostDropDown(scopeIcon, scopeDescription,
replyStr,
translate,
showPublicOnDropdown,
defaultTimeline,
pathBase,
dropdownNewPostSuffix,
dropdownNewBlogSuffix,
dropdownUnlistedSuffix,
dropdownFollowersSuffix,
dropdownDMSuffix,
dropdownReminderSuffix,
dropdownEventSuffix,
dropdownReportSuffix)
else:
if not shareDescription:
# reporting a post to moderator
@ -717,7 +717,7 @@ def htmlNewPost(cssCache: {}, mediaInstance: bool, translate: {},
' <input type="text" name="mentions" ' + \
'list="followingHandles" value="' + mentionsStr + '" selected>\n'
newPostForm += \
htmlFollowingDataList(baseDir, nickname, domain, domainFull)
_htmlFollowingDataList(baseDir, nickname, domain, domainFull)
newPostForm += ''
selectedStr = ''


@ -20,14 +20,14 @@ from webapp_column_right import getRightColumnContent
from webapp_post import individualPostAsHtml
def htmlFrontScreenPosts(recentPostsCache: {}, maxRecentPosts: int,
translate: {},
baseDir: str, httpPrefix: str,
nickname: str, domain: str, port: int,
session, wfRequest: {}, personCache: {},
projectVersion: str,
YTReplacementDomain: str,
showPublishedDateOnly: bool) -> str:
def _htmlFrontScreenPosts(recentPostsCache: {}, maxRecentPosts: int,
translate: {},
baseDir: str, httpPrefix: str,
nickname: str, domain: str, port: int,
session, wfRequest: {}, personCache: {},
projectVersion: str,
YTReplacementDomain: str,
showPublishedDateOnly: bool) -> str:
"""Shows posts on the front screen of a news instance
These should only be public blog posts from the features timeline
which is the blog timeline of the news actor
@ -40,10 +40,12 @@ def htmlFrontScreenPosts(recentPostsCache: {}, maxRecentPosts: int,
boxName = 'tlfeatures'
authorized = True
while ctr < maxItems and currPage < 4:
outboxFeedPathStr = \
'/users/' + nickname + '/' + boxName + \
'?page=' + str(currPage)
outboxFeed = \
personBoxJson({}, session, baseDir, domain, port,
'/users/' + nickname + '/' + boxName +
'?page=' + str(currPage),
outboxFeedPathStr,
httpPrefix, 10, boxName,
authorized, 0, False, 0)
if not outboxFeed:
@ -139,14 +141,14 @@ def htmlFrontScreen(rssIconAtTop: bool,
bannerFile, bannerFilename = \
getBannerFile(baseDir, nickname, domain, theme)
profileStr += \
htmlFrontScreenPosts(recentPostsCache, maxRecentPosts,
translate,
baseDir, httpPrefix,
nickname, domain, port,
session, wfRequest, personCache,
projectVersion,
YTReplacementDomain,
showPublishedDateOnly) + licenseStr
_htmlFrontScreenPosts(recentPostsCache, maxRecentPosts,
translate,
baseDir, httpPrefix,
nickname, domain, port,
session, wfRequest, personCache,
projectVersion,
YTReplacementDomain,
showPublishedDateOnly) + licenseStr
# Footer which is only used for system accounts
profileFooterStr = ' </td>\n'


@ -10,8 +10,8 @@ import os
from shutil import copyfile
from datetime import datetime
from utils import getNicknameFromActor
from utils import getHashtagCategories
from utils import getHashtagCategory
from categories import getHashtagCategories
from categories import getHashtagCategory
from webapp_utils import getSearchBannerFile
from webapp_utils import getContentWarningButton
from webapp_utils import htmlHeaderWithExternalStyle
@ -51,7 +51,7 @@ def getHashtagCategoriesFeed(baseDir: str,
return rssStr
def getHashtagDomainMax(domainHistogram: {}) -> str:
def _getHashtagDomainMax(domainHistogram: {}) -> str:
"""Returns the domain with the maximum number of hashtags
"""
maxCount = 1
@ -63,7 +63,7 @@ def getHashtagDomainMax(domainHistogram: {}) -> str:
return maxDomain
def getHashtagDomainHistogram(domainHistogram: {}, translate: {}) -> str:
def _getHashtagDomainHistogram(domainHistogram: {}, translate: {}) -> str:
"""Returns the html for a histogram of domains
from which hashtags are coming
"""
@ -88,7 +88,7 @@ def getHashtagDomainHistogram(domainHistogram: {}, translate: {}) -> str:
rightColStr = ''
for i in range(len(domainHistogram)):
domain = getHashtagDomainMax(domainHistogram)
domain = _getHashtagDomainMax(domainHistogram)
if not domain:
break
percent = int(domainHistogram[domain] * 100 / totalCount)
@ -224,7 +224,7 @@ def htmlHashTagSwarm(baseDir: str, actor: str, translate: {}) -> str:
getContentWarningButton('alltags', translate, tagSwarmStr)
tagSwarmHtml = categorySwarmStr + tagSwarmStr.strip() + '\n'
# tagSwarmHtml += getHashtagDomainHistogram(domainHistogram, translate)
# tagSwarmHtml += _getHashtagDomainHistogram(domainHistogram, translate)
return tagSwarmHtml


@ -7,8 +7,8 @@ __email__ = "bob@freedombone.net"
__status__ = "Production"
def addEmbeddedVideoFromSites(translate: {}, content: str,
width=400, height=300) -> str:
def _addEmbeddedVideoFromSites(translate: {}, content: str,
width=400, height=300) -> str:
"""Adds embedded videos
"""
if '>vimeo.com/' in content:
@ -122,7 +122,7 @@ def addEmbeddedVideoFromSites(translate: {}, content: str,
return content
def addEmbeddedAudio(translate: {}, content: str) -> str:
def _addEmbeddedAudio(translate: {}, content: str) -> str:
"""Adds embedded audio for mp3/ogg
"""
if not ('.mp3' in content or '.ogg' in content):
@ -167,8 +167,8 @@ def addEmbeddedAudio(translate: {}, content: str) -> str:
return content
def addEmbeddedVideo(translate: {}, content: str,
width=400, height=300) -> str:
def _addEmbeddedVideo(translate: {}, content: str,
width=400, height=300) -> str:
"""Adds embedded video for mp4/webm/ogv
"""
if not ('.mp4' in content or '.webm' in content or '.ogv' in content):
@ -219,6 +219,6 @@ def addEmbeddedVideo(translate: {}, content: str,
def addEmbeddedElements(translate: {}, content: str) -> str:
"""Adds embedded elements for various media types
"""
content = addEmbeddedVideoFromSites(translate, content)
content = addEmbeddedAudio(translate, content)
return addEmbeddedVideo(translate, content)
content = _addEmbeddedVideoFromSites(translate, content)
content = _addEmbeddedAudio(translate, content)
return _addEmbeddedVideo(translate, content)


@ -110,8 +110,9 @@ def htmlAccountInfo(cssCache: {}, translate: {},
urlCtr += 1
blockedPostsHtml = ''
if blockedPostsLinks:
blockNoStr = 'blockNumber' + str(ctr)
blockedPostsHtml = \
getContentWarningButton('blockNumber' + str(ctr),
getContentWarningButton(blockNoStr,
translate, blockedPostsLinks)
ctr += 1

File diff suppressed because it is too large


@ -214,14 +214,14 @@ def htmlProfileAfterSearch(cssCache: {},
imageUrl = profileJson['image']['url']
profileStr = \
getProfileHeaderAfterSearch(baseDir,
nickname, defaultTimeline,
searchNickname,
searchDomainFull,
translate,
displayName,
profileDescriptionShort,
avatarUrl, imageUrl)
_getProfileHeaderAfterSearch(baseDir,
nickname, defaultTimeline,
searchNickname,
searchDomainFull,
translate,
displayName,
profileDescriptionShort,
avatarUrl, imageUrl)
domainFull = getFullDomain(domain, port)
@ -287,14 +287,14 @@ def htmlProfileAfterSearch(cssCache: {},
return htmlHeaderWithExternalStyle(cssFilename) + profileStr + htmlFooter()
def getProfileHeader(baseDir: str, nickname: str, domain: str,
domainFull: str, translate: {},
defaultTimeline: str,
displayName: str,
avatarDescription: str,
profileDescriptionShort: str,
loginButton: str, avatarUrl: str,
theme: str) -> str:
def _getProfileHeader(baseDir: str, nickname: str, domain: str,
domainFull: str, translate: {},
defaultTimeline: str,
displayName: str,
avatarDescription: str,
profileDescriptionShort: str,
loginButton: str, avatarUrl: str,
theme: str) -> str:
"""The header of the profile screen, containing background
image and avatar
"""
@ -327,14 +327,14 @@ def getProfileHeader(baseDir: str, nickname: str, domain: str,
return htmlStr
def getProfileHeaderAfterSearch(baseDir: str,
nickname: str, defaultTimeline: str,
searchNickname: str,
searchDomainFull: str,
translate: {},
displayName: str,
profileDescriptionShort: str,
avatarUrl: str, imageUrl: str) -> str:
def _getProfileHeaderAfterSearch(baseDir: str,
nickname: str, defaultTimeline: str,
searchNickname: str,
searchDomainFull: str,
translate: {},
displayName: str,
profileDescriptionShort: str,
avatarUrl: str, imageUrl: str) -> str:
"""The header of a searched for handle, containing background
image and avatar
"""
@ -568,12 +568,12 @@ def htmlProfile(rssIconAtTop: bool,
avatarUrl = profileJson['icon']['url']
profileHeaderStr = \
getProfileHeader(baseDir, nickname, domain,
domainFull, translate,
defaultTimeline, displayName,
avatarDescription,
profileDescriptionShort,
loginButton, avatarUrl, theme)
_getProfileHeader(baseDir, nickname, domain,
domainFull, translate,
defaultTimeline, displayName,
avatarDescription,
profileDescriptionShort,
loginButton, avatarUrl, theme)
profileStr = profileHeaderStr + donateSection
profileStr += '<div class="container" id="buttonheader">\n'
@ -621,44 +621,44 @@ def htmlProfile(rssIconAtTop: bool,
if selected == 'posts':
profileStr += \
htmlProfilePosts(recentPostsCache, maxRecentPosts,
translate,
baseDir, httpPrefix, authorized,
nickname, domain, port,
session, wfRequest, personCache,
projectVersion,
YTReplacementDomain,
showPublishedDateOnly) + licenseStr
_htmlProfilePosts(recentPostsCache, maxRecentPosts,
translate,
baseDir, httpPrefix, authorized,
nickname, domain, port,
session, wfRequest, personCache,
projectVersion,
YTReplacementDomain,
showPublishedDateOnly) + licenseStr
elif selected == 'following':
profileStr += \
htmlProfileFollowing(translate, baseDir, httpPrefix,
authorized, nickname,
domain, port, session,
wfRequest, personCache, extraJson,
projectVersion, ["unfollow"], selected,
usersPath, pageNumber, maxItemsPerPage,
dormantMonths)
_htmlProfileFollowing(translate, baseDir, httpPrefix,
authorized, nickname,
domain, port, session,
wfRequest, personCache, extraJson,
projectVersion, ["unfollow"], selected,
usersPath, pageNumber, maxItemsPerPage,
dormantMonths)
elif selected == 'followers':
profileStr += \
htmlProfileFollowing(translate, baseDir, httpPrefix,
authorized, nickname,
domain, port, session,
wfRequest, personCache, extraJson,
projectVersion, ["block"],
selected, usersPath, pageNumber,
maxItemsPerPage, dormantMonths)
_htmlProfileFollowing(translate, baseDir, httpPrefix,
authorized, nickname,
domain, port, session,
wfRequest, personCache, extraJson,
projectVersion, ["block"],
selected, usersPath, pageNumber,
maxItemsPerPage, dormantMonths)
elif selected == 'roles':
profileStr += \
htmlProfileRoles(translate, nickname, domainFull,
extraJson)
_htmlProfileRoles(translate, nickname, domainFull,
extraJson)
elif selected == 'skills':
profileStr += \
htmlProfileSkills(translate, nickname, domainFull, extraJson)
_htmlProfileSkills(translate, nickname, domainFull, extraJson)
elif selected == 'shares':
profileStr += \
htmlProfileShares(actor, translate,
nickname, domainFull,
extraJson) + licenseStr
_htmlProfileShares(actor, translate,
nickname, domainFull,
extraJson) + licenseStr
profileStr = \
htmlHeaderWithExternalStyle(cssFilename) + \
@ -666,15 +666,15 @@ def htmlProfile(rssIconAtTop: bool,
return profileStr
def htmlProfilePosts(recentPostsCache: {}, maxRecentPosts: int,
translate: {},
baseDir: str, httpPrefix: str,
authorized: bool,
nickname: str, domain: str, port: int,
session, wfRequest: {}, personCache: {},
projectVersion: str,
YTReplacementDomain: str,
showPublishedDateOnly: bool) -> str:
def _htmlProfilePosts(recentPostsCache: {}, maxRecentPosts: int,
translate: {},
baseDir: str, httpPrefix: str,
authorized: bool,
nickname: str, domain: str, port: int,
session, wfRequest: {}, personCache: {},
projectVersion: str,
YTReplacementDomain: str,
showPublishedDateOnly: bool) -> str:
"""Shows posts on the profile screen
These should only be public posts
"""
@ -685,11 +685,13 @@ def htmlProfilePosts(recentPostsCache: {}, maxRecentPosts: int,
currPage = 1
boxName = 'outbox'
while ctr < maxItems and currPage < 4:
outboxFeedPathStr = \
'/users/' + nickname + '/' + boxName + '?page=' + \
str(currPage)
outboxFeed = \
personBoxJson({}, session, baseDir, domain,
port,
'/users/' + nickname + '/' + boxName + '?page=' +
str(currPage),
outboxFeedPathStr,
httpPrefix,
10, boxName,
authorized, 0, False, 0)
@ -720,16 +722,16 @@ def htmlProfilePosts(recentPostsCache: {}, maxRecentPosts: int,
return profileStr
def htmlProfileFollowing(translate: {}, baseDir: str, httpPrefix: str,
authorized: bool,
nickname: str, domain: str, port: int,
session, wfRequest: {}, personCache: {},
followingJson: {}, projectVersion: str,
buttons: [],
feedName: str, actor: str,
pageNumber: int,
maxItemsPerPage: int,
dormantMonths: int) -> str:
def _htmlProfileFollowing(translate: {}, baseDir: str, httpPrefix: str,
authorized: bool,
nickname: str, domain: str, port: int,
session, wfRequest: {}, personCache: {},
followingJson: {}, projectVersion: str,
buttons: [],
feedName: str, actor: str,
pageNumber: int,
maxItemsPerPage: int,
dormantMonths: int) -> str:
"""Shows following on the profile screen
"""
profileStr = ''
@ -756,12 +758,12 @@ def htmlProfileFollowing(translate: {}, baseDir: str, httpPrefix: str,
dormantMonths)
profileStr += \
individualFollowAsHtml(translate, baseDir, session,
wfRequest, personCache,
domain, followingActor,
authorized, nickname,
httpPrefix, projectVersion, dormant,
buttons)
_individualFollowAsHtml(translate, baseDir, session,
wfRequest, personCache,
domain, followingActor,
authorized, nickname,
httpPrefix, projectVersion, dormant,
buttons)
if authorized and maxItemsPerPage and pageNumber:
if len(followingJson['orderedItems']) >= maxItemsPerPage:
@ -778,8 +780,8 @@ def htmlProfileFollowing(translate: {}, baseDir: str, httpPrefix: str,
return profileStr
def htmlProfileRoles(translate: {}, nickname: str, domain: str,
rolesJson: {}) -> str:
def _htmlProfileRoles(translate: {}, nickname: str, domain: str,
rolesJson: {}) -> str:
"""Shows roles on the profile screen
"""
profileStr = ''
@ -801,8 +803,8 @@ def htmlProfileRoles(translate: {}, nickname: str, domain: str,
return profileStr
def htmlProfileSkills(translate: {}, nickname: str, domain: str,
skillsJson: {}) -> str:
def _htmlProfileSkills(translate: {}, nickname: str, domain: str,
skillsJson: {}) -> str:
"""Shows skills on the profile screen
"""
profileStr = ''
@ -817,8 +819,8 @@ def htmlProfileSkills(translate: {}, nickname: str, domain: str,
return profileStr
def htmlProfileShares(actor: str, translate: {},
nickname: str, domain: str, sharesJson: {}) -> str:
def _htmlProfileShares(actor: str, translate: {},
nickname: str, domain: str, sharesJson: {}) -> str:
"""Shows shares on the profile screen
"""
profileStr = ''
@ -1450,16 +1452,16 @@ def htmlEditProfile(cssCache: {}, translate: {}, baseDir: str, path: str,
return editProfileForm
def individualFollowAsHtml(translate: {},
baseDir: str, session, wfRequest: {},
personCache: {}, domain: str,
followUrl: str,
authorized: bool,
actorNickname: str,
httpPrefix: str,
projectVersion: str,
dormant: bool,
buttons=[]) -> str:
def _individualFollowAsHtml(translate: {},
baseDir: str, session, wfRequest: {},
personCache: {}, domain: str,
followUrl: str,
authorized: bool,
actorNickname: str,
httpPrefix: str,
projectVersion: str,
dormant: bool,
buttons=[]) -> str:
"""An individual follow entry on the profile screen
"""
nickname = getNicknameFromActor(followUrl)


@ -19,7 +19,7 @@ from utils import locatePost
from utils import isPublicPost
from utils import firstParagraphFromString
from utils import searchBoxPosts
from utils import getHashtagCategory
from categories import getHashtagCategory
from feeds import rss2TagHeader
from feeds import rss2TagFooter
from webapp_utils import getAltPath


@ -27,8 +27,8 @@ from webapp_headerbuttons import headerButtonsTimeline
from posts import isModerator
def logTimelineTiming(enableTimingLog: bool, timelineStartTime,
boxName: str, debugId: str) -> None:
def _logTimelineTiming(enableTimingLog: bool, timelineStartTime,
boxName: str, debugId: str) -> None:
"""Create a log of timings for performance tuning
"""
if not enableTimingLog:
@ -127,7 +127,7 @@ def htmlTimeline(cssCache: {}, defaultTimeline: str,
bannerFile, bannerFilename = \
getBannerFile(baseDir, nickname, domain, theme)
logTimelineTiming(enableTimingLog, timelineStartTime, boxName, '1')
_logTimelineTiming(enableTimingLog, timelineStartTime, boxName, '1')
# is the user a moderator?
if not moderator:
@ -137,7 +137,7 @@ def htmlTimeline(cssCache: {}, defaultTimeline: str,
if not editor:
editor = isEditor(baseDir, nickname)
logTimelineTiming(enableTimingLog, timelineStartTime, boxName, '2')
_logTimelineTiming(enableTimingLog, timelineStartTime, boxName, '2')
# the appearance of buttons - highlighted or not
inboxButton = 'button'
@ -221,7 +221,7 @@ def htmlTimeline(cssCache: {}, defaultTimeline: str,
'" src="/icons/person.png"/></a>\n'
break
logTimelineTiming(enableTimingLog, timelineStartTime, boxName, '3')
_logTimelineTiming(enableTimingLog, timelineStartTime, boxName, '3')
# moderation / reports button
moderationButtonStr = ''
@ -256,7 +256,7 @@ def htmlTimeline(cssCache: {}, defaultTimeline: str,
tlStr = htmlHeaderWithExternalStyle(cssFilename)
logTimelineTiming(enableTimingLog, timelineStartTime, boxName, '4')
_logTimelineTiming(enableTimingLog, timelineStartTime, boxName, '4')
# if this is a news instance and we are viewing the news timeline
newsHeader = False
@ -487,17 +487,17 @@ def htmlTimeline(cssCache: {}, defaultTimeline: str,
tlStr += '</div>\n</form>\n'
logTimelineTiming(enableTimingLog, timelineStartTime, boxName, '6')
_logTimelineTiming(enableTimingLog, timelineStartTime, boxName, '6')
if boxName == 'tlshares':
maxSharesPerAccount = itemsPerPage
return (tlStr +
htmlSharesTimeline(translate, pageNumber, itemsPerPage,
baseDir, actor, nickname, domain, port,
maxSharesPerAccount, httpPrefix) +
_htmlSharesTimeline(translate, pageNumber, itemsPerPage,
baseDir, actor, nickname, domain, port,
maxSharesPerAccount, httpPrefix) +
htmlFooter())
logTimelineTiming(enableTimingLog, timelineStartTime, boxName, '7')
_logTimelineTiming(enableTimingLog, timelineStartTime, boxName, '7')
# page up arrow
if pageNumber > 1:
@ -543,14 +543,14 @@ def htmlTimeline(cssCache: {}, defaultTimeline: str,
preparePostFromHtmlCache(currTlStr,
boxName,
pageNumber)
logTimelineTiming(enableTimingLog,
timelineStartTime,
boxName, '10')
_logTimelineTiming(enableTimingLog,
timelineStartTime,
boxName, '10')
if not currTlStr:
logTimelineTiming(enableTimingLog,
timelineStartTime,
boxName, '11')
_logTimelineTiming(enableTimingLog,
timelineStartTime,
boxName, '11')
# read the post from disk
currTlStr = \
@ -570,8 +570,8 @@ def htmlTimeline(cssCache: {}, defaultTimeline: str,
showIndividualPostIcons,
manuallyApproveFollowers,
False, True)
logTimelineTiming(enableTimingLog,
timelineStartTime, boxName, '12')
_logTimelineTiming(enableTimingLog,
timelineStartTime, boxName, '12')
if currTlStr:
itemCtr += 1
@ -612,7 +612,7 @@ def htmlTimeline(cssCache: {}, defaultTimeline: str,
rightColumnStr + ' </td>\n'
tlStr += ' </tr>\n'
logTimelineTiming(enableTimingLog, timelineStartTime, boxName, '9')
_logTimelineTiming(enableTimingLog, timelineStartTime, boxName, '9')
tlStr += ' </tbody>\n'
tlStr += '</table>\n'
@ -656,10 +656,10 @@ def htmlIndividualShare(actor: str, item: {}, translate: {},
return profileStr
def htmlSharesTimeline(translate: {}, pageNumber: int, itemsPerPage: int,
baseDir: str, actor: str,
nickname: str, domain: str, port: int,
maxSharesPerAccount: int, httpPrefix: str) -> str:
def _htmlSharesTimeline(translate: {}, pageNumber: int, itemsPerPage: int,
baseDir: str, actor: str,
nickname: str, domain: str, port: int,
maxSharesPerAccount: int, httpPrefix: str) -> str:
"""Show shared items timeline as html
"""
sharesJson, lastPage = \


@ -167,7 +167,7 @@ def getContentWarningButton(postID: str, translate: {},
'</div></details>\n'
def getActorPropertyUrl(actorJson: {}, propertyName: str) -> str:
def _getActorPropertyUrl(actorJson: {}, propertyName: str) -> str:
"""Returns a url property from an actor
"""
if not actorJson.get('attachment'):
@ -206,10 +206,10 @@ def getActorPropertyUrl(actorJson: {}, propertyName: str) -> str:
def getBlogAddress(actorJson: {}) -> str:
"""Returns blog address for the given actor
"""
return getActorPropertyUrl(actorJson, 'Blog')
return _getActorPropertyUrl(actorJson, 'Blog')
def setActorPropertyUrl(actorJson: {}, propertyName: str, url: str) -> None:
def _setActorPropertyUrl(actorJson: {}, propertyName: str, url: str) -> None:
"""Sets a url for the given actor property
"""
if not actorJson.get('attachment'):
@ -269,7 +269,7 @@ def setActorPropertyUrl(actorJson: {}, propertyName: str, url: str) -> None:
def setBlogAddress(actorJson: {}, blogAddress: str) -> None:
"""Sets an blog address for the given actor
"""
setActorPropertyUrl(actorJson, 'Blog', removeHtml(blogAddress))
_setActorPropertyUrl(actorJson, 'Blog', removeHtml(blogAddress))
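
# A minimal sketch (an assumption, not the project's own code) of how a
# url-style property such as 'Blog' can be read from an actor's attachment
# list, where each entry is a schema.org PropertyValue with a name and value.
def getActorPropertyUrlSketch(actorJson: {}, propertyName: str) -> str:
    for propertyValue in actorJson.get('attachment', []):
        if propertyValue.get('type') != 'PropertyValue':
            continue
        if propertyValue.get('name', '').lower() != propertyName.lower():
            continue
        url = propertyValue.get('value', '').strip()
        if url.startswith('https://') or url.startswith('http://'):
            return url
    return ''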
def updateAvatarImageCache(session, baseDir: str, httpPrefix: str,
@ -475,8 +475,8 @@ def postContainsPublic(postJsonObject: {}) -> bool:
return containsPublic
def getImageFile(baseDir: str, name: str, directory: str,
nickname: str, domain: str, theme: str) -> (str, str):
def _getImageFile(baseDir: str, name: str, directory: str,
nickname: str, domain: str, theme: str) -> (str, str):
"""
returns the filenames for an image with the given name
"""
@ -495,30 +495,30 @@ def getImageFile(baseDir: str, name: str, directory: str,
def getBannerFile(baseDir: str,
nickname: str, domain: str, theme: str) -> (str, str):
return getImageFile(baseDir, 'banner',
baseDir + '/accounts/' + nickname + '@' + domain,
nickname, domain, theme)
return _getImageFile(baseDir, 'banner',
baseDir + '/accounts/' + nickname + '@' + domain,
nickname, domain, theme)
def getSearchBannerFile(baseDir: str,
nickname: str, domain: str, theme: str) -> (str, str):
return getImageFile(baseDir, 'search_banner',
baseDir + '/accounts/' + nickname + '@' + domain,
nickname, domain, theme)
return _getImageFile(baseDir, 'search_banner',
baseDir + '/accounts/' + nickname + '@' + domain,
nickname, domain, theme)
def getLeftImageFile(baseDir: str,
nickname: str, domain: str, theme: str) -> (str, str):
return getImageFile(baseDir, 'left_col_image',
baseDir + '/accounts/' + nickname + '@' + domain,
nickname, domain, theme)
return _getImageFile(baseDir, 'left_col_image',
baseDir + '/accounts/' + nickname + '@' + domain,
nickname, domain, theme)
def getRightImageFile(baseDir: str,
nickname: str, domain: str, theme: str) -> (str, str):
return getImageFile(baseDir, 'right_col_image',
baseDir + '/accounts/' + nickname + '@' + domain,
nickname, domain, theme)
return _getImageFile(baseDir, 'right_col_image',
baseDir + '/accounts/' + nickname + '@' + domain,
nickname, domain, theme)
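
# A minimal sketch (extensions and fallback order are assumptions) of how the
# banner/search_banner/column image wrappers above can resolve a filename:
# look for the named image in the account directory first, then fall back to
# the active theme's copy, trying a few common image extensions.
import os

def getImageFileSketch(baseDir: str, name: str, directory: str,
                       theme: str) -> (str, str):
    for extension in ('png', 'jpg', 'gif', 'svg', 'webp', 'avif'):
        imageFile = name + '.' + extension
        if os.path.isfile(directory + '/' + imageFile):
            return imageFile, directory + '/' + imageFile
        themeFilename = baseDir + '/theme/' + theme + '/' + imageFile
        if os.path.isfile(themeFilename):
            return imageFile, themeFilename
    return None, None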
def htmlHeaderWithExternalStyle(cssFilename: str, lang='en') -> str:
@ -542,16 +542,6 @@ def htmlFooter() -> str:
return htmlStr
def getFontFromCss(css: str) -> (str, str):
"""Returns the font name and format
"""
if ' url(' not in css:
return None, None
fontName = css.split(" url(")[1].split(")")[0].replace("'", '')
fontFormat = css.split(" format('")[1].split("')")[0]
return fontName, fontFormat
def loadIndividualPostAsHtmlFromCache(baseDir: str,
nickname: str, domain: str,
postJsonObject: {}) -> str:

View File

@ -25,7 +25,7 @@ from utils import saveJson
from utils import getProtocolPrefixes
def parseHandle(handle: str) -> (str, str):
def _parseHandle(handle: str) -> (str, str):
if '.' not in handle:
return None, None
prefixes = getProtocolPrefixes()
@ -54,7 +54,7 @@ def webfingerHandle(session, handle: str, httpPrefix: str,
print('WARN: No session specified for webfingerHandle')
return None
nickname, domain = parseHandle(handle)
nickname, domain = _parseHandle(handle)
if not nickname:
return None
wfDomain = domain
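
# A minimal sketch (the protocol prefixes and fallbacks are assumptions) of
# splitting a handle such as 'nick@example.com' or
# 'https://example.com/users/nick' into (nickname, domain).
def parseHandleSketch(handle: str) -> (str, str):
    for prefix in ('https://', 'http://', 'dat://', 'i2p://'):
        if handle.startswith(prefix):
            handle = handle[len(prefix):]
    if '/users/' in handle:
        domain, nickname = handle.split('/users/', 1)
        return nickname, domain
    if '@' in handle:
        nickname, domain = handle.split('@', 1)
        return nickname, domain
    return None, None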
@ -97,7 +97,7 @@ def webfingerHandle(session, handle: str, httpPrefix: str,
return result
def generateMagicKey(publicKeyPem) -> str:
def _generateMagicKey(publicKeyPem) -> str:
"""See magic_key method in
https://github.com/tootsuite/mastodon/blob/
707ddf7808f90e3ab042d7642d368c2ce8e95e6f/app/models/account.rb
@ -170,7 +170,7 @@ def createWebfingerEndpoint(nickname: str, domain: str, port: int,
"type": "application/activity+json"
},
{
"href": generateMagicKey(publicKeyPem),
"href": _generateMagicKey(publicKeyPem),
"rel": "magic-public-key"
}
],
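
# A minimal sketch (using the cryptography library for illustration; the
# project's own implementation may differ) of the Salmon magic-public-key
# value that Mastodon's magic_key method produces: base64url-encoded RSA
# modulus and exponent wrapped in a data URI.
import base64
from cryptography.hazmat.primitives.serialization import load_pem_public_key

def generateMagicKeySketch(publicKeyPem: str) -> str:
    publicKey = load_pem_public_key(publicKeyPem.encode('utf-8'))
    numbers = publicKey.public_numbers()

    def b64url(component: int) -> str:
        rawBytes = component.to_bytes((component.bit_length() + 7) // 8, 'big')
        return base64.urlsafe_b64encode(rawBytes).decode('ascii')

    return 'data:application/magic-public-key,RSA.' + \
        b64url(numbers.n) + '.' + b64url(numbers.e)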
@ -271,7 +271,7 @@ def webfingerLookup(path: str, baseDir: str,
return wfJson
def webfingerUpdateFromProfile(wfJson: {}, actorJson: {}) -> bool:
def _webfingerUpdateFromProfile(wfJson: {}, actorJson: {}) -> bool:
"""Updates webfinger Email/blog/xmpp links from profile
Returns true if one or more tags have been changed
"""
@ -350,6 +350,6 @@ def webfingerUpdate(baseDir: str, nickname: str, domain: str,
if not actorJson:
return
if webfingerUpdateFromProfile(wfJson, actorJson):
if _webfingerUpdateFromProfile(wfJson, actorJson):
if saveJson(wfJson, filename):
cachedWebfingers[handle] = wfJson
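
# A minimal sketch (the alias layout is an assumption) of the
# save-only-when-changed pattern shown above: merge profile-derived links into
# the webfinger json and report whether anything actually changed, so the
# caller can skip a redundant write and cache refresh.
def webfingerMergeLinksSketch(wfJson: {}, profileLinks: {}) -> bool:
    changed = False
    aliases = wfJson.setdefault('aliases', [])
    for name, url in profileLinks.items():
        if not url:
            continue
        alias = name + ':' + url
        if alias not in aliases:
            aliases.append(alias)
            changed = True
    return changed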