mirror of https://gitlab.com/bashrc2/epicyon
Snake case
parent 8b6dc582fa
commit 1684bbbbca
blocking.py (50 lines changed)

@@ -520,18 +520,18 @@ def outboxUndoBlock(base_dir: str, http_prefix: str,


def mutePost(base_dir: str, nickname: str, domain: str, port: int,
-http_prefix: str, postId: str, recentPostsCache: {},
+http_prefix: str, post_id: str, recentPostsCache: {},
debug: bool) -> None:
""" Mutes the given post
"""
-print('mutePost: postId ' + postId)
+print('mutePost: post_id ' + post_id)
-postFilename = locatePost(base_dir, nickname, domain, postId)
+postFilename = locatePost(base_dir, nickname, domain, post_id)
if not postFilename:
-print('mutePost: file not found ' + postId)
+print('mutePost: file not found ' + post_id)
return
post_json_object = load_json(postFilename)
if not post_json_object:
-print('mutePost: object not loaded ' + postId)
+print('mutePost: object not loaded ' + post_id)
return
print('mutePost: ' + str(post_json_object))

@@ -553,10 +553,10 @@ def mutePost(base_dir: str, nickname: str, domain: str, port: int,
# does this post have ignores on it from differenent actors?
if not postJsonObj.get('ignores'):
if debug:
-print('DEBUG: Adding initial mute to ' + postId)
+print('DEBUG: Adding initial mute to ' + post_id)
ignoresJson = {
"@context": "https://www.w3.org/ns/activitystreams",
-'id': postId,
+'id': post_id,
'type': 'Collection',
"totalItems": 1,
'items': [{

@@ -610,18 +610,18 @@ def mutePost(base_dir: str, nickname: str, domain: str, port: int,

# if the post is in the recent posts cache then mark it as muted
if recentPostsCache.get('index'):
-postId = \
+post_id = \
removeIdEnding(post_json_object['id']).replace('/', '#')
-if postId in recentPostsCache['index']:
+if post_id in recentPostsCache['index']:
-print('MUTE: ' + postId + ' is in recent posts cache')
+print('MUTE: ' + post_id + ' is in recent posts cache')
if recentPostsCache.get('json'):
-recentPostsCache['json'][postId] = json.dumps(post_json_object)
+recentPostsCache['json'][post_id] = json.dumps(post_json_object)
-print('MUTE: ' + postId +
+print('MUTE: ' + post_id +
' marked as muted in recent posts memory cache')
if recentPostsCache.get('html'):
-if recentPostsCache['html'].get(postId):
+if recentPostsCache['html'].get(post_id):
-del recentPostsCache['html'][postId]
+del recentPostsCache['html'][post_id]
-print('MUTE: ' + postId + ' removed cached html')
+print('MUTE: ' + post_id + ' removed cached html')

if alsoUpdatePostId:
postFilename = locatePost(base_dir, nickname, domain, alsoUpdatePostId)

@@ -653,11 +653,11 @@ def mutePost(base_dir: str, nickname: str, domain: str, port: int,


def unmutePost(base_dir: str, nickname: str, domain: str, port: int,
-http_prefix: str, postId: str, recentPostsCache: {},
+http_prefix: str, post_id: str, recentPostsCache: {},
debug: bool) -> None:
""" Unmutes the given post
"""
-postFilename = locatePost(base_dir, nickname, domain, postId)
+postFilename = locatePost(base_dir, nickname, domain, post_id)
if not postFilename:
return
post_json_object = load_json(postFilename)

@@ -725,18 +725,18 @@ def unmutePost(base_dir: str, nickname: str, domain: str, port: int,

# if the post is in the recent posts cache then mark it as unmuted
if recentPostsCache.get('index'):
-postId = \
+post_id = \
removeIdEnding(post_json_object['id']).replace('/', '#')
-if postId in recentPostsCache['index']:
+if post_id in recentPostsCache['index']:
-print('UNMUTE: ' + postId + ' is in recent posts cache')
+print('UNMUTE: ' + post_id + ' is in recent posts cache')
if recentPostsCache.get('json'):
-recentPostsCache['json'][postId] = json.dumps(post_json_object)
+recentPostsCache['json'][post_id] = json.dumps(post_json_object)
-print('UNMUTE: ' + postId +
+print('UNMUTE: ' + post_id +
' marked as unmuted in recent posts cache')
if recentPostsCache.get('html'):
-if recentPostsCache['html'].get(postId):
+if recentPostsCache['html'].get(post_id):
-del recentPostsCache['html'][postId]
+del recentPostsCache['html'][post_id]
-print('UNMUTE: ' + postId + ' removed cached html')
+print('UNMUTE: ' + post_id + ' removed cached html')
if alsoUpdatePostId:
postFilename = locatePost(base_dir, nickname, domain, alsoUpdatePostId)
if os.path.isfile(postFilename):

blog.py (18 lines changed)

@@ -41,13 +41,13 @@ from cache import getPersonFromCache

def _noOfBlogReplies(base_dir: str, http_prefix: str, translate: {},
nickname: str, domain: str, domain_full: str,
-postId: str, depth=0) -> int:
+post_id: str, depth=0) -> int:
"""Returns the number of replies on the post
This is recursive, so can handle replies to replies
"""
if depth > 4:
return 0
-if not postId:
+if not post_id:
return 0

tryPostBox = ('tlblogs', 'inbox', 'outbox')

@@ -55,7 +55,7 @@ def _noOfBlogReplies(base_dir: str, http_prefix: str, translate: {},
for postBox in tryPostBox:
postFilename = \
acct_dir(base_dir, nickname, domain) + '/' + postBox + '/' + \
-postId.replace('/', '#') + '.replies'
+post_id.replace('/', '#') + '.replies'
if os.path.isfile(postFilename):
boxFound = True
break

@@ -64,7 +64,7 @@ def _noOfBlogReplies(base_dir: str, http_prefix: str, translate: {},
for postBox in tryPostBox:
postFilename = \
acct_dir(base_dir, nickname, domain) + '/' + postBox + '/' + \
-postId.replace('/', '#')
+post_id.replace('/', '#')
if os.path.isfile(postFilename):
return 1
return 0

@@ -111,12 +111,12 @@ def _noOfBlogReplies(base_dir: str, http_prefix: str, translate: {},

def _getBlogReplies(base_dir: str, http_prefix: str, translate: {},
nickname: str, domain: str, domain_full: str,
-postId: str, depth=0) -> str:
+post_id: str, depth=0) -> str:
"""Returns a string containing html blog posts
"""
if depth > 4:
return ''
-if not postId:
+if not post_id:
return ''

tryPostBox = ('tlblogs', 'inbox', 'outbox')

@@ -124,7 +124,7 @@ def _getBlogReplies(base_dir: str, http_prefix: str, translate: {},
for postBox in tryPostBox:
postFilename = \
acct_dir(base_dir, nickname, domain) + '/' + postBox + '/' + \
-postId.replace('/', '#') + '.replies'
+post_id.replace('/', '#') + '.replies'
if os.path.isfile(postFilename):
boxFound = True
break

@@ -133,11 +133,11 @@ def _getBlogReplies(base_dir: str, http_prefix: str, translate: {},
for postBox in tryPostBox:
postFilename = \
acct_dir(base_dir, nickname, domain) + '/' + postBox + '/' + \
-postId.replace('/', '#') + '.json'
+post_id.replace('/', '#') + '.json'
if os.path.isfile(postFilename):
postFilename = acct_dir(base_dir, nickname, domain) + \
'/postcache/' + \
-postId.replace('/', '#') + '.html'
+post_id.replace('/', '#') + '.html'
if os.path.isfile(postFilename):
try:
with open(postFilename, 'r') as postFile:

@@ -39,19 +39,19 @@ def updateConversation(base_dir: str, nickname: str, domain: str,
_getConversationFilename(base_dir, nickname, domain, post_json_object)
if not conversationFilename:
return False
-postId = removeIdEnding(post_json_object['object']['id'])
+post_id = removeIdEnding(post_json_object['object']['id'])
if not os.path.isfile(conversationFilename):
try:
with open(conversationFilename, 'w+') as fp:
-fp.write(postId + '\n')
+fp.write(post_id + '\n')
return True
except OSError:
print('EX: updateConversation ' +
'unable to write to ' + conversationFilename)
-elif postId + '\n' not in open(conversationFilename).read():
+elif post_id + '\n' not in open(conversationFilename).read():
try:
with open(conversationFilename, 'a+') as fp:
-fp.write(postId + '\n')
+fp.write(post_id + '\n')
return True
except OSError:
print('EX: updateConversation 2 ' +

daemon.py (32 lines changed)

@@ -10124,15 +10124,15 @@ class PubServer(BaseHTTPRequestHandler):
likedBy = None
reactBy = None
reactEmoji = None
-postId = path.split('?notifypost=')[1].strip()
+post_id = path.split('?notifypost=')[1].strip()
-postId = postId.replace('-', '/')
+post_id = post_id.replace('-', '/')
path = path.split('?notifypost=')[0]
nickname = path.split('/users/')[1]
if '/' in nickname:
return False
replies = False

-postFilename = locatePost(base_dir, nickname, domain, postId, replies)
+postFilename = locatePost(base_dir, nickname, domain, post_id, replies)
if not postFilename:
return False

@@ -12876,9 +12876,9 @@ class PubServer(BaseHTTPRequestHandler):
GETstartTime) -> bool:
"""Confirm whether to delete a calendar event
"""
-postId = path.split('?eventid=')[1]
+post_id = path.split('?eventid=')[1]
-if '?' in postId:
+if '?' in post_id:
-postId = postId.split('?')[0]
+post_id = post_id.split('?')[0]
postTime = path.split('?time=')[1]
if '?' in postTime:
postTime = postTime.split('?')[0]

@@ -12897,7 +12897,7 @@ class PubServer(BaseHTTPRequestHandler):
base_dir, path,
http_prefix,
domain_full,
-postId, postTime,
+post_id, postTime,
postYear, postMonth, postDay,
calling_domain)
if not msg:

@@ -13184,11 +13184,11 @@ class PubServer(BaseHTTPRequestHandler):
postActor = path.split('?actor=')[1]
if '?' in postActor:
postActor = postActor.split('?')[0]
-postId = path.split('/editnewspost=')[1]
+post_id = path.split('/editnewspost=')[1]
-if '?' in postId:
+if '?' in post_id:
-postId = postId.split('?')[0]
+post_id = post_id.split('?')[0]
postUrl = local_actor_url(http_prefix, postActor, domain_full) + \
-'/statuses/' + postId
+'/statuses/' + post_id
path = path.split('/editnewspost=')[0]
msg = htmlEditNewsPost(self.server.cssCache,
translate, base_dir,

@@ -13969,17 +13969,17 @@ class PubServer(BaseHTTPRequestHandler):
self.server.system_language)
message_json = {}
if pinnedPostJson:
-postId = removeIdEnding(pinnedPostJson['id'])
+post_id = removeIdEnding(pinnedPostJson['id'])
message_json = \
outboxMessageCreateWrap(self.server.http_prefix,
nickname,
self.server.domain,
self.server.port,
pinnedPostJson)
-message_json['id'] = postId + '/activity'
+message_json['id'] = post_id + '/activity'
-message_json['object']['id'] = postId
+message_json['object']['id'] = post_id
-message_json['object']['url'] = replace_users_with_at(postId)
+message_json['object']['url'] = replace_users_with_at(post_id)
-message_json['object']['atomUri'] = postId
+message_json['object']['atomUri'] = post_id
msg = json.dumps(message_json,
ensure_ascii=False).encode('utf-8')
msglen = len(msg)

@@ -152,7 +152,7 @@ def _createDesktopConfig(actor: str) -> None:
os.mkdir(readPostsDir)


-def _markPostAsRead(actor: str, postId: str, postCategory: str) -> None:
+def _markPostAsRead(actor: str, post_id: str, postCategory: str) -> None:
"""Marks the given post as read by the given actor
"""
homeDir = str(Path.home())

@@ -165,24 +165,24 @@ def _markPostAsRead(actor: str, postId: str, postCategory: str) -> None:
readPostsDir = homeDir + '/.config/epicyon/' + handle
readPostsFilename = readPostsDir + '/' + postCategory + '.txt'
if os.path.isfile(readPostsFilename):
-if postId in open(readPostsFilename).read():
+if post_id in open(readPostsFilename).read():
return
try:
# prepend to read posts file
-postId += '\n'
+post_id += '\n'
with open(readPostsFilename, 'r+') as readFile:
content = readFile.read()
-if postId not in content:
+if post_id not in content:
readFile.seek(0, 0)
-readFile.write(postId + content)
+readFile.write(post_id + content)
except Exception as ex:
print('WARN: Failed to mark post as read' + str(ex))
else:
with open(readPostsFilename, 'w+') as readFile:
-readFile.write(postId + '\n')
+readFile.write(post_id + '\n')


-def _hasReadPost(actor: str, postId: str, postCategory: str) -> bool:
+def _hasReadPost(actor: str, post_id: str, postCategory: str) -> bool:
"""Returns true if the given post has been read by the actor
"""
homeDir = str(Path.home())

@@ -195,7 +195,7 @@ def _hasReadPost(actor: str, postId: str, postCategory: str) -> bool:
readPostsDir = homeDir + '/.config/epicyon/' + handle
readPostsFilename = readPostsDir + '/' + postCategory + '.txt'
if os.path.isfile(readPostsFilename):
-if postId in open(readPostsFilename).read():
+if post_id in open(readPostsFilename).read():
return True
return False

@@ -412,7 +412,7 @@ def _sayCommand(content: str, sayStr: str, screenreader: str,
system_language, espeak)


-def _desktopReplyToPost(session, postId: str,
+def _desktopReplyToPost(session, post_id: str,
base_dir: str, nickname: str, password: str,
domain: str, port: int, http_prefix: str,
cached_webfingers: {}, person_cache: {},

@@ -424,10 +424,10 @@ def _desktopReplyToPost(session, postId: str,
signing_priv_key_pem: str) -> None:
"""Use the desktop client to send a reply to the most recent post
"""
-if '://' not in postId:
+if '://' not in post_id:
return
-toNickname = getNicknameFromActor(postId)
+toNickname = getNicknameFromActor(post_id)
-toDomain, toPort = getDomainFromActor(postId)
+toDomain, toPort = getDomainFromActor(post_id)
sayStr = 'Replying to ' + toNickname + '@' + toDomain
_sayCommand(sayStr, sayStr,
screenreader, system_language, espeak)

@@ -476,7 +476,7 @@ def _desktopReplyToPost(session, postId: str,
cached_webfingers, person_cache, isArticle,
system_language, low_bandwidth,
content_license_url,
-debug, postId, postId,
+debug, post_id, post_id,
conversationId, subject) == 0:
sayStr = 'Reply sent'
else:

@@ -1720,7 +1720,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
elif commandStr == 'reply' or commandStr == 'r':
if post_json_object:
if post_json_object.get('id'):
-postId = post_json_object['id']
+post_id = post_json_object['id']
subject = None
if post_json_object['object'].get('summary'):
subject = post_json_object['object']['summary']

@@ -1729,7 +1729,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
conversationId = \
post_json_object['object']['conversation']
sessionReply = createSession(proxy_type)
-_desktopReplyToPost(sessionReply, postId,
+_desktopReplyToPost(sessionReply, post_id,
base_dir, nickname, password,
domain, port, http_prefix,
cached_webfingers, person_cache,

@@ -2084,7 +2084,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
_desktopGetBoxPostObject(boxJson, currIndex)
if post_json_object:
if post_json_object.get('id'):
-postId = post_json_object['id']
+post_id = post_json_object['id']
announceActor = \
post_json_object['object']['attributedTo']
sayStr = 'Announcing post by ' + \

@@ -2096,7 +2096,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
sendAnnounceViaServer(base_dir, sessionAnnounce,
nickname, password,
domain, port,
-http_prefix, postId,
+http_prefix, post_id,
cached_webfingers, person_cache,
True, __version__,
signing_priv_key_pem)

@@ -2117,7 +2117,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
_desktopGetBoxPostObject(boxJson, currIndex)
if post_json_object:
if post_json_object.get('id'):
-postId = post_json_object['id']
+post_id = post_json_object['id']
announceActor = \
post_json_object['object']['attributedTo']
sayStr = 'Undoing announce post by ' + \

@@ -2130,7 +2130,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
post_json_object,
nickname, password,
domain, port,
-http_prefix, postId,
+http_prefix, post_id,
cached_webfingers,
person_cache,
True, __version__,

happening.py (54 lines changed)

@@ -45,7 +45,7 @@ def _removeEventFromTimeline(eventId: str, tlEventsFilename: str) -> None:
print('EX: ERROR: unable to save events timeline')


-def saveEventPost(base_dir: str, handle: str, postId: str,
+def saveEventPost(base_dir: str, handle: str, post_id: str,
eventJson: {}) -> bool:
"""Saves an event to the calendar and/or the events timeline
If an event has extra fields, as per Mobilizon,

@@ -125,14 +125,14 @@ def saveEventPost(base_dir: str, handle: str, postId: str,

# Does this event post already exist within the calendar month?
if os.path.isfile(calendarFilename):
-if postId in open(calendarFilename).read():
+if post_id in open(calendarFilename).read():
# Event post already exists
return False

# append the post Id to the file for the calendar month
try:
with open(calendarFilename, 'a+') as calendarFile:
-calendarFile.write(postId + '\n')
+calendarFile.write(post_id + '\n')
except OSError:
print('EX: unable to append ' + calendarFilename)

@@ -203,9 +203,9 @@ def getTodaysEvents(base_dir: str, nickname: str, domain: str,
calendarPostIds = []
recreateEventsFile = False
with open(calendarFilename, 'r') as eventsFile:
-for postId in eventsFile:
+for post_id in eventsFile:
-postId = postId.replace('\n', '').replace('\r', '')
+post_id = post_id.replace('\n', '').replace('\r', '')
-postFilename = locatePost(base_dir, nickname, domain, postId)
+postFilename = locatePost(base_dir, nickname, domain, post_id)
if not postFilename:
recreateEventsFile = True
continue

@@ -233,11 +233,11 @@ def getTodaysEvents(base_dir: str, nickname: str, domain: str,
int(eventTime.strftime("%m")) == monthNumber and \
int(eventTime.strftime("%d")) == dayNumber:
dayOfMonth = str(int(eventTime.strftime("%d")))
-if '#statuses#' in postId:
+if '#statuses#' in post_id:
# link to the id so that the event can be
# easily deleted
-tag['postId'] = postId.split('#statuses#')[1]
+tag['post_id'] = post_id.split('#statuses#')[1]
-tag['sender'] = postId.split('#statuses#')[0]
+tag['sender'] = post_id.split('#statuses#')[0]
tag['sender'] = tag['sender'].replace('#', '/')
tag['public'] = publicEvent
postEvent.append(tag)

@@ -245,7 +245,7 @@ def getTodaysEvents(base_dir: str, nickname: str, domain: str,
# tag is a place
postEvent.append(tag)
if postEvent and dayOfMonth:
-calendarPostIds.append(postId)
+calendarPostIds.append(post_id)
if not events.get(dayOfMonth):
events[dayOfMonth] = []
events[dayOfMonth].append(postEvent)

@@ -254,8 +254,8 @@ def getTodaysEvents(base_dir: str, nickname: str, domain: str,
if recreateEventsFile:
try:
with open(calendarFilename, 'w+') as calendarFile:
-for postId in calendarPostIds:
+for post_id in calendarPostIds:
-calendarFile.write(postId + '\n')
+calendarFile.write(post_id + '\n')
except OSError:
print('EX: unable to write ' + calendarFilename)

@@ -278,9 +278,9 @@ def dayEventsCheck(base_dir: str, nickname: str, domain: str,

eventsExist = False
with open(calendarFilename, 'r') as eventsFile:
-for postId in eventsFile:
+for post_id in eventsFile:
-postId = postId.replace('\n', '').replace('\r', '')
+post_id = post_id.replace('\n', '').replace('\r', '')
-postFilename = locatePost(base_dir, nickname, domain, postId)
+postFilename = locatePost(base_dir, nickname, domain, post_id)
if not postFilename:
continue

@@ -334,9 +334,9 @@ def getThisWeeksEvents(base_dir: str, nickname: str, domain: str) -> {}:
calendarPostIds = []
recreateEventsFile = False
with open(calendarFilename, 'r') as eventsFile:
-for postId in eventsFile:
+for post_id in eventsFile:
-postId = postId.replace('\n', '').replace('\r', '')
+post_id = post_id.replace('\n', '').replace('\r', '')
-postFilename = locatePost(base_dir, nickname, domain, postId)
+postFilename = locatePost(base_dir, nickname, domain, post_id)
if not postFilename:
recreateEventsFile = True
continue

@@ -365,7 +365,7 @@ def getThisWeeksEvents(base_dir: str, nickname: str, domain: str) -> {}:
# tag is a place
postEvent.append(tag)
if postEvent and weekDayIndex:
-calendarPostIds.append(postId)
+calendarPostIds.append(post_id)
if not events.get(weekDayIndex):
events[weekDayIndex] = []
events[weekDayIndex].append(postEvent)

@@ -374,8 +374,8 @@ def getThisWeeksEvents(base_dir: str, nickname: str, domain: str) -> {}:
if recreateEventsFile:
try:
with open(calendarFilename, 'w+') as calendarFile:
-for postId in calendarPostIds:
+for post_id in calendarPostIds:
-calendarFile.write(postId + '\n')
+calendarFile.write(post_id + '\n')
except OSError:
print('EX: unable to write ' + calendarFilename)

@@ -399,9 +399,9 @@ def getCalendarEvents(base_dir: str, nickname: str, domain: str,
calendarPostIds = []
recreateEventsFile = False
with open(calendarFilename, 'r') as eventsFile:
-for postId in eventsFile:
+for post_id in eventsFile:
-postId = postId.replace('\n', '').replace('\r', '')
+post_id = post_id.replace('\n', '').replace('\r', '')
-postFilename = locatePost(base_dir, nickname, domain, postId)
+postFilename = locatePost(base_dir, nickname, domain, post_id)
if not postFilename:
recreateEventsFile = True
continue

@@ -432,7 +432,7 @@ def getCalendarEvents(base_dir: str, nickname: str, domain: str,
postEvent.append(tag)

if postEvent and dayOfMonth:
-calendarPostIds.append(postId)
+calendarPostIds.append(post_id)
if not events.get(dayOfMonth):
events[dayOfMonth] = []
events[dayOfMonth].append(postEvent)

@@ -441,8 +441,8 @@ def getCalendarEvents(base_dir: str, nickname: str, domain: str,
if recreateEventsFile:
try:
with open(calendarFilename, 'w+') as calendarFile:
-for postId in calendarPostIds:
+for post_id in calendarPostIds:
-calendarFile.write(postId + '\n')
+calendarFile.write(post_id + '\n')
except OSError:
print('EX: unable to write ' + calendarFilename)

inbox.py (39 lines changed)

@@ -132,15 +132,15 @@ def _storeLastPostId(base_dir: str, nickname: str, domain: str,
It would be great if edited posts contained a back reference id to the
source but we don't live in that ideal world.
"""
-actor = postId = None
+actor = post_id = None
if has_object_dict(post_json_object):
if post_json_object['object'].get('attributedTo'):
if isinstance(post_json_object['object']['attributedTo'], str):
actor = post_json_object['object']['attributedTo']
-postId = removeIdEnding(post_json_object['object']['id'])
+post_id = removeIdEnding(post_json_object['object']['id'])
if not actor:
actor = post_json_object['actor']
-postId = removeIdEnding(post_json_object['id'])
+post_id = removeIdEnding(post_json_object['id'])
if not actor:
return
lastpostDir = acct_dir(base_dir, nickname, domain) + '/lastpost'

@@ -149,7 +149,7 @@ def _storeLastPostId(base_dir: str, nickname: str, domain: str,
actorFilename = lastpostDir + '/' + actor.replace('/', '#')
try:
with open(actorFilename, 'w+') as fp:
-fp.write(postId)
+fp.write(post_id)
except OSError:
print('EX: Unable to write last post id to ' + actorFilename)

@@ -535,16 +535,17 @@ def savePostToInboxQueue(base_dir: str, http_prefix: str,

curr_time = datetime.datetime.utcnow()

-postId = None
+post_id = None
if post_json_object.get('id'):
-postId = removeIdEnding(post_json_object['id'])
+post_id = removeIdEnding(post_json_object['id'])
published = curr_time.strftime("%Y-%m-%dT%H:%M:%SZ")
-if not postId:
+if not post_id:
statusNumber, published = getStatusNumber()
if actor:
-postId = actor + '/statuses/' + statusNumber
+post_id = actor + '/statuses/' + statusNumber
else:
-postId = local_actor_url(http_prefix, nickname, originalDomain) + \
+post_id = \
+local_actor_url(http_prefix, nickname, originalDomain) + \
'/statuses/' + statusNumber

# NOTE: don't change post_json_object['id'] before signature check

@@ -553,8 +554,8 @@ def savePostToInboxQueue(base_dir: str, http_prefix: str,

handle = nickname + '@' + domain
destination = base_dir + '/accounts/' + \
-handle + '/inbox/' + postId.replace('/', '#') + '.json'
+handle + '/inbox/' + post_id.replace('/', '#') + '.json'
-filename = inbox_queueDir + '/' + postId.replace('/', '#') + '.json'
+filename = inbox_queueDir + '/' + post_id.replace('/', '#') + '.json'

sharedInboxItem = False
if nickname == 'inbox':

@@ -572,7 +573,7 @@ def savePostToInboxQueue(base_dir: str, http_prefix: str,

newQueueItem = {
'originalId': originalPostId,
-'id': postId,
+'id': post_id,
'actor': actor,
'nickname': nickname,
'domain': domain,

@@ -2662,14 +2663,14 @@ def _sendToGroupMembers(session, base_dir: str, handle: str, port: int,
savePostToBox(base_dir, http_prefix, None,
nickname, domain, post_json_object, 'outbox')

-postId = removeIdEnding(post_json_object['object']['id'])
+post_id = removeIdEnding(post_json_object['object']['id'])
if debug:
-print('Group announce: ' + postId)
+print('Group announce: ' + post_id)
announceJson = \
createAnnounce(session, base_dir, federation_list,
nickname, domain, port,
groupActor + '/followers', cc,
-http_prefix, postId, False, False,
+http_prefix, post_id, False, False,
send_threads, postLog,
person_cache, cached_webfingers,
debug, __version__, signing_priv_key_pem)

@@ -2710,7 +2711,7 @@ def _inboxUpdateCalendar(base_dir: str, handle: str,
actorNickname, actorDomain):
return

-postId = removeIdEnding(post_json_object['id']).replace('/', '#')
+post_id = removeIdEnding(post_json_object['id']).replace('/', '#')

# look for events within the tags list
for tagDict in post_json_object['object']['tag']:

@@ -2720,7 +2721,7 @@ def _inboxUpdateCalendar(base_dir: str, handle: str,
continue
if not tagDict.get('startTime'):
continue
-saveEventPost(base_dir, handle, postId, tagDict)
+saveEventPost(base_dir, handle, post_id, tagDict)


def inboxUpdateIndex(boxname: str, base_dir: str, handle: str,

@@ -3145,11 +3146,11 @@ def _lowFrequencyPostNotification(base_dir: str, http_prefix: str,
fromDomainFull = get_full_domain(fromDomain, fromPort)
if notifyWhenPersonPosts(base_dir, nickname, domain,
fromNickname, fromDomainFull):
-postId = removeIdEnding(jsonObj['id'])
+post_id = removeIdEnding(jsonObj['id'])
domFull = get_full_domain(domain, port)
postLink = \
local_actor_url(http_prefix, nickname, domFull) + \
-'?notifypost=' + postId.replace('/', '-')
+'?notifypost=' + post_id.replace('/', '-')
_notifyPostArrival(base_dir, handle, postLink)

@@ -38,20 +38,20 @@ from inbox import storeHashTags
from session import createSession


-def _updateFeedsOutboxIndex(base_dir: str, domain: str, postId: str) -> None:
+def _updateFeedsOutboxIndex(base_dir: str, domain: str, post_id: str) -> None:
"""Updates the index used for imported RSS feeds
"""
basePath = base_dir + '/accounts/news@' + domain
indexFilename = basePath + '/outbox.index'

if os.path.isfile(indexFilename):
-if postId not in open(indexFilename).read():
+if post_id not in open(indexFilename).read():
try:
with open(indexFilename, 'r+') as feedsFile:
content = feedsFile.read()
-if postId + '\n' not in content:
+if post_id + '\n' not in content:
feedsFile.seek(0, 0)
-feedsFile.write(postId + '\n' + content)
+feedsFile.write(post_id + '\n' + content)
print('DEBUG: feeds post added to index')
except Exception as ex:
print('WARN: Failed to write entry to feeds posts index ' +

@@ -59,7 +59,7 @@ def _updateFeedsOutboxIndex(base_dir: str, domain: str, postId: str) -> None:
else:
try:
with open(indexFilename, 'w+') as feedsFile:
-feedsFile.write(postId + '\n')
+feedsFile.write(post_id + '\n')
except OSError:
print('EX: unable to write ' + indexFilename)

@@ -440,7 +440,7 @@ def _newswireHashtagProcessing(session, base_dir: str, post_json_object: {},


def _createNewsMirror(base_dir: str, domain: str,
-postIdNumber: str, url: str,
+post_idNumber: str, url: str,
max_mirrored_articles: int) -> bool:
"""Creates a local mirror of a news article
"""

@@ -472,14 +472,14 @@ def _createNewsMirror(base_dir: str, domain: str,
# escape valve
break

-postId = indexFile.readline()
+post_id = indexFile.readline()
-if not postId:
+if not post_id:
continue
-postId = postId.strip()
+post_id = post_id.strip()
-mirrorArticleDir = mirrorDir + '/' + postId
+mirrorArticleDir = mirrorDir + '/' + post_id
if os.path.isdir(mirrorArticleDir):
rmtree(mirrorArticleDir, ignore_errors=False, onerror=None)
-removals.append(postId)
+removals.append(post_id)
noOfDirs -= 1

# remove the corresponding index entries

@@ -496,7 +496,7 @@ def _createNewsMirror(base_dir: str, domain: str,
except OSError:
print('EX: unable to write ' + mirrorIndexFilename)

-mirrorArticleDir = mirrorDir + '/' + postIdNumber
+mirrorArticleDir = mirrorDir + '/' + post_idNumber
if os.path.isdir(mirrorArticleDir):
# already mirrored
return True

@@ -521,13 +521,13 @@ def _createNewsMirror(base_dir: str, domain: str,
if os.path.isfile(mirrorIndexFilename):
try:
with open(mirrorIndexFilename, 'a+') as indexFile:
-indexFile.write(postIdNumber + '\n')
+indexFile.write(post_idNumber + '\n')
except OSError:
print('EX: unable to append ' + mirrorIndexFilename)
else:
try:
with open(mirrorIndexFilename, 'w+') as indexFile:
-indexFile.write(postIdNumber + '\n')
+indexFile.write(post_idNumber + '\n')
except OSError:
print('EX: unable to write ' + mirrorIndexFilename)

@@ -676,7 +676,7 @@ def _convertRSStoActivityPub(base_dir: str, http_prefix: str,

hashtags = item[6]

-postId = newPostId.replace('/', '#')
+post_id = newPostId.replace('/', '#')

moderated = item[5]

@@ -730,9 +730,9 @@ def _convertRSStoActivityPub(base_dir: str, http_prefix: str,
http_prefix, domain_full,
blog, translate)

-clearFromPostCaches(base_dir, recentPostsCache, postId)
+clearFromPostCaches(base_dir, recentPostsCache, post_id)
if save_json(blog, filename):
-_updateFeedsOutboxIndex(base_dir, domain, postId + '.json')
+_updateFeedsOutboxIndex(base_dir, domain, post_id + '.json')

# Save a file containing the time when the post arrived
# this can then later be used to construct the news timeline

@@ -351,13 +351,13 @@ def postMessageToOutbox(session, translate: {},
' is not a permitted activity type')
return False
if message_json.get('id'):
-postId = removeIdEnding(message_json['id'])
+post_id = removeIdEnding(message_json['id'])
if debug:
print('DEBUG: id attribute exists within POST to outbox')
else:
if debug:
print('DEBUG: No id attribute within POST to outbox')
-postId = None
+post_id = None
if debug:
print('DEBUG: savePostToBox')
if message_json['type'] != 'Upgrade':

@@ -373,7 +373,7 @@ def postMessageToOutbox(session, translate: {},
savedFilename = \
savePostToBox(base_dir,
http_prefix,
-postId,
+post_id,
postToNickname, domain_full,
message_json, outboxName)
if not savedFilename:

@@ -1109,10 +1109,10 @@ def suspendAccount(base_dir: str, nickname: str, domain: str) -> None:


def canRemovePost(base_dir: str, nickname: str,
-domain: str, port: int, postId: str) -> bool:
+domain: str, port: int, post_id: str) -> bool:
"""Returns true if the given post can be removed
"""
-if '/statuses/' not in postId:
+if '/statuses/' not in post_id:
return False

domain_full = get_full_domain(domain, port)

@@ -1121,7 +1121,7 @@ def canRemovePost(base_dir: str, nickname: str,
adminNickname = get_config_param(base_dir, 'admin')
if not adminNickname:
return False
-if domain_full + '/users/' + adminNickname + '/' in postId:
+if domain_full + '/users/' + adminNickname + '/' in post_id:
return False

# is the post by a moderator?

@@ -1130,7 +1130,8 @@ def canRemovePost(base_dir: str, nickname: str,
with open(moderatorsFile, 'r') as f:
lines = f.readlines()
for moderator in lines:
-if domain_full + '/users/' + moderator.strip('\n') + '/' in postId:
+if domain_full + '/users/' + \
+moderator.strip('\n') + '/' in post_id:
return False
return True

78
posts.py
78
posts.py
|
@ -905,7 +905,7 @@ def deleteAllPosts(base_dir: str,
|
||||||
print('ERROR: deleteAllPosts ' + str(ex))
|
print('ERROR: deleteAllPosts ' + str(ex))
|
||||||
|
|
||||||
|
|
||||||
def savePostToBox(base_dir: str, http_prefix: str, postId: str,
|
def savePostToBox(base_dir: str, http_prefix: str, post_id: str,
|
||||||
nickname: str, domain: str, post_json_object: {},
|
nickname: str, domain: str, post_json_object: {},
|
||||||
boxname: str) -> str:
|
boxname: str) -> str:
|
||||||
"""Saves the give json to the give box
|
"""Saves the give json to the give box
|
||||||
|
@ -918,18 +918,18 @@ def savePostToBox(base_dir: str, http_prefix: str, postId: str,
|
||||||
originalDomain = domain
|
originalDomain = domain
|
||||||
domain = remove_domain_port(domain)
|
domain = remove_domain_port(domain)
|
||||||
|
|
||||||
if not postId:
|
if not post_id:
|
||||||
statusNumber, published = getStatusNumber()
|
statusNumber, published = getStatusNumber()
|
||||||
postId = \
|
post_id = \
|
||||||
local_actor_url(http_prefix, nickname, originalDomain) + \
|
local_actor_url(http_prefix, nickname, originalDomain) + \
|
||||||
'/statuses/' + statusNumber
|
'/statuses/' + statusNumber
|
||||||
post_json_object['id'] = postId + '/activity'
|
post_json_object['id'] = post_id + '/activity'
|
||||||
if has_object_dict(post_json_object):
|
if has_object_dict(post_json_object):
|
||||||
post_json_object['object']['id'] = postId
|
post_json_object['object']['id'] = post_id
|
||||||
post_json_object['object']['atomUri'] = postId
|
post_json_object['object']['atomUri'] = post_id
|
||||||
|
|
||||||
boxDir = createPersonDir(nickname, domain, base_dir, boxname)
|
boxDir = createPersonDir(nickname, domain, base_dir, boxname)
|
||||||
filename = boxDir + '/' + postId.replace('/', '#') + '.json'
|
filename = boxDir + '/' + post_id.replace('/', '#') + '.json'
|
||||||
|
|
||||||
save_json(post_json_object, filename)
|
save_json(post_json_object, filename)
|
||||||
return filename
|
return filename
|
||||||
|
@ -969,14 +969,14 @@ def _updateHashtagsIndex(base_dir: str, tag: {}, newPostId: str) -> None:
|
||||||
|
|
||||||
|
|
||||||
def _addSchedulePost(base_dir: str, nickname: str, domain: str,
|
def _addSchedulePost(base_dir: str, nickname: str, domain: str,
|
||||||
eventDateStr: str, postId: str) -> None:
|
eventDateStr: str, post_id: str) -> None:
|
||||||
"""Adds a scheduled post to the index
|
"""Adds a scheduled post to the index
|
||||||
"""
|
"""
|
||||||
handle = nickname + '@' + domain
|
handle = nickname + '@' + domain
|
||||||
scheduleIndexFilename = \
|
scheduleIndexFilename = \
|
||||||
base_dir + '/accounts/' + handle + '/schedule.index'
|
base_dir + '/accounts/' + handle + '/schedule.index'
|
||||||
|
|
||||||
indexStr = eventDateStr + ' ' + postId.replace('/', '#')
|
indexStr = eventDateStr + ' ' + post_id.replace('/', '#')
|
||||||
if os.path.isfile(scheduleIndexFilename):
|
if os.path.isfile(scheduleIndexFilename):
|
||||||
if indexStr not in open(scheduleIndexFilename).read():
|
if indexStr not in open(scheduleIndexFilename).read():
|
||||||
try:
|
try:
|
||||||
|
@ -2093,8 +2093,8 @@ def createDirectMessagePost(base_dir: str,
|
||||||
message_json['cc'] = []
|
message_json['cc'] = []
|
||||||
message_json['object']['cc'] = []
|
message_json['object']['cc'] = []
|
||||||
if schedulePost:
|
if schedulePost:
|
||||||
postId = removeIdEnding(message_json['object']['id'])
|
post_id = removeIdEnding(message_json['object']['id'])
|
||||||
savePostToBox(base_dir, http_prefix, postId,
|
savePostToBox(base_dir, http_prefix, post_id,
|
||||||
nickname, domain, message_json, 'scheduled')
|
nickname, domain, message_json, 'scheduled')
|
||||||
return message_json
|
return message_json
|
||||||
|
|
||||||
|
@ -3969,8 +3969,8 @@ def archivePostsForPerson(http_prefix: str, nickname: str, domain: str,
|
||||||
# get the existing index entries as a string
|
# get the existing index entries as a string
|
||||||
newIndex = ''
|
newIndex = ''
|
||||||
with open(indexFilename, 'r') as indexFile:
|
with open(indexFilename, 'r') as indexFile:
|
||||||
for postId in indexFile:
|
for post_id in indexFile:
|
||||||
newIndex += postId
|
newIndex += post_id
|
||||||
indexCtr += 1
|
indexCtr += 1
|
||||||
if indexCtr >= maxPostsInBox:
|
if indexCtr >= maxPostsInBox:
|
||||||
break
|
break
|
||||||
|
@ -4535,9 +4535,9 @@ def downloadAnnounce(session, base_dir: str, http_prefix: str,
|
||||||
if not os.path.isdir(announceCacheDir):
|
if not os.path.isdir(announceCacheDir):
|
||||||
os.mkdir(announceCacheDir)
|
os.mkdir(announceCacheDir)
|
||||||
|
|
||||||
postId = None
|
post_id = None
|
||||||
if post_json_object.get('id'):
|
if post_json_object.get('id'):
|
||||||
postId = removeIdEnding(post_json_object['id'])
|
post_id = removeIdEnding(post_json_object['id'])
|
||||||
announceFilename = \
|
announceFilename = \
|
||||||
announceCacheDir + '/' + \
|
announceCacheDir + '/' + \
|
||||||
post_json_object['object'].replace('/', '#') + '.json'
|
post_json_object['object'].replace('/', '#') + '.json'
|
||||||
|
@@ -4611,17 +4611,17 @@ def downloadAnnounce(session, base_dir: str, http_prefix: str,
             print('WARN: announce json is not a dict - ' +
                   post_json_object['object'])
             _rejectAnnounce(announceFilename,
-                            base_dir, nickname, domain, postId,
+                            base_dir, nickname, domain, post_id,
                             recentPostsCache)
             return None
         if not announcedJson.get('id'):
             _rejectAnnounce(announceFilename,
-                            base_dir, nickname, domain, postId,
+                            base_dir, nickname, domain, post_id,
                             recentPostsCache)
             return None
         if not announcedJson.get('type'):
             _rejectAnnounce(announceFilename,
-                            base_dir, nickname, domain, postId,
+                            base_dir, nickname, domain, post_id,
                             recentPostsCache)
             return None
         if announcedJson['type'] == 'Video':
@@ -4633,12 +4633,12 @@ def downloadAnnounce(session, base_dir: str, http_prefix: str,
             announcedJson = convertedJson
         if '/statuses/' not in announcedJson['id']:
             _rejectAnnounce(announceFilename,
-                            base_dir, nickname, domain, postId,
+                            base_dir, nickname, domain, post_id,
                             recentPostsCache)
             return None
         if not has_users_path(announcedJson['id']):
             _rejectAnnounce(announceFilename,
-                            base_dir, nickname, domain, postId,
+                            base_dir, nickname, domain, post_id,
                             recentPostsCache)
             return None
         if announcedJson['type'] != 'Note' and \
@@ -4646,22 +4646,22 @@ def downloadAnnounce(session, base_dir: str, http_prefix: str,
            announcedJson['type'] != 'Article':
             # You can only announce Note or Article types
             _rejectAnnounce(announceFilename,
-                            base_dir, nickname, domain, postId,
+                            base_dir, nickname, domain, post_id,
                             recentPostsCache)
             return None
         if not announcedJson.get('content'):
             _rejectAnnounce(announceFilename,
-                            base_dir, nickname, domain, postId,
+                            base_dir, nickname, domain, post_id,
                             recentPostsCache)
             return None
         if not announcedJson.get('published'):
             _rejectAnnounce(announceFilename,
-                            base_dir, nickname, domain, postId,
+                            base_dir, nickname, domain, post_id,
                             recentPostsCache)
             return None
         if not valid_post_date(announcedJson['published'], 90, debug):
             _rejectAnnounce(announceFilename,
-                            base_dir, nickname, domain, postId,
+                            base_dir, nickname, domain, post_id,
                             recentPostsCache)
             return None
         if not understoodPostLanguage(base_dir, nickname, domain,
@@ -4673,19 +4673,19 @@ def downloadAnnounce(session, base_dir: str, http_prefix: str,
         contentStr = announcedJson['content']
         if dangerousMarkup(contentStr, allow_local_network_access):
             _rejectAnnounce(announceFilename,
-                            base_dir, nickname, domain, postId,
+                            base_dir, nickname, domain, post_id,
                             recentPostsCache)
             return None

         if isFiltered(base_dir, nickname, domain, contentStr):
             _rejectAnnounce(announceFilename,
-                            base_dir, nickname, domain, postId,
+                            base_dir, nickname, domain, post_id,
                             recentPostsCache)
             return None

         if invalid_ciphertext(contentStr):
             _rejectAnnounce(announceFilename,
-                            base_dir, nickname, domain, postId,
+                            base_dir, nickname, domain, post_id,
                             recentPostsCache)
             print('WARN: Invalid ciphertext within announce ' +
                   str(announcedJson))
@@ -4711,7 +4711,7 @@ def downloadAnnounce(session, base_dir: str, http_prefix: str,
         if announcedJson['type'] != 'Create':
             # Create wrap failed
             _rejectAnnounce(announceFilename,
-                            base_dir, nickname, domain, postId,
+                            base_dir, nickname, domain, post_id,
                             recentPostsCache)
             return None

@@ -4730,7 +4730,7 @@ def downloadAnnounce(session, base_dir: str, http_prefix: str,
             if isBlocked(base_dir, nickname, domain,
                          attributedNickname, attributedDomain):
                 _rejectAnnounce(announceFilename,
-                                base_dir, nickname, domain, postId,
+                                base_dir, nickname, domain, post_id,
                                 recentPostsCache)
                 return None
     post_json_object = announcedJson
@@ -4742,7 +4742,7 @@ def downloadAnnounce(session, base_dir: str, http_prefix: str,
         return None


-def isMuted(base_dir: str, nickname: str, domain: str, postId: str,
+def isMuted(base_dir: str, nickname: str, domain: str, post_id: str,
             conversationId: str) -> bool:
     """Returns true if the given post is muted
     """
@@ -4752,7 +4752,7 @@ def isMuted(base_dir: str, nickname: str, domain: str, postId: str,
         conversationId.replace('/', '#') + '.muted'
     if os.path.isfile(convMutedFilename):
         return True
-    postFilename = locatePost(base_dir, nickname, domain, postId)
+    postFilename = locatePost(base_dir, nickname, domain, post_id)
     if not postFilename:
         return False
     if os.path.isfile(postFilename + '.muted'):
@@ -5211,20 +5211,20 @@ def editedPostFilename(base_dir: str, nickname: str, domain: str,
         actor.replace('/', '#')
     if not os.path.isfile(actorFilename):
         return ''
-    postId = removeIdEnding(post_json_object['object']['id'])
-    lastpostId = None
+    post_id = removeIdEnding(post_json_object['object']['id'])
+    lastpost_id = None
     try:
         with open(actorFilename, 'r') as fp:
-            lastpostId = fp.read()
+            lastpost_id = fp.read()
     except OSError:
         print('EX: editedPostFilename unable to read ' + actorFilename)
         return ''
-    if not lastpostId:
+    if not lastpost_id:
         return ''
-    if lastpostId == postId:
+    if lastpost_id == post_id:
         return ''
     lastpostFilename = \
-        locatePost(base_dir, nickname, domain, lastpostId, False)
+        locatePost(base_dir, nickname, domain, lastpost_id, False)
     if not lastpostFilename:
         return ''
     lastpostJson = load_json(lastpostFilename, 0)
@@ -5254,11 +5254,11 @@ def editedPostFilename(base_dir: str, nickname: str, domain: str,
     if timeDiffSeconds > maxTimeDiffSeconds:
         return ''
     if debug:
-        print(postId + ' might be an edit of ' + lastpostId)
+        print(post_id + ' might be an edit of ' + lastpost_id)
     if wordsSimilarity(lastpostJson['object']['content'],
                        post_json_object['object']['content'], 10) < 70:
         return ''
-    print(postId + ' is an edit of ' + lastpostId)
+    print(post_id + ' is an edit of ' + lastpost_id)
     return lastpostFilename
@@ -42,8 +42,8 @@ def _updatePostSchedule(base_dir: str, handle: str, httpd,
         dateStr = line.split(' ')[0]
         if 'T' not in dateStr:
             continue
-        postId = line.split(' ', 1)[1].replace('\n', '').replace('\r', '')
-        postFilename = scheduleDir + postId + '.json'
+        post_id = line.split(' ', 1)[1].replace('\n', '').replace('\r', '')
+        postFilename = scheduleDir + post_id + '.json'
         if deleteSchedulePost:
             # delete extraneous scheduled posts
             if os.path.isfile(postFilename):
@@ -89,7 +89,7 @@ def _updatePostSchedule(base_dir: str, handle: str, httpd,
             if post_json_object['object'].get('published'):
                 post_json_object['published'] = published

-        print('Sending scheduled post ' + postId)
+        print('Sending scheduled post ' + post_id)

         if nickname:
             httpd.postToNickname = nickname
@@ -143,7 +143,7 @@ def _updatePostSchedule(base_dir: str, handle: str, httpd,
                                         '/outbox/')
         os.rename(postFilename, outboxPostFilename)

-        print('Scheduled post sent ' + postId)
+        print('Scheduled post sent ' + post_id)

         indexLines.remove(line)
         if len(indexLines) > maxScheduledPosts:

10 speaker.py
@@ -260,7 +260,7 @@ def _removeEmojiFromText(sayText: str) -> str:
 def _speakerEndpointJson(displayName: str, summary: str,
                          content: str, sayContent: str,
                          imageDescription: str,
-                         links: [], gender: str, postId: str,
+                         links: [], gender: str, post_id: str,
                          postDM: bool, postReply: bool,
                          followRequestsExist: bool,
                          followRequestsList: [],
@@ -277,7 +277,7 @@ def _speakerEndpointJson(displayName: str, summary: str,
         "published": published,
         "imageDescription": imageDescription,
         "detectedLinks": links,
-        "id": postId,
+        "id": post_id,
         "direct": isDirect,
         "replyToYou": replyToYou,
         "notify": {
@@ -489,9 +489,9 @@ def _postToSpeakerJson(base_dir: str, http_prefix: str,
         content = \
             translate['announces'] + ' ' + \
             announcedHandle + '. ' + content
-    postId = None
+    post_id = None
     if post_json_object['object'].get('id'):
-        postId = removeIdEnding(post_json_object['object']['id'])
+        post_id = removeIdEnding(post_json_object['object']['id'])

     followRequestsExist = False
     followRequestsList = []
@@ -525,7 +525,7 @@ def _postToSpeakerJson(base_dir: str, http_prefix: str,

     return _speakerEndpointJson(speakerName, summary,
                                 content, sayContent, imageDescription,
-                                detectedLinks, gender, postId,
+                                detectedLinks, gender, post_id,
                                 postDM, postReply,
                                 followRequestsExist,
                                 followRequestsList,

5 tests.py
@@ -4232,7 +4232,8 @@ def _testReplyToPublicPost(base_dir: str) -> None:
     domain = 'other.site'
     port = 443
     http_prefix = 'https'
-    postId = http_prefix + '://rat.site/users/ninjarodent/statuses/63746173435'
+    post_id = \
+        http_prefix + '://rat.site/users/ninjarodent/statuses/63746173435'
     content = "@ninjarodent@rat.site This is a test."
     followersOnly = False
     saveToFile = False
@@ -4242,7 +4243,7 @@ def _testReplyToPublicPost(base_dir: str) -> None:
     mediaType = None
     imageDescription = 'Some description'
     city = 'London, England'
-    testInReplyTo = postId
+    testInReplyTo = post_id
     testInReplyToAtomUri = None
     testSubject = None
     testSchedulePost = False

114 utils.py
@@ -614,12 +614,12 @@ def removeIdEnding(idStr: str) -> str:
     return idStr


-def removeHashFromPostId(postId: str) -> str:
+def removeHashFromPostId(post_id: str) -> str:
     """Removes any has from a post id
     """
-    if '#' not in postId:
-        return postId
-    return postId.split('#')[0]
+    if '#' not in post_id:
+        return post_id
+    return post_id.split('#')[0]


 def getProtocolPrefixes() -> []:
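As an aside, '#' plays two different roles around post ids in these hunks: removeHashFromPostId() above strips a URL fragment, while several other hunks map '/' to '#' to build cache keys and filenames. A short illustrative sketch of the distinction, reusing the test id that appears in the tests.py hunk above:

# Illustrative only: the id comes from the tests.py hunk in this commit.
post_id = 'https://rat.site/users/ninjarodent/statuses/63746173435'

# fragment stripping, as removeHashFromPostId() does
assert (post_id + '#create').split('#')[0] == post_id

# filesystem/cache-key encoding, as used elsewhere in the diff
print(post_id.replace('/', '#'))
# https:##rat.site#users#ninjarodent#statuses#63746173435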
@@ -1294,11 +1294,11 @@ def locateNewsArrival(base_dir: str, domain: str,


 def clearFromPostCaches(base_dir: str, recentPostsCache: {},
-                        postId: str) -> None:
+                        post_id: str) -> None:
     """Clears cached html for the given post, so that edits
     to news will appear
     """
-    filename = '/postcache/' + postId + '.html'
+    filename = '/postcache/' + post_id + '.html'
     for subdir, dirs, files in os.walk(base_dir + '/accounts'):
         for acct in dirs:
             if '@' not in acct:
@@ -1315,14 +1315,14 @@ def clearFromPostCaches(base_dir: str, recentPostsCache: {},
                           str(postFilename))
             # if the post is in the recent posts cache then remove it
             if recentPostsCache.get('index'):
-                if postId in recentPostsCache['index']:
-                    recentPostsCache['index'].remove(postId)
+                if post_id in recentPostsCache['index']:
+                    recentPostsCache['index'].remove(post_id)
             if recentPostsCache.get('json'):
-                if recentPostsCache['json'].get(postId):
-                    del recentPostsCache['json'][postId]
+                if recentPostsCache['json'].get(post_id):
+                    del recentPostsCache['json'][post_id]
             if recentPostsCache.get('html'):
-                if recentPostsCache['html'].get(postId):
-                    del recentPostsCache['html'][postId]
+                if recentPostsCache['html'].get(post_id):
+                    del recentPostsCache['html'][post_id]
             break
@@ -1489,20 +1489,20 @@ def removeModerationPostFromIndex(base_dir: str, postUrl: str,
                                   debug: bool) -> None:
     """Removes a url from the moderation index
     """
-    moderationIndexFile = base_dir + '/accounts/moderation.txt'
-    if not os.path.isfile(moderationIndexFile):
+    moderation_index_file = base_dir + '/accounts/moderation.txt'
+    if not os.path.isfile(moderation_index_file):
         return
-    postId = removeIdEnding(postUrl)
-    if postId in open(moderationIndexFile).read():
-        with open(moderationIndexFile, 'r') as f:
+    post_id = removeIdEnding(postUrl)
+    if post_id in open(moderation_index_file).read():
+        with open(moderation_index_file, 'r') as f:
             lines = f.readlines()
-        with open(moderationIndexFile, 'w+') as f:
+        with open(moderation_index_file, 'w+') as f:
             for line in lines:
-                if line.strip("\n").strip("\r") != postId:
+                if line.strip("\n").strip("\r") != post_id:
                     f.write(line)
                 else:
                     if debug:
-                        print('DEBUG: removed ' + postId +
+                        print('DEBUG: removed ' + post_id +
                               ' from moderation index')
@@ -1579,24 +1579,24 @@ def removePostFromCache(post_json_object: {}, recentPostsCache: {}) -> None:
     if not recentPostsCache.get('index'):
         return

-    postId = post_json_object['id']
-    if '#' in postId:
-        postId = postId.split('#', 1)[0]
-    postId = removeIdEnding(postId).replace('/', '#')
-    if postId not in recentPostsCache['index']:
+    post_id = post_json_object['id']
+    if '#' in post_id:
+        post_id = post_id.split('#', 1)[0]
+    post_id = removeIdEnding(post_id).replace('/', '#')
+    if post_id not in recentPostsCache['index']:
         return

     if recentPostsCache.get('index'):
-        if postId in recentPostsCache['index']:
-            recentPostsCache['index'].remove(postId)
+        if post_id in recentPostsCache['index']:
+            recentPostsCache['index'].remove(post_id)

     if recentPostsCache.get('json'):
-        if recentPostsCache['json'].get(postId):
-            del recentPostsCache['json'][postId]
+        if recentPostsCache['json'].get(post_id):
+            del recentPostsCache['json'][post_id]

     if recentPostsCache.get('html'):
-        if recentPostsCache['html'].get(postId):
-            del recentPostsCache['html'][postId]
+        if recentPostsCache['html'].get(post_id):
+            del recentPostsCache['html'][post_id]


 def _deleteCachedHtml(base_dir: str, nickname: str, domain: str,
@@ -1632,7 +1632,7 @@ def _deleteHashtagsOnPost(base_dir: str, post_json_object: {}) -> None:
         return

     # get the id of the post
-    postId = removeIdEnding(post_json_object['object']['id'])
+    post_id = removeIdEnding(post_json_object['object']['id'])
     for tag in post_json_object['object']['tag']:
         if not tag.get('type'):
             continue
@@ -1644,7 +1644,7 @@ def _deleteHashtagsOnPost(base_dir: str, post_json_object: {}) -> None:
         tagIndexFilename = base_dir + '/tags/' + tag['name'][1:] + '.txt'
         if not os.path.isfile(tagIndexFilename):
             continue
-        # remove postId from the tag index file
+        # remove post_id from the tag index file
         lines = None
         with open(tagIndexFilename, 'r') as f:
             lines = f.readlines()
@@ -1652,7 +1652,7 @@ def _deleteHashtagsOnPost(base_dir: str, post_json_object: {}) -> None:
             continue
         newlines = ''
         for fileLine in lines:
-            if postId in fileLine:
+            if post_id in fileLine:
                 # skip over the deleted post
                 continue
             newlines += fileLine
@@ -1682,16 +1682,16 @@ def _deleteConversationPost(base_dir: str, nickname: str, domain: str,
     conversationDir = acct_dir(base_dir, nickname, domain) + '/conversation'
     conversationId = post_json_object['object']['conversation']
     conversationId = conversationId.replace('/', '#')
-    postId = post_json_object['object']['id']
+    post_id = post_json_object['object']['id']
     conversationFilename = conversationDir + '/' + conversationId
     if not os.path.isfile(conversationFilename):
         return False
     conversationStr = ''
     with open(conversationFilename, 'r') as fp:
         conversationStr = fp.read()
-    if postId + '\n' not in conversationStr:
+    if post_id + '\n' not in conversationStr:
         return False
-    conversationStr = conversationStr.replace(postId + '\n', '')
+    conversationStr = conversationStr.replace(post_id + '\n', '')
     if conversationStr:
         with open(conversationFilename, 'w+') as fp:
             fp.write(conversationStr)
@@ -1771,8 +1771,8 @@ def deletePost(base_dir: str, http_prefix: str,
     if has_object_dict(post_json_object):
         if post_json_object['object'].get('moderationStatus'):
             if post_json_object.get('id'):
-                postId = removeIdEnding(post_json_object['id'])
-                removeModerationPostFromIndex(base_dir, postId, debug)
+                post_id = removeIdEnding(post_json_object['id'])
+                removeModerationPostFromIndex(base_dir, post_id, debug)

     # remove any hashtags index entries
     if hasObject:
@@ -2013,31 +2013,31 @@ def updateRecentPostsCache(recentPostsCache: {}, max_recent_posts: int,
     """
     if not post_json_object.get('id'):
         return
-    postId = post_json_object['id']
-    if '#' in postId:
-        postId = postId.split('#', 1)[0]
-    postId = removeIdEnding(postId).replace('/', '#')
+    post_id = post_json_object['id']
+    if '#' in post_id:
+        post_id = post_id.split('#', 1)[0]
+    post_id = removeIdEnding(post_id).replace('/', '#')
     if recentPostsCache.get('index'):
-        if postId in recentPostsCache['index']:
+        if post_id in recentPostsCache['index']:
             return
-        recentPostsCache['index'].append(postId)
+        recentPostsCache['index'].append(post_id)
         post_json_object['muted'] = False
-        recentPostsCache['json'][postId] = json.dumps(post_json_object)
-        recentPostsCache['html'][postId] = htmlStr
+        recentPostsCache['json'][post_id] = json.dumps(post_json_object)
+        recentPostsCache['html'][post_id] = htmlStr

         while len(recentPostsCache['html'].items()) > max_recent_posts:
-            postId = recentPostsCache['index'][0]
+            post_id = recentPostsCache['index'][0]
             recentPostsCache['index'].pop(0)
-            if recentPostsCache['json'].get(postId):
-                del recentPostsCache['json'][postId]
-            if recentPostsCache['html'].get(postId):
-                del recentPostsCache['html'][postId]
+            if recentPostsCache['json'].get(post_id):
+                del recentPostsCache['json'][post_id]
+            if recentPostsCache['html'].get(post_id):
+                del recentPostsCache['html'][post_id]
     else:
-        recentPostsCache['index'] = [postId]
+        recentPostsCache['index'] = [post_id]
         recentPostsCache['json'] = {}
         recentPostsCache['html'] = {}
-        recentPostsCache['json'][postId] = json.dumps(post_json_object)
-        recentPostsCache['html'][postId] = htmlStr
+        recentPostsCache['json'][post_id] = json.dumps(post_json_object)
+        recentPostsCache['html'][post_id] = htmlStr


 def fileLastModified(filename: str) -> str:
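Reading across the cache-related hunks above, recentPostsCache appears to be a plain dictionary holding three parallel structures keyed by the normalised post id. A minimal sketch of that assumed shape and of the id normalisation (illustrative only; the example id is made up and the removeIdEnding() step is left out):

# Assumed shape of recentPostsCache, inferred from updateRecentPostsCache above.
recentPostsCache = {
    'index': [],   # post ids in arrival order, oldest first
    'json': {},    # post id -> json.dumps() of the post
    'html': {}     # post id -> cached timeline html
}

# id normalisation as performed before the cache is touched
post_id = 'https://example.net/users/alice/statuses/123#create'
post_id = post_id.split('#', 1)[0]   # drop any fragment
post_id = post_id.replace('/', '#')  # make it usable as a key/filename
recentPostsCache['index'].append(post_id)
print(post_id)  # https:##example.net#users#alice#statuses#123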
@@ -2555,11 +2555,11 @@ def camelCaseSplit(text: str) -> str:


 def rejectPostId(base_dir: str, nickname: str, domain: str,
-                 postId: str, recentPostsCache: {}) -> None:
+                 post_id: str, recentPostsCache: {}) -> None:
     """ Marks the given post as rejected,
     for example an announce which is too old
     """
-    postFilename = locatePost(base_dir, nickname, domain, postId)
+    postFilename = locatePost(base_dir, nickname, domain, post_id)
     if not postFilename:
         return
@@ -33,7 +33,7 @@ from webapp_utils import htmlKeyboardNavigation

 def htmlCalendarDeleteConfirm(cssCache: {}, translate: {}, base_dir: str,
                               path: str, http_prefix: str,
-                              domain_full: str, postId: str, postTime: str,
+                              domain_full: str, post_id: str, postTime: str,
                               year: int, monthNumber: int,
                               dayNumber: int, calling_domain: str) -> str:
     """Shows a screen asking to confirm the deletion of a calendar event
@@ -41,7 +41,7 @@ def htmlCalendarDeleteConfirm(cssCache: {}, translate: {}, base_dir: str,
     nickname = getNicknameFromActor(path)
     actor = local_actor_url(http_prefix, nickname, domain_full)
     domain, port = getDomainFromActor(actor)
-    messageId = actor + '/statuses/' + postId
+    messageId = actor + '/statuses/' + post_id

     postFilename = locatePost(base_dir, nickname, domain, messageId)
     if not postFilename:
|
||||||
eventTime = None
|
eventTime = None
|
||||||
eventDescription = None
|
eventDescription = None
|
||||||
eventPlace = None
|
eventPlace = None
|
||||||
postId = None
|
post_id = None
|
||||||
senderName = ''
|
senderName = ''
|
||||||
senderActor = None
|
senderActor = None
|
||||||
eventIsPublic = False
|
eventIsPublic = False
|
||||||
# get the time place and description
|
# get the time place and description
|
||||||
for ev in eventPost:
|
for ev in eventPost:
|
||||||
if ev['type'] == 'Event':
|
if ev['type'] == 'Event':
|
||||||
if ev.get('postId'):
|
if ev.get('post_id'):
|
||||||
postId = ev['postId']
|
post_id = ev['post_id']
|
||||||
if ev.get('startTime'):
|
if ev.get('startTime'):
|
||||||
eventDate = \
|
eventDate = \
|
||||||
datetime.strptime(ev['startTime'],
|
datetime.strptime(ev['startTime'],
|
||||||
|
@ -184,11 +184,13 @@ def _htmlCalendarDay(person_cache: {}, cssCache: {}, translate: {},
|
||||||
translate['Reminder'] + ': ' + eventDescription
|
translate['Reminder'] + ': ' + eventDescription
|
||||||
|
|
||||||
deleteButtonStr = ''
|
deleteButtonStr = ''
|
||||||
if postId:
|
if post_id:
|
||||||
deleteButtonStr = \
|
deleteButtonStr = \
|
||||||
'<td class="calendar__day__icons"><a href="' + calActor + \
|
'<td class="calendar__day__icons"><a href="' + calActor + \
|
||||||
'/eventdelete?eventid=' + postId + '?year=' + str(year) + \
|
'/eventdelete?eventid=' + post_id + \
|
||||||
'?month=' + str(monthNumber) + '?day=' + str(dayNumber) + \
|
'?year=' + str(year) + \
|
||||||
|
'?month=' + str(monthNumber) + \
|
||||||
|
'?day=' + str(dayNumber) + \
|
||||||
'?time=' + eventTime + \
|
'?time=' + eventTime + \
|
||||||
'">\n<img class="calendardayicon" loading="lazy" alt="' + \
|
'">\n<img class="calendardayicon" loading="lazy" alt="' + \
|
||||||
translate['Delete this event'] + ' |" title="' + \
|
translate['Delete this event'] + ' |" title="' + \
|
||||||
|
|
|
@@ -475,9 +475,9 @@ def _getEditIconHtml(base_dir: str, nickname: str, domain_full: str,
             (is_editor(base_dir, nickname) and
              actor.endswith('/' + domain_full + '/users/news'))):

-        postId = removeIdEnding(post_json_object['object']['id'])
+        post_id = removeIdEnding(post_json_object['object']['id'])

-        if '/statuses/' not in postId:
+        if '/statuses/' not in post_id:
             return editStr

         if isBlogPost(post_json_object):
@@ -490,7 +490,7 @@ def _getEditIconHtml(base_dir: str, nickname: str, domain_full: str,
             '<a class="imageAnchor" href="/users/' + \
             nickname + \
             '/tlblogs?editblogpost=' + \
-            postId.split('/statuses/')[1] + \
+            post_id.split('/statuses/')[1] + \
             ';actor=' + actorNickname + \
             '" title="' + editBlogPostStr + '">' + \
             '<img loading="lazy" title="' + \
@@ -501,7 +501,7 @@ def _getEditIconHtml(base_dir: str, nickname: str, domain_full: str,
             ' ' + \
             '<a class="imageAnchor" href="/users/' + \
             nickname + '/editnewspost=' + \
-            postId.split('/statuses/')[1] + \
+            post_id.split('/statuses/')[1] + \
             '?actor=' + actorNickname + \
             '" title="' + editBlogPostStr + '">' + \
             '<img loading="lazy" title="' + \
@@ -515,7 +515,7 @@ def _getEditIconHtml(base_dir: str, nickname: str, domain_full: str,
             ' ' + \
             '<a class="imageAnchor" href="/users/' + nickname + \
             '/tlblogs?editeventpost=' + \
-            postId.split('/statuses/')[1] + \
+            post_id.split('/statuses/')[1] + \
             '?actor=' + actorNickname + \
             '" title="' + editEventStr + '">' + \
             '<img loading="lazy" title="' + \
@@ -916,13 +916,13 @@ def _announceUnattributedHtml(translate: {},
     announcesStr = 'announces'
     if translate.get(announcesStr):
         announcesStr = translate[announcesStr]
-    postId = removeIdEnding(post_json_object['object']['id'])
+    post_id = removeIdEnding(post_json_object['object']['id'])
     return ' <img loading="lazy" title="' + \
         announcesStr + '" alt="' + \
         announcesStr + '" src="/icons' + \
         '/repeat_inactive.png" ' + \
         'class="announceOrReply"/>\n' + \
-        ' <a href="' + postId + \
+        ' <a href="' + post_id + \
         '" class="announceOrReply">@unattributed</a>\n'
@@ -934,13 +934,13 @@ def _announceWithDisplayNameHtml(translate: {},
     announcesStr = 'announces'
     if translate.get(announcesStr):
         announcesStr = translate[announcesStr]
-    postId = removeIdEnding(post_json_object['object']['id'])
+    post_id = removeIdEnding(post_json_object['object']['id'])
     return ' <img loading="lazy" title="' + \
         announcesStr + '" alt="' + \
         announcesStr + '" src="/' + \
         'icons/repeat_inactive.png" ' + \
         'class="announceOrReply"/>\n' + \
-        ' <a href="' + postId + '" ' + \
+        ' <a href="' + post_id + '" ' + \
         'class="announceOrReply">' + announceDisplayName + '</a>\n'
@@ -2251,13 +2251,13 @@ def htmlEmojiReactionPicker(cssCache: {},
     reactionsJson = load_json(reactionsFilename)
     emojiPicksStr = ''
     baseUrl = '/users/' + nickname
-    postId = removeIdEnding(post_json_object['id'])
+    post_id = removeIdEnding(post_json_object['id'])
     for category, item in reactionsJson.items():
         emojiPicksStr += '<div class="container">\n'
         for emojiContent in item:
             emojiContentEncoded = urllib.parse.quote_plus(emojiContent)
             emojiUrl = \
-                baseUrl + '?react=' + postId + \
+                baseUrl + '?react=' + post_id + \
                 '?actor=' + post_json_object['actor'] + \
                 '?tl=' + boxName + \
                 '?page=' + str(pageNumber) + \
@@ -818,20 +818,20 @@ def htmlHashtagSearch(cssCache: {},
             '"></a>\n </center>\n'
     index = startIndex
     while index <= endIndex:
-        postId = lines[index].strip('\n').strip('\r')
-        if ' ' not in postId:
-            nickname = getNicknameFromActor(postId)
+        post_id = lines[index].strip('\n').strip('\r')
+        if ' ' not in post_id:
+            nickname = getNicknameFromActor(post_id)
             if not nickname:
                 index += 1
                 continue
         else:
-            postFields = postId.split(' ')
+            postFields = post_id.split(' ')
             if len(postFields) != 3:
                 index += 1
                 continue
             nickname = postFields[1]
-            postId = postFields[2]
-        postFilename = locatePost(base_dir, nickname, domain, postId)
+            post_id = postFields[2]
+        postFilename = locatePost(base_dir, nickname, domain, post_id)
         if not postFilename:
             index += 1
             continue
@@ -939,24 +939,24 @@ def rssHashtagSearch(nickname: str, domain: str, port: int,
     hashtagFeed = \
         rss2TagHeader(hashtag, http_prefix, domain_full)
     for index in range(len(lines)):
-        postId = lines[index].strip('\n').strip('\r')
-        if ' ' not in postId:
-            nickname = getNicknameFromActor(postId)
+        post_id = lines[index].strip('\n').strip('\r')
+        if ' ' not in post_id:
+            nickname = getNicknameFromActor(post_id)
             if not nickname:
                 index += 1
                 if index >= maxFeedLength:
                     break
                 continue
         else:
-            postFields = postId.split(' ')
+            postFields = post_id.split(' ')
             if len(postFields) != 3:
                 index += 1
                 if index >= maxFeedLength:
                     break
                 continue
             nickname = postFields[1]
-            postId = postFields[2]
-        postFilename = locatePost(base_dir, nickname, domain, postId)
+            post_id = postFields[2]
+        postFilename = locatePost(base_dir, nickname, domain, post_id)
         if not postFilename:
             index += 1
             if index >= maxFeedLength:
@@ -876,9 +876,9 @@ def htmlTimeline(cssCache: {}, defaultTimeline: str,
         # is the post in the memory cache of recent ones?
         currTlStr = None
         if boxName != 'tlmedia' and recentPostsCache.get('html'):
-            postId = removeIdEnding(item['id']).replace('/', '#')
-            if recentPostsCache['html'].get(postId):
-                currTlStr = recentPostsCache['html'][postId]
+            post_id = removeIdEnding(item['id']).replace('/', '#')
+            if recentPostsCache['html'].get(post_id):
+                currTlStr = recentPostsCache['html'][post_id]
                 currTlStr = \
                     preparePostFromHtmlCache(nickname,
                                              currTlStr,
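Most of the hunks above are the same mechanical rename of camelCase post id variables to snake_case (a few also re-wrap long lines). For anyone doing a similar cleanup, a minimal sketch of the conversion rule; this is illustrative only, and there is no indication the commit itself was produced by such a script:

import re

def to_snake_case(name: str) -> str:
    """Convert a camelCase identifier such as postId to post_id."""
    # insert an underscore before each interior upper-case letter, then lower it
    return re.sub(r'(?<!^)(?=[A-Z])', '_', name).lower()

assert to_snake_case('postId') == 'post_id'
assert to_snake_case('lastpostId') == 'lastpost_id'
assert to_snake_case('moderationIndexFile') == 'moderation_index_file'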