From 1684bbbbca7d2d28391ea81134f898e4f13607c9 Mon Sep 17 00:00:00 2001
From: Bob Mottram
Date: Sun, 26 Dec 2021 19:47:06 +0000
Subject: [PATCH] Snake case

---
 blocking.py        |  50 ++++++++++----------
 blog.py            |  18 +++----
 conversation.py    |   8 ++--
 daemon.py          |  32 ++++++-------
 desktop_client.py  |  38 +++++++--------
 happening.py       |  54 ++++++++++-----------
 inbox.py           |  39 ++++++++--------
 newsdaemon.py      |  34 +++++++-------
 outbox.py          |   6 +--
 person.py          |   9 ++--
 posts.py           |  78 +++++++++++++++----------------
 schedule.py        |   8 ++--
 speaker.py         |  10 ++--
 tests.py           |   5 +-
 utils.py           | 114 ++++++++++++++++++++++-----------------------
 webapp_calendar.py |  18 +++----
 webapp_post.py     |  22 ++++-----
 webapp_search.py   |  24 +++++-----
 webapp_timeline.py |   6 +--
 19 files changed, 289 insertions(+), 284 deletions(-)

diff --git a/blocking.py b/blocking.py
index ed65f1f98..8d86dfaad 100644
--- a/blocking.py
+++ b/blocking.py
@@ -520,18 +520,18 @@ def outboxUndoBlock(base_dir: str, http_prefix: str,
 
 
 def mutePost(base_dir: str, nickname: str, domain: str, port: int,
-             http_prefix: str, postId: str, recentPostsCache: {},
+             http_prefix: str, post_id: str, recentPostsCache: {},
              debug: bool) -> None:
     """ Mutes the given post
     """
-    print('mutePost: postId ' + postId)
-    postFilename = locatePost(base_dir, nickname, domain, postId)
+    print('mutePost: post_id ' + post_id)
+    postFilename = locatePost(base_dir, nickname, domain, post_id)
     if not postFilename:
-        print('mutePost: file not found ' + postId)
+        print('mutePost: file not found ' + post_id)
         return
     post_json_object = load_json(postFilename)
     if not post_json_object:
-        print('mutePost: object not loaded ' + postId)
+        print('mutePost: object not loaded ' + post_id)
         return
     print('mutePost: ' + str(post_json_object))
 
@@ -553,10 +553,10 @@ def mutePost(base_dir: str, nickname: str, domain: str, port: int,
     # does this post have ignores on it from differenent actors?
if not postJsonObj.get('ignores'): if debug: - print('DEBUG: Adding initial mute to ' + postId) + print('DEBUG: Adding initial mute to ' + post_id) ignoresJson = { "@context": "https://www.w3.org/ns/activitystreams", - 'id': postId, + 'id': post_id, 'type': 'Collection', "totalItems": 1, 'items': [{ @@ -610,18 +610,18 @@ def mutePost(base_dir: str, nickname: str, domain: str, port: int, # if the post is in the recent posts cache then mark it as muted if recentPostsCache.get('index'): - postId = \ + post_id = \ removeIdEnding(post_json_object['id']).replace('/', '#') - if postId in recentPostsCache['index']: - print('MUTE: ' + postId + ' is in recent posts cache') + if post_id in recentPostsCache['index']: + print('MUTE: ' + post_id + ' is in recent posts cache') if recentPostsCache.get('json'): - recentPostsCache['json'][postId] = json.dumps(post_json_object) - print('MUTE: ' + postId + + recentPostsCache['json'][post_id] = json.dumps(post_json_object) + print('MUTE: ' + post_id + ' marked as muted in recent posts memory cache') if recentPostsCache.get('html'): - if recentPostsCache['html'].get(postId): - del recentPostsCache['html'][postId] - print('MUTE: ' + postId + ' removed cached html') + if recentPostsCache['html'].get(post_id): + del recentPostsCache['html'][post_id] + print('MUTE: ' + post_id + ' removed cached html') if alsoUpdatePostId: postFilename = locatePost(base_dir, nickname, domain, alsoUpdatePostId) @@ -653,11 +653,11 @@ def mutePost(base_dir: str, nickname: str, domain: str, port: int, def unmutePost(base_dir: str, nickname: str, domain: str, port: int, - http_prefix: str, postId: str, recentPostsCache: {}, + http_prefix: str, post_id: str, recentPostsCache: {}, debug: bool) -> None: """ Unmutes the given post """ - postFilename = locatePost(base_dir, nickname, domain, postId) + postFilename = locatePost(base_dir, nickname, domain, post_id) if not postFilename: return post_json_object = load_json(postFilename) @@ -725,18 +725,18 @@ def unmutePost(base_dir: str, nickname: str, domain: str, port: int, # if the post is in the recent posts cache then mark it as unmuted if recentPostsCache.get('index'): - postId = \ + post_id = \ removeIdEnding(post_json_object['id']).replace('/', '#') - if postId in recentPostsCache['index']: - print('UNMUTE: ' + postId + ' is in recent posts cache') + if post_id in recentPostsCache['index']: + print('UNMUTE: ' + post_id + ' is in recent posts cache') if recentPostsCache.get('json'): - recentPostsCache['json'][postId] = json.dumps(post_json_object) - print('UNMUTE: ' + postId + + recentPostsCache['json'][post_id] = json.dumps(post_json_object) + print('UNMUTE: ' + post_id + ' marked as unmuted in recent posts cache') if recentPostsCache.get('html'): - if recentPostsCache['html'].get(postId): - del recentPostsCache['html'][postId] - print('UNMUTE: ' + postId + ' removed cached html') + if recentPostsCache['html'].get(post_id): + del recentPostsCache['html'][post_id] + print('UNMUTE: ' + post_id + ' removed cached html') if alsoUpdatePostId: postFilename = locatePost(base_dir, nickname, domain, alsoUpdatePostId) if os.path.isfile(postFilename): diff --git a/blog.py b/blog.py index f79967023..6f4d015bf 100644 --- a/blog.py +++ b/blog.py @@ -41,13 +41,13 @@ from cache import getPersonFromCache def _noOfBlogReplies(base_dir: str, http_prefix: str, translate: {}, nickname: str, domain: str, domain_full: str, - postId: str, depth=0) -> int: + post_id: str, depth=0) -> int: """Returns the number of replies on the post This is recursive, so can 
handle replies to replies """ if depth > 4: return 0 - if not postId: + if not post_id: return 0 tryPostBox = ('tlblogs', 'inbox', 'outbox') @@ -55,7 +55,7 @@ def _noOfBlogReplies(base_dir: str, http_prefix: str, translate: {}, for postBox in tryPostBox: postFilename = \ acct_dir(base_dir, nickname, domain) + '/' + postBox + '/' + \ - postId.replace('/', '#') + '.replies' + post_id.replace('/', '#') + '.replies' if os.path.isfile(postFilename): boxFound = True break @@ -64,7 +64,7 @@ def _noOfBlogReplies(base_dir: str, http_prefix: str, translate: {}, for postBox in tryPostBox: postFilename = \ acct_dir(base_dir, nickname, domain) + '/' + postBox + '/' + \ - postId.replace('/', '#') + post_id.replace('/', '#') if os.path.isfile(postFilename): return 1 return 0 @@ -111,12 +111,12 @@ def _noOfBlogReplies(base_dir: str, http_prefix: str, translate: {}, def _getBlogReplies(base_dir: str, http_prefix: str, translate: {}, nickname: str, domain: str, domain_full: str, - postId: str, depth=0) -> str: + post_id: str, depth=0) -> str: """Returns a string containing html blog posts """ if depth > 4: return '' - if not postId: + if not post_id: return '' tryPostBox = ('tlblogs', 'inbox', 'outbox') @@ -124,7 +124,7 @@ def _getBlogReplies(base_dir: str, http_prefix: str, translate: {}, for postBox in tryPostBox: postFilename = \ acct_dir(base_dir, nickname, domain) + '/' + postBox + '/' + \ - postId.replace('/', '#') + '.replies' + post_id.replace('/', '#') + '.replies' if os.path.isfile(postFilename): boxFound = True break @@ -133,11 +133,11 @@ def _getBlogReplies(base_dir: str, http_prefix: str, translate: {}, for postBox in tryPostBox: postFilename = \ acct_dir(base_dir, nickname, domain) + '/' + postBox + '/' + \ - postId.replace('/', '#') + '.json' + post_id.replace('/', '#') + '.json' if os.path.isfile(postFilename): postFilename = acct_dir(base_dir, nickname, domain) + \ '/postcache/' + \ - postId.replace('/', '#') + '.html' + post_id.replace('/', '#') + '.html' if os.path.isfile(postFilename): try: with open(postFilename, 'r') as postFile: diff --git a/conversation.py b/conversation.py index ac6896ab3..30e5d0da7 100644 --- a/conversation.py +++ b/conversation.py @@ -39,19 +39,19 @@ def updateConversation(base_dir: str, nickname: str, domain: str, _getConversationFilename(base_dir, nickname, domain, post_json_object) if not conversationFilename: return False - postId = removeIdEnding(post_json_object['object']['id']) + post_id = removeIdEnding(post_json_object['object']['id']) if not os.path.isfile(conversationFilename): try: with open(conversationFilename, 'w+') as fp: - fp.write(postId + '\n') + fp.write(post_id + '\n') return True except OSError: print('EX: updateConversation ' + 'unable to write to ' + conversationFilename) - elif postId + '\n' not in open(conversationFilename).read(): + elif post_id + '\n' not in open(conversationFilename).read(): try: with open(conversationFilename, 'a+') as fp: - fp.write(postId + '\n') + fp.write(post_id + '\n') return True except OSError: print('EX: updateConversation 2 ' + diff --git a/daemon.py b/daemon.py index d85068d8a..ad5359b65 100644 --- a/daemon.py +++ b/daemon.py @@ -10124,15 +10124,15 @@ class PubServer(BaseHTTPRequestHandler): likedBy = None reactBy = None reactEmoji = None - postId = path.split('?notifypost=')[1].strip() - postId = postId.replace('-', '/') + post_id = path.split('?notifypost=')[1].strip() + post_id = post_id.replace('-', '/') path = path.split('?notifypost=')[0] nickname = path.split('/users/')[1] if '/' in nickname: return 
False replies = False - postFilename = locatePost(base_dir, nickname, domain, postId, replies) + postFilename = locatePost(base_dir, nickname, domain, post_id, replies) if not postFilename: return False @@ -12876,9 +12876,9 @@ class PubServer(BaseHTTPRequestHandler): GETstartTime) -> bool: """Confirm whether to delete a calendar event """ - postId = path.split('?eventid=')[1] - if '?' in postId: - postId = postId.split('?')[0] + post_id = path.split('?eventid=')[1] + if '?' in post_id: + post_id = post_id.split('?')[0] postTime = path.split('?time=')[1] if '?' in postTime: postTime = postTime.split('?')[0] @@ -12897,7 +12897,7 @@ class PubServer(BaseHTTPRequestHandler): base_dir, path, http_prefix, domain_full, - postId, postTime, + post_id, postTime, postYear, postMonth, postDay, calling_domain) if not msg: @@ -13184,11 +13184,11 @@ class PubServer(BaseHTTPRequestHandler): postActor = path.split('?actor=')[1] if '?' in postActor: postActor = postActor.split('?')[0] - postId = path.split('/editnewspost=')[1] - if '?' in postId: - postId = postId.split('?')[0] + post_id = path.split('/editnewspost=')[1] + if '?' in post_id: + post_id = post_id.split('?')[0] postUrl = local_actor_url(http_prefix, postActor, domain_full) + \ - '/statuses/' + postId + '/statuses/' + post_id path = path.split('/editnewspost=')[0] msg = htmlEditNewsPost(self.server.cssCache, translate, base_dir, @@ -13969,17 +13969,17 @@ class PubServer(BaseHTTPRequestHandler): self.server.system_language) message_json = {} if pinnedPostJson: - postId = removeIdEnding(pinnedPostJson['id']) + post_id = removeIdEnding(pinnedPostJson['id']) message_json = \ outboxMessageCreateWrap(self.server.http_prefix, nickname, self.server.domain, self.server.port, pinnedPostJson) - message_json['id'] = postId + '/activity' - message_json['object']['id'] = postId - message_json['object']['url'] = replace_users_with_at(postId) - message_json['object']['atomUri'] = postId + message_json['id'] = post_id + '/activity' + message_json['object']['id'] = post_id + message_json['object']['url'] = replace_users_with_at(post_id) + message_json['object']['atomUri'] = post_id msg = json.dumps(message_json, ensure_ascii=False).encode('utf-8') msglen = len(msg) diff --git a/desktop_client.py b/desktop_client.py index 57c6d143f..fa046662a 100644 --- a/desktop_client.py +++ b/desktop_client.py @@ -152,7 +152,7 @@ def _createDesktopConfig(actor: str) -> None: os.mkdir(readPostsDir) -def _markPostAsRead(actor: str, postId: str, postCategory: str) -> None: +def _markPostAsRead(actor: str, post_id: str, postCategory: str) -> None: """Marks the given post as read by the given actor """ homeDir = str(Path.home()) @@ -165,24 +165,24 @@ def _markPostAsRead(actor: str, postId: str, postCategory: str) -> None: readPostsDir = homeDir + '/.config/epicyon/' + handle readPostsFilename = readPostsDir + '/' + postCategory + '.txt' if os.path.isfile(readPostsFilename): - if postId in open(readPostsFilename).read(): + if post_id in open(readPostsFilename).read(): return try: # prepend to read posts file - postId += '\n' + post_id += '\n' with open(readPostsFilename, 'r+') as readFile: content = readFile.read() - if postId not in content: + if post_id not in content: readFile.seek(0, 0) - readFile.write(postId + content) + readFile.write(post_id + content) except Exception as ex: print('WARN: Failed to mark post as read' + str(ex)) else: with open(readPostsFilename, 'w+') as readFile: - readFile.write(postId + '\n') + readFile.write(post_id + '\n') -def _hasReadPost(actor: str, 
postId: str, postCategory: str) -> bool: +def _hasReadPost(actor: str, post_id: str, postCategory: str) -> bool: """Returns true if the given post has been read by the actor """ homeDir = str(Path.home()) @@ -195,7 +195,7 @@ def _hasReadPost(actor: str, postId: str, postCategory: str) -> bool: readPostsDir = homeDir + '/.config/epicyon/' + handle readPostsFilename = readPostsDir + '/' + postCategory + '.txt' if os.path.isfile(readPostsFilename): - if postId in open(readPostsFilename).read(): + if post_id in open(readPostsFilename).read(): return True return False @@ -412,7 +412,7 @@ def _sayCommand(content: str, sayStr: str, screenreader: str, system_language, espeak) -def _desktopReplyToPost(session, postId: str, +def _desktopReplyToPost(session, post_id: str, base_dir: str, nickname: str, password: str, domain: str, port: int, http_prefix: str, cached_webfingers: {}, person_cache: {}, @@ -424,10 +424,10 @@ def _desktopReplyToPost(session, postId: str, signing_priv_key_pem: str) -> None: """Use the desktop client to send a reply to the most recent post """ - if '://' not in postId: + if '://' not in post_id: return - toNickname = getNicknameFromActor(postId) - toDomain, toPort = getDomainFromActor(postId) + toNickname = getNicknameFromActor(post_id) + toDomain, toPort = getDomainFromActor(post_id) sayStr = 'Replying to ' + toNickname + '@' + toDomain _sayCommand(sayStr, sayStr, screenreader, system_language, espeak) @@ -476,7 +476,7 @@ def _desktopReplyToPost(session, postId: str, cached_webfingers, person_cache, isArticle, system_language, low_bandwidth, content_license_url, - debug, postId, postId, + debug, post_id, post_id, conversationId, subject) == 0: sayStr = 'Reply sent' else: @@ -1720,7 +1720,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str, elif commandStr == 'reply' or commandStr == 'r': if post_json_object: if post_json_object.get('id'): - postId = post_json_object['id'] + post_id = post_json_object['id'] subject = None if post_json_object['object'].get('summary'): subject = post_json_object['object']['summary'] @@ -1729,7 +1729,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str, conversationId = \ post_json_object['object']['conversation'] sessionReply = createSession(proxy_type) - _desktopReplyToPost(sessionReply, postId, + _desktopReplyToPost(sessionReply, post_id, base_dir, nickname, password, domain, port, http_prefix, cached_webfingers, person_cache, @@ -2084,7 +2084,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str, _desktopGetBoxPostObject(boxJson, currIndex) if post_json_object: if post_json_object.get('id'): - postId = post_json_object['id'] + post_id = post_json_object['id'] announceActor = \ post_json_object['object']['attributedTo'] sayStr = 'Announcing post by ' + \ @@ -2096,7 +2096,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str, sendAnnounceViaServer(base_dir, sessionAnnounce, nickname, password, domain, port, - http_prefix, postId, + http_prefix, post_id, cached_webfingers, person_cache, True, __version__, signing_priv_key_pem) @@ -2117,7 +2117,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str, _desktopGetBoxPostObject(boxJson, currIndex) if post_json_object: if post_json_object.get('id'): - postId = post_json_object['id'] + post_id = post_json_object['id'] announceActor = \ post_json_object['object']['attributedTo'] sayStr = 'Undoing announce post by ' + \ @@ -2130,7 +2130,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, 
http_prefix: str, post_json_object, nickname, password, domain, port, - http_prefix, postId, + http_prefix, post_id, cached_webfingers, person_cache, True, __version__, diff --git a/happening.py b/happening.py index 7b7aae6f7..5fe6bd341 100644 --- a/happening.py +++ b/happening.py @@ -45,7 +45,7 @@ def _removeEventFromTimeline(eventId: str, tlEventsFilename: str) -> None: print('EX: ERROR: unable to save events timeline') -def saveEventPost(base_dir: str, handle: str, postId: str, +def saveEventPost(base_dir: str, handle: str, post_id: str, eventJson: {}) -> bool: """Saves an event to the calendar and/or the events timeline If an event has extra fields, as per Mobilizon, @@ -125,14 +125,14 @@ def saveEventPost(base_dir: str, handle: str, postId: str, # Does this event post already exist within the calendar month? if os.path.isfile(calendarFilename): - if postId in open(calendarFilename).read(): + if post_id in open(calendarFilename).read(): # Event post already exists return False # append the post Id to the file for the calendar month try: with open(calendarFilename, 'a+') as calendarFile: - calendarFile.write(postId + '\n') + calendarFile.write(post_id + '\n') except OSError: print('EX: unable to append ' + calendarFilename) @@ -203,9 +203,9 @@ def getTodaysEvents(base_dir: str, nickname: str, domain: str, calendarPostIds = [] recreateEventsFile = False with open(calendarFilename, 'r') as eventsFile: - for postId in eventsFile: - postId = postId.replace('\n', '').replace('\r', '') - postFilename = locatePost(base_dir, nickname, domain, postId) + for post_id in eventsFile: + post_id = post_id.replace('\n', '').replace('\r', '') + postFilename = locatePost(base_dir, nickname, domain, post_id) if not postFilename: recreateEventsFile = True continue @@ -233,11 +233,11 @@ def getTodaysEvents(base_dir: str, nickname: str, domain: str, int(eventTime.strftime("%m")) == monthNumber and \ int(eventTime.strftime("%d")) == dayNumber: dayOfMonth = str(int(eventTime.strftime("%d"))) - if '#statuses#' in postId: + if '#statuses#' in post_id: # link to the id so that the event can be # easily deleted - tag['postId'] = postId.split('#statuses#')[1] - tag['sender'] = postId.split('#statuses#')[0] + tag['post_id'] = post_id.split('#statuses#')[1] + tag['sender'] = post_id.split('#statuses#')[0] tag['sender'] = tag['sender'].replace('#', '/') tag['public'] = publicEvent postEvent.append(tag) @@ -245,7 +245,7 @@ def getTodaysEvents(base_dir: str, nickname: str, domain: str, # tag is a place postEvent.append(tag) if postEvent and dayOfMonth: - calendarPostIds.append(postId) + calendarPostIds.append(post_id) if not events.get(dayOfMonth): events[dayOfMonth] = [] events[dayOfMonth].append(postEvent) @@ -254,8 +254,8 @@ def getTodaysEvents(base_dir: str, nickname: str, domain: str, if recreateEventsFile: try: with open(calendarFilename, 'w+') as calendarFile: - for postId in calendarPostIds: - calendarFile.write(postId + '\n') + for post_id in calendarPostIds: + calendarFile.write(post_id + '\n') except OSError: print('EX: unable to write ' + calendarFilename) @@ -278,9 +278,9 @@ def dayEventsCheck(base_dir: str, nickname: str, domain: str, eventsExist = False with open(calendarFilename, 'r') as eventsFile: - for postId in eventsFile: - postId = postId.replace('\n', '').replace('\r', '') - postFilename = locatePost(base_dir, nickname, domain, postId) + for post_id in eventsFile: + post_id = post_id.replace('\n', '').replace('\r', '') + postFilename = locatePost(base_dir, nickname, domain, post_id) if not 
postFilename: continue @@ -334,9 +334,9 @@ def getThisWeeksEvents(base_dir: str, nickname: str, domain: str) -> {}: calendarPostIds = [] recreateEventsFile = False with open(calendarFilename, 'r') as eventsFile: - for postId in eventsFile: - postId = postId.replace('\n', '').replace('\r', '') - postFilename = locatePost(base_dir, nickname, domain, postId) + for post_id in eventsFile: + post_id = post_id.replace('\n', '').replace('\r', '') + postFilename = locatePost(base_dir, nickname, domain, post_id) if not postFilename: recreateEventsFile = True continue @@ -365,7 +365,7 @@ def getThisWeeksEvents(base_dir: str, nickname: str, domain: str) -> {}: # tag is a place postEvent.append(tag) if postEvent and weekDayIndex: - calendarPostIds.append(postId) + calendarPostIds.append(post_id) if not events.get(weekDayIndex): events[weekDayIndex] = [] events[weekDayIndex].append(postEvent) @@ -374,8 +374,8 @@ def getThisWeeksEvents(base_dir: str, nickname: str, domain: str) -> {}: if recreateEventsFile: try: with open(calendarFilename, 'w+') as calendarFile: - for postId in calendarPostIds: - calendarFile.write(postId + '\n') + for post_id in calendarPostIds: + calendarFile.write(post_id + '\n') except OSError: print('EX: unable to write ' + calendarFilename) @@ -399,9 +399,9 @@ def getCalendarEvents(base_dir: str, nickname: str, domain: str, calendarPostIds = [] recreateEventsFile = False with open(calendarFilename, 'r') as eventsFile: - for postId in eventsFile: - postId = postId.replace('\n', '').replace('\r', '') - postFilename = locatePost(base_dir, nickname, domain, postId) + for post_id in eventsFile: + post_id = post_id.replace('\n', '').replace('\r', '') + postFilename = locatePost(base_dir, nickname, domain, post_id) if not postFilename: recreateEventsFile = True continue @@ -432,7 +432,7 @@ def getCalendarEvents(base_dir: str, nickname: str, domain: str, postEvent.append(tag) if postEvent and dayOfMonth: - calendarPostIds.append(postId) + calendarPostIds.append(post_id) if not events.get(dayOfMonth): events[dayOfMonth] = [] events[dayOfMonth].append(postEvent) @@ -441,8 +441,8 @@ def getCalendarEvents(base_dir: str, nickname: str, domain: str, if recreateEventsFile: try: with open(calendarFilename, 'w+') as calendarFile: - for postId in calendarPostIds: - calendarFile.write(postId + '\n') + for post_id in calendarPostIds: + calendarFile.write(post_id + '\n') except OSError: print('EX: unable to write ' + calendarFilename) diff --git a/inbox.py b/inbox.py index 3b98c3b86..eee088483 100644 --- a/inbox.py +++ b/inbox.py @@ -132,15 +132,15 @@ def _storeLastPostId(base_dir: str, nickname: str, domain: str, It would be great if edited posts contained a back reference id to the source but we don't live in that ideal world. 
""" - actor = postId = None + actor = post_id = None if has_object_dict(post_json_object): if post_json_object['object'].get('attributedTo'): if isinstance(post_json_object['object']['attributedTo'], str): actor = post_json_object['object']['attributedTo'] - postId = removeIdEnding(post_json_object['object']['id']) + post_id = removeIdEnding(post_json_object['object']['id']) if not actor: actor = post_json_object['actor'] - postId = removeIdEnding(post_json_object['id']) + post_id = removeIdEnding(post_json_object['id']) if not actor: return lastpostDir = acct_dir(base_dir, nickname, domain) + '/lastpost' @@ -149,7 +149,7 @@ def _storeLastPostId(base_dir: str, nickname: str, domain: str, actorFilename = lastpostDir + '/' + actor.replace('/', '#') try: with open(actorFilename, 'w+') as fp: - fp.write(postId) + fp.write(post_id) except OSError: print('EX: Unable to write last post id to ' + actorFilename) @@ -535,16 +535,17 @@ def savePostToInboxQueue(base_dir: str, http_prefix: str, curr_time = datetime.datetime.utcnow() - postId = None + post_id = None if post_json_object.get('id'): - postId = removeIdEnding(post_json_object['id']) + post_id = removeIdEnding(post_json_object['id']) published = curr_time.strftime("%Y-%m-%dT%H:%M:%SZ") - if not postId: + if not post_id: statusNumber, published = getStatusNumber() if actor: - postId = actor + '/statuses/' + statusNumber + post_id = actor + '/statuses/' + statusNumber else: - postId = local_actor_url(http_prefix, nickname, originalDomain) + \ + post_id = \ + local_actor_url(http_prefix, nickname, originalDomain) + \ '/statuses/' + statusNumber # NOTE: don't change post_json_object['id'] before signature check @@ -553,8 +554,8 @@ def savePostToInboxQueue(base_dir: str, http_prefix: str, handle = nickname + '@' + domain destination = base_dir + '/accounts/' + \ - handle + '/inbox/' + postId.replace('/', '#') + '.json' - filename = inbox_queueDir + '/' + postId.replace('/', '#') + '.json' + handle + '/inbox/' + post_id.replace('/', '#') + '.json' + filename = inbox_queueDir + '/' + post_id.replace('/', '#') + '.json' sharedInboxItem = False if nickname == 'inbox': @@ -572,7 +573,7 @@ def savePostToInboxQueue(base_dir: str, http_prefix: str, newQueueItem = { 'originalId': originalPostId, - 'id': postId, + 'id': post_id, 'actor': actor, 'nickname': nickname, 'domain': domain, @@ -2662,14 +2663,14 @@ def _sendToGroupMembers(session, base_dir: str, handle: str, port: int, savePostToBox(base_dir, http_prefix, None, nickname, domain, post_json_object, 'outbox') - postId = removeIdEnding(post_json_object['object']['id']) + post_id = removeIdEnding(post_json_object['object']['id']) if debug: - print('Group announce: ' + postId) + print('Group announce: ' + post_id) announceJson = \ createAnnounce(session, base_dir, federation_list, nickname, domain, port, groupActor + '/followers', cc, - http_prefix, postId, False, False, + http_prefix, post_id, False, False, send_threads, postLog, person_cache, cached_webfingers, debug, __version__, signing_priv_key_pem) @@ -2710,7 +2711,7 @@ def _inboxUpdateCalendar(base_dir: str, handle: str, actorNickname, actorDomain): return - postId = removeIdEnding(post_json_object['id']).replace('/', '#') + post_id = removeIdEnding(post_json_object['id']).replace('/', '#') # look for events within the tags list for tagDict in post_json_object['object']['tag']: @@ -2720,7 +2721,7 @@ def _inboxUpdateCalendar(base_dir: str, handle: str, continue if not tagDict.get('startTime'): continue - saveEventPost(base_dir, handle, postId, 
tagDict) + saveEventPost(base_dir, handle, post_id, tagDict) def inboxUpdateIndex(boxname: str, base_dir: str, handle: str, @@ -3145,11 +3146,11 @@ def _lowFrequencyPostNotification(base_dir: str, http_prefix: str, fromDomainFull = get_full_domain(fromDomain, fromPort) if notifyWhenPersonPosts(base_dir, nickname, domain, fromNickname, fromDomainFull): - postId = removeIdEnding(jsonObj['id']) + post_id = removeIdEnding(jsonObj['id']) domFull = get_full_domain(domain, port) postLink = \ local_actor_url(http_prefix, nickname, domFull) + \ - '?notifypost=' + postId.replace('/', '-') + '?notifypost=' + post_id.replace('/', '-') _notifyPostArrival(base_dir, handle, postLink) diff --git a/newsdaemon.py b/newsdaemon.py index 05aa7cf37..a4bb76601 100644 --- a/newsdaemon.py +++ b/newsdaemon.py @@ -38,20 +38,20 @@ from inbox import storeHashTags from session import createSession -def _updateFeedsOutboxIndex(base_dir: str, domain: str, postId: str) -> None: +def _updateFeedsOutboxIndex(base_dir: str, domain: str, post_id: str) -> None: """Updates the index used for imported RSS feeds """ basePath = base_dir + '/accounts/news@' + domain indexFilename = basePath + '/outbox.index' if os.path.isfile(indexFilename): - if postId not in open(indexFilename).read(): + if post_id not in open(indexFilename).read(): try: with open(indexFilename, 'r+') as feedsFile: content = feedsFile.read() - if postId + '\n' not in content: + if post_id + '\n' not in content: feedsFile.seek(0, 0) - feedsFile.write(postId + '\n' + content) + feedsFile.write(post_id + '\n' + content) print('DEBUG: feeds post added to index') except Exception as ex: print('WARN: Failed to write entry to feeds posts index ' + @@ -59,7 +59,7 @@ def _updateFeedsOutboxIndex(base_dir: str, domain: str, postId: str) -> None: else: try: with open(indexFilename, 'w+') as feedsFile: - feedsFile.write(postId + '\n') + feedsFile.write(post_id + '\n') except OSError: print('EX: unable to write ' + indexFilename) @@ -440,7 +440,7 @@ def _newswireHashtagProcessing(session, base_dir: str, post_json_object: {}, def _createNewsMirror(base_dir: str, domain: str, - postIdNumber: str, url: str, + post_idNumber: str, url: str, max_mirrored_articles: int) -> bool: """Creates a local mirror of a news article """ @@ -472,14 +472,14 @@ def _createNewsMirror(base_dir: str, domain: str, # escape valve break - postId = indexFile.readline() - if not postId: + post_id = indexFile.readline() + if not post_id: continue - postId = postId.strip() - mirrorArticleDir = mirrorDir + '/' + postId + post_id = post_id.strip() + mirrorArticleDir = mirrorDir + '/' + post_id if os.path.isdir(mirrorArticleDir): rmtree(mirrorArticleDir, ignore_errors=False, onerror=None) - removals.append(postId) + removals.append(post_id) noOfDirs -= 1 # remove the corresponding index entries @@ -496,7 +496,7 @@ def _createNewsMirror(base_dir: str, domain: str, except OSError: print('EX: unable to write ' + mirrorIndexFilename) - mirrorArticleDir = mirrorDir + '/' + postIdNumber + mirrorArticleDir = mirrorDir + '/' + post_idNumber if os.path.isdir(mirrorArticleDir): # already mirrored return True @@ -521,13 +521,13 @@ def _createNewsMirror(base_dir: str, domain: str, if os.path.isfile(mirrorIndexFilename): try: with open(mirrorIndexFilename, 'a+') as indexFile: - indexFile.write(postIdNumber + '\n') + indexFile.write(post_idNumber + '\n') except OSError: print('EX: unable to append ' + mirrorIndexFilename) else: try: with open(mirrorIndexFilename, 'w+') as indexFile: - indexFile.write(postIdNumber + '\n') + 
indexFile.write(post_idNumber + '\n') except OSError: print('EX: unable to write ' + mirrorIndexFilename) @@ -676,7 +676,7 @@ def _convertRSStoActivityPub(base_dir: str, http_prefix: str, hashtags = item[6] - postId = newPostId.replace('/', '#') + post_id = newPostId.replace('/', '#') moderated = item[5] @@ -730,9 +730,9 @@ def _convertRSStoActivityPub(base_dir: str, http_prefix: str, http_prefix, domain_full, blog, translate) - clearFromPostCaches(base_dir, recentPostsCache, postId) + clearFromPostCaches(base_dir, recentPostsCache, post_id) if save_json(blog, filename): - _updateFeedsOutboxIndex(base_dir, domain, postId + '.json') + _updateFeedsOutboxIndex(base_dir, domain, post_id + '.json') # Save a file containing the time when the post arrived # this can then later be used to construct the news timeline diff --git a/outbox.py b/outbox.py index b2874a506..ed084f970 100644 --- a/outbox.py +++ b/outbox.py @@ -351,13 +351,13 @@ def postMessageToOutbox(session, translate: {}, ' is not a permitted activity type') return False if message_json.get('id'): - postId = removeIdEnding(message_json['id']) + post_id = removeIdEnding(message_json['id']) if debug: print('DEBUG: id attribute exists within POST to outbox') else: if debug: print('DEBUG: No id attribute within POST to outbox') - postId = None + post_id = None if debug: print('DEBUG: savePostToBox') if message_json['type'] != 'Upgrade': @@ -373,7 +373,7 @@ def postMessageToOutbox(session, translate: {}, savedFilename = \ savePostToBox(base_dir, http_prefix, - postId, + post_id, postToNickname, domain_full, message_json, outboxName) if not savedFilename: diff --git a/person.py b/person.py index 33e00306c..e4ea3a9de 100644 --- a/person.py +++ b/person.py @@ -1109,10 +1109,10 @@ def suspendAccount(base_dir: str, nickname: str, domain: str) -> None: def canRemovePost(base_dir: str, nickname: str, - domain: str, port: int, postId: str) -> bool: + domain: str, port: int, post_id: str) -> bool: """Returns true if the given post can be removed """ - if '/statuses/' not in postId: + if '/statuses/' not in post_id: return False domain_full = get_full_domain(domain, port) @@ -1121,7 +1121,7 @@ def canRemovePost(base_dir: str, nickname: str, adminNickname = get_config_param(base_dir, 'admin') if not adminNickname: return False - if domain_full + '/users/' + adminNickname + '/' in postId: + if domain_full + '/users/' + adminNickname + '/' in post_id: return False # is the post by a moderator? 
@@ -1130,7 +1130,8 @@ def canRemovePost(base_dir: str, nickname: str, with open(moderatorsFile, 'r') as f: lines = f.readlines() for moderator in lines: - if domain_full + '/users/' + moderator.strip('\n') + '/' in postId: + if domain_full + '/users/' + \ + moderator.strip('\n') + '/' in post_id: return False return True diff --git a/posts.py b/posts.py index bedac4a08..f56700cb0 100644 --- a/posts.py +++ b/posts.py @@ -905,7 +905,7 @@ def deleteAllPosts(base_dir: str, print('ERROR: deleteAllPosts ' + str(ex)) -def savePostToBox(base_dir: str, http_prefix: str, postId: str, +def savePostToBox(base_dir: str, http_prefix: str, post_id: str, nickname: str, domain: str, post_json_object: {}, boxname: str) -> str: """Saves the give json to the give box @@ -918,18 +918,18 @@ def savePostToBox(base_dir: str, http_prefix: str, postId: str, originalDomain = domain domain = remove_domain_port(domain) - if not postId: + if not post_id: statusNumber, published = getStatusNumber() - postId = \ + post_id = \ local_actor_url(http_prefix, nickname, originalDomain) + \ '/statuses/' + statusNumber - post_json_object['id'] = postId + '/activity' + post_json_object['id'] = post_id + '/activity' if has_object_dict(post_json_object): - post_json_object['object']['id'] = postId - post_json_object['object']['atomUri'] = postId + post_json_object['object']['id'] = post_id + post_json_object['object']['atomUri'] = post_id boxDir = createPersonDir(nickname, domain, base_dir, boxname) - filename = boxDir + '/' + postId.replace('/', '#') + '.json' + filename = boxDir + '/' + post_id.replace('/', '#') + '.json' save_json(post_json_object, filename) return filename @@ -969,14 +969,14 @@ def _updateHashtagsIndex(base_dir: str, tag: {}, newPostId: str) -> None: def _addSchedulePost(base_dir: str, nickname: str, domain: str, - eventDateStr: str, postId: str) -> None: + eventDateStr: str, post_id: str) -> None: """Adds a scheduled post to the index """ handle = nickname + '@' + domain scheduleIndexFilename = \ base_dir + '/accounts/' + handle + '/schedule.index' - indexStr = eventDateStr + ' ' + postId.replace('/', '#') + indexStr = eventDateStr + ' ' + post_id.replace('/', '#') if os.path.isfile(scheduleIndexFilename): if indexStr not in open(scheduleIndexFilename).read(): try: @@ -2093,8 +2093,8 @@ def createDirectMessagePost(base_dir: str, message_json['cc'] = [] message_json['object']['cc'] = [] if schedulePost: - postId = removeIdEnding(message_json['object']['id']) - savePostToBox(base_dir, http_prefix, postId, + post_id = removeIdEnding(message_json['object']['id']) + savePostToBox(base_dir, http_prefix, post_id, nickname, domain, message_json, 'scheduled') return message_json @@ -3969,8 +3969,8 @@ def archivePostsForPerson(http_prefix: str, nickname: str, domain: str, # get the existing index entries as a string newIndex = '' with open(indexFilename, 'r') as indexFile: - for postId in indexFile: - newIndex += postId + for post_id in indexFile: + newIndex += post_id indexCtr += 1 if indexCtr >= maxPostsInBox: break @@ -4535,9 +4535,9 @@ def downloadAnnounce(session, base_dir: str, http_prefix: str, if not os.path.isdir(announceCacheDir): os.mkdir(announceCacheDir) - postId = None + post_id = None if post_json_object.get('id'): - postId = removeIdEnding(post_json_object['id']) + post_id = removeIdEnding(post_json_object['id']) announceFilename = \ announceCacheDir + '/' + \ post_json_object['object'].replace('/', '#') + '.json' @@ -4611,17 +4611,17 @@ def downloadAnnounce(session, base_dir: str, http_prefix: str, 
print('WARN: announce json is not a dict - ' + post_json_object['object']) _rejectAnnounce(announceFilename, - base_dir, nickname, domain, postId, + base_dir, nickname, domain, post_id, recentPostsCache) return None if not announcedJson.get('id'): _rejectAnnounce(announceFilename, - base_dir, nickname, domain, postId, + base_dir, nickname, domain, post_id, recentPostsCache) return None if not announcedJson.get('type'): _rejectAnnounce(announceFilename, - base_dir, nickname, domain, postId, + base_dir, nickname, domain, post_id, recentPostsCache) return None if announcedJson['type'] == 'Video': @@ -4633,12 +4633,12 @@ def downloadAnnounce(session, base_dir: str, http_prefix: str, announcedJson = convertedJson if '/statuses/' not in announcedJson['id']: _rejectAnnounce(announceFilename, - base_dir, nickname, domain, postId, + base_dir, nickname, domain, post_id, recentPostsCache) return None if not has_users_path(announcedJson['id']): _rejectAnnounce(announceFilename, - base_dir, nickname, domain, postId, + base_dir, nickname, domain, post_id, recentPostsCache) return None if announcedJson['type'] != 'Note' and \ @@ -4646,22 +4646,22 @@ def downloadAnnounce(session, base_dir: str, http_prefix: str, announcedJson['type'] != 'Article': # You can only announce Note or Article types _rejectAnnounce(announceFilename, - base_dir, nickname, domain, postId, + base_dir, nickname, domain, post_id, recentPostsCache) return None if not announcedJson.get('content'): _rejectAnnounce(announceFilename, - base_dir, nickname, domain, postId, + base_dir, nickname, domain, post_id, recentPostsCache) return None if not announcedJson.get('published'): _rejectAnnounce(announceFilename, - base_dir, nickname, domain, postId, + base_dir, nickname, domain, post_id, recentPostsCache) return None if not valid_post_date(announcedJson['published'], 90, debug): _rejectAnnounce(announceFilename, - base_dir, nickname, domain, postId, + base_dir, nickname, domain, post_id, recentPostsCache) return None if not understoodPostLanguage(base_dir, nickname, domain, @@ -4673,19 +4673,19 @@ def downloadAnnounce(session, base_dir: str, http_prefix: str, contentStr = announcedJson['content'] if dangerousMarkup(contentStr, allow_local_network_access): _rejectAnnounce(announceFilename, - base_dir, nickname, domain, postId, + base_dir, nickname, domain, post_id, recentPostsCache) return None if isFiltered(base_dir, nickname, domain, contentStr): _rejectAnnounce(announceFilename, - base_dir, nickname, domain, postId, + base_dir, nickname, domain, post_id, recentPostsCache) return None if invalid_ciphertext(contentStr): _rejectAnnounce(announceFilename, - base_dir, nickname, domain, postId, + base_dir, nickname, domain, post_id, recentPostsCache) print('WARN: Invalid ciphertext within announce ' + str(announcedJson)) @@ -4711,7 +4711,7 @@ def downloadAnnounce(session, base_dir: str, http_prefix: str, if announcedJson['type'] != 'Create': # Create wrap failed _rejectAnnounce(announceFilename, - base_dir, nickname, domain, postId, + base_dir, nickname, domain, post_id, recentPostsCache) return None @@ -4730,7 +4730,7 @@ def downloadAnnounce(session, base_dir: str, http_prefix: str, if isBlocked(base_dir, nickname, domain, attributedNickname, attributedDomain): _rejectAnnounce(announceFilename, - base_dir, nickname, domain, postId, + base_dir, nickname, domain, post_id, recentPostsCache) return None post_json_object = announcedJson @@ -4742,7 +4742,7 @@ def downloadAnnounce(session, base_dir: str, http_prefix: str, return None -def 
isMuted(base_dir: str, nickname: str, domain: str, postId: str, +def isMuted(base_dir: str, nickname: str, domain: str, post_id: str, conversationId: str) -> bool: """Returns true if the given post is muted """ @@ -4752,7 +4752,7 @@ def isMuted(base_dir: str, nickname: str, domain: str, postId: str, conversationId.replace('/', '#') + '.muted' if os.path.isfile(convMutedFilename): return True - postFilename = locatePost(base_dir, nickname, domain, postId) + postFilename = locatePost(base_dir, nickname, domain, post_id) if not postFilename: return False if os.path.isfile(postFilename + '.muted'): @@ -5211,20 +5211,20 @@ def editedPostFilename(base_dir: str, nickname: str, domain: str, actor.replace('/', '#') if not os.path.isfile(actorFilename): return '' - postId = removeIdEnding(post_json_object['object']['id']) - lastpostId = None + post_id = removeIdEnding(post_json_object['object']['id']) + lastpost_id = None try: with open(actorFilename, 'r') as fp: - lastpostId = fp.read() + lastpost_id = fp.read() except OSError: print('EX: editedPostFilename unable to read ' + actorFilename) return '' - if not lastpostId: + if not lastpost_id: return '' - if lastpostId == postId: + if lastpost_id == post_id: return '' lastpostFilename = \ - locatePost(base_dir, nickname, domain, lastpostId, False) + locatePost(base_dir, nickname, domain, lastpost_id, False) if not lastpostFilename: return '' lastpostJson = load_json(lastpostFilename, 0) @@ -5254,11 +5254,11 @@ def editedPostFilename(base_dir: str, nickname: str, domain: str, if timeDiffSeconds > maxTimeDiffSeconds: return '' if debug: - print(postId + ' might be an edit of ' + lastpostId) + print(post_id + ' might be an edit of ' + lastpost_id) if wordsSimilarity(lastpostJson['object']['content'], post_json_object['object']['content'], 10) < 70: return '' - print(postId + ' is an edit of ' + lastpostId) + print(post_id + ' is an edit of ' + lastpost_id) return lastpostFilename diff --git a/schedule.py b/schedule.py index ec5e7eb79..5a725fc56 100644 --- a/schedule.py +++ b/schedule.py @@ -42,8 +42,8 @@ def _updatePostSchedule(base_dir: str, handle: str, httpd, dateStr = line.split(' ')[0] if 'T' not in dateStr: continue - postId = line.split(' ', 1)[1].replace('\n', '').replace('\r', '') - postFilename = scheduleDir + postId + '.json' + post_id = line.split(' ', 1)[1].replace('\n', '').replace('\r', '') + postFilename = scheduleDir + post_id + '.json' if deleteSchedulePost: # delete extraneous scheduled posts if os.path.isfile(postFilename): @@ -89,7 +89,7 @@ def _updatePostSchedule(base_dir: str, handle: str, httpd, if post_json_object['object'].get('published'): post_json_object['published'] = published - print('Sending scheduled post ' + postId) + print('Sending scheduled post ' + post_id) if nickname: httpd.postToNickname = nickname @@ -143,7 +143,7 @@ def _updatePostSchedule(base_dir: str, handle: str, httpd, '/outbox/') os.rename(postFilename, outboxPostFilename) - print('Scheduled post sent ' + postId) + print('Scheduled post sent ' + post_id) indexLines.remove(line) if len(indexLines) > maxScheduledPosts: diff --git a/speaker.py b/speaker.py index 1715da6ed..217faa746 100644 --- a/speaker.py +++ b/speaker.py @@ -260,7 +260,7 @@ def _removeEmojiFromText(sayText: str) -> str: def _speakerEndpointJson(displayName: str, summary: str, content: str, sayContent: str, imageDescription: str, - links: [], gender: str, postId: str, + links: [], gender: str, post_id: str, postDM: bool, postReply: bool, followRequestsExist: bool, followRequestsList: [], 
@@ -277,7 +277,7 @@ def _speakerEndpointJson(displayName: str, summary: str, "published": published, "imageDescription": imageDescription, "detectedLinks": links, - "id": postId, + "id": post_id, "direct": isDirect, "replyToYou": replyToYou, "notify": { @@ -489,9 +489,9 @@ def _postToSpeakerJson(base_dir: str, http_prefix: str, content = \ translate['announces'] + ' ' + \ announcedHandle + '. ' + content - postId = None + post_id = None if post_json_object['object'].get('id'): - postId = removeIdEnding(post_json_object['object']['id']) + post_id = removeIdEnding(post_json_object['object']['id']) followRequestsExist = False followRequestsList = [] @@ -525,7 +525,7 @@ def _postToSpeakerJson(base_dir: str, http_prefix: str, return _speakerEndpointJson(speakerName, summary, content, sayContent, imageDescription, - detectedLinks, gender, postId, + detectedLinks, gender, post_id, postDM, postReply, followRequestsExist, followRequestsList, diff --git a/tests.py b/tests.py index 665700f1f..a0933415f 100644 --- a/tests.py +++ b/tests.py @@ -4232,7 +4232,8 @@ def _testReplyToPublicPost(base_dir: str) -> None: domain = 'other.site' port = 443 http_prefix = 'https' - postId = http_prefix + '://rat.site/users/ninjarodent/statuses/63746173435' + post_id = \ + http_prefix + '://rat.site/users/ninjarodent/statuses/63746173435' content = "@ninjarodent@rat.site This is a test." followersOnly = False saveToFile = False @@ -4242,7 +4243,7 @@ def _testReplyToPublicPost(base_dir: str) -> None: mediaType = None imageDescription = 'Some description' city = 'London, England' - testInReplyTo = postId + testInReplyTo = post_id testInReplyToAtomUri = None testSubject = None testSchedulePost = False diff --git a/utils.py b/utils.py index 93c3c6cc3..c8e8a4183 100644 --- a/utils.py +++ b/utils.py @@ -614,12 +614,12 @@ def removeIdEnding(idStr: str) -> str: return idStr -def removeHashFromPostId(postId: str) -> str: +def removeHashFromPostId(post_id: str) -> str: """Removes any has from a post id """ - if '#' not in postId: - return postId - return postId.split('#')[0] + if '#' not in post_id: + return post_id + return post_id.split('#')[0] def getProtocolPrefixes() -> []: @@ -1294,11 +1294,11 @@ def locateNewsArrival(base_dir: str, domain: str, def clearFromPostCaches(base_dir: str, recentPostsCache: {}, - postId: str) -> None: + post_id: str) -> None: """Clears cached html for the given post, so that edits to news will appear """ - filename = '/postcache/' + postId + '.html' + filename = '/postcache/' + post_id + '.html' for subdir, dirs, files in os.walk(base_dir + '/accounts'): for acct in dirs: if '@' not in acct: @@ -1315,14 +1315,14 @@ def clearFromPostCaches(base_dir: str, recentPostsCache: {}, str(postFilename)) # if the post is in the recent posts cache then remove it if recentPostsCache.get('index'): - if postId in recentPostsCache['index']: - recentPostsCache['index'].remove(postId) + if post_id in recentPostsCache['index']: + recentPostsCache['index'].remove(post_id) if recentPostsCache.get('json'): - if recentPostsCache['json'].get(postId): - del recentPostsCache['json'][postId] + if recentPostsCache['json'].get(post_id): + del recentPostsCache['json'][post_id] if recentPostsCache.get('html'): - if recentPostsCache['html'].get(postId): - del recentPostsCache['html'][postId] + if recentPostsCache['html'].get(post_id): + del recentPostsCache['html'][post_id] break @@ -1489,20 +1489,20 @@ def removeModerationPostFromIndex(base_dir: str, postUrl: str, debug: bool) -> None: """Removes a url from the moderation 
index """ - moderationIndexFile = base_dir + '/accounts/moderation.txt' - if not os.path.isfile(moderationIndexFile): + moderation_index_file = base_dir + '/accounts/moderation.txt' + if not os.path.isfile(moderation_index_file): return - postId = removeIdEnding(postUrl) - if postId in open(moderationIndexFile).read(): - with open(moderationIndexFile, 'r') as f: + post_id = removeIdEnding(postUrl) + if post_id in open(moderation_index_file).read(): + with open(moderation_index_file, 'r') as f: lines = f.readlines() - with open(moderationIndexFile, 'w+') as f: + with open(moderation_index_file, 'w+') as f: for line in lines: - if line.strip("\n").strip("\r") != postId: + if line.strip("\n").strip("\r") != post_id: f.write(line) else: if debug: - print('DEBUG: removed ' + postId + + print('DEBUG: removed ' + post_id + ' from moderation index') @@ -1579,24 +1579,24 @@ def removePostFromCache(post_json_object: {}, recentPostsCache: {}) -> None: if not recentPostsCache.get('index'): return - postId = post_json_object['id'] - if '#' in postId: - postId = postId.split('#', 1)[0] - postId = removeIdEnding(postId).replace('/', '#') - if postId not in recentPostsCache['index']: + post_id = post_json_object['id'] + if '#' in post_id: + post_id = post_id.split('#', 1)[0] + post_id = removeIdEnding(post_id).replace('/', '#') + if post_id not in recentPostsCache['index']: return if recentPostsCache.get('index'): - if postId in recentPostsCache['index']: - recentPostsCache['index'].remove(postId) + if post_id in recentPostsCache['index']: + recentPostsCache['index'].remove(post_id) if recentPostsCache.get('json'): - if recentPostsCache['json'].get(postId): - del recentPostsCache['json'][postId] + if recentPostsCache['json'].get(post_id): + del recentPostsCache['json'][post_id] if recentPostsCache.get('html'): - if recentPostsCache['html'].get(postId): - del recentPostsCache['html'][postId] + if recentPostsCache['html'].get(post_id): + del recentPostsCache['html'][post_id] def _deleteCachedHtml(base_dir: str, nickname: str, domain: str, @@ -1632,7 +1632,7 @@ def _deleteHashtagsOnPost(base_dir: str, post_json_object: {}) -> None: return # get the id of the post - postId = removeIdEnding(post_json_object['object']['id']) + post_id = removeIdEnding(post_json_object['object']['id']) for tag in post_json_object['object']['tag']: if not tag.get('type'): continue @@ -1644,7 +1644,7 @@ def _deleteHashtagsOnPost(base_dir: str, post_json_object: {}) -> None: tagIndexFilename = base_dir + '/tags/' + tag['name'][1:] + '.txt' if not os.path.isfile(tagIndexFilename): continue - # remove postId from the tag index file + # remove post_id from the tag index file lines = None with open(tagIndexFilename, 'r') as f: lines = f.readlines() @@ -1652,7 +1652,7 @@ def _deleteHashtagsOnPost(base_dir: str, post_json_object: {}) -> None: continue newlines = '' for fileLine in lines: - if postId in fileLine: + if post_id in fileLine: # skip over the deleted post continue newlines += fileLine @@ -1682,16 +1682,16 @@ def _deleteConversationPost(base_dir: str, nickname: str, domain: str, conversationDir = acct_dir(base_dir, nickname, domain) + '/conversation' conversationId = post_json_object['object']['conversation'] conversationId = conversationId.replace('/', '#') - postId = post_json_object['object']['id'] + post_id = post_json_object['object']['id'] conversationFilename = conversationDir + '/' + conversationId if not os.path.isfile(conversationFilename): return False conversationStr = '' with open(conversationFilename, 'r') as fp: 
conversationStr = fp.read() - if postId + '\n' not in conversationStr: + if post_id + '\n' not in conversationStr: return False - conversationStr = conversationStr.replace(postId + '\n', '') + conversationStr = conversationStr.replace(post_id + '\n', '') if conversationStr: with open(conversationFilename, 'w+') as fp: fp.write(conversationStr) @@ -1771,8 +1771,8 @@ def deletePost(base_dir: str, http_prefix: str, if has_object_dict(post_json_object): if post_json_object['object'].get('moderationStatus'): if post_json_object.get('id'): - postId = removeIdEnding(post_json_object['id']) - removeModerationPostFromIndex(base_dir, postId, debug) + post_id = removeIdEnding(post_json_object['id']) + removeModerationPostFromIndex(base_dir, post_id, debug) # remove any hashtags index entries if hasObject: @@ -2013,31 +2013,31 @@ def updateRecentPostsCache(recentPostsCache: {}, max_recent_posts: int, """ if not post_json_object.get('id'): return - postId = post_json_object['id'] - if '#' in postId: - postId = postId.split('#', 1)[0] - postId = removeIdEnding(postId).replace('/', '#') + post_id = post_json_object['id'] + if '#' in post_id: + post_id = post_id.split('#', 1)[0] + post_id = removeIdEnding(post_id).replace('/', '#') if recentPostsCache.get('index'): - if postId in recentPostsCache['index']: + if post_id in recentPostsCache['index']: return - recentPostsCache['index'].append(postId) + recentPostsCache['index'].append(post_id) post_json_object['muted'] = False - recentPostsCache['json'][postId] = json.dumps(post_json_object) - recentPostsCache['html'][postId] = htmlStr + recentPostsCache['json'][post_id] = json.dumps(post_json_object) + recentPostsCache['html'][post_id] = htmlStr while len(recentPostsCache['html'].items()) > max_recent_posts: - postId = recentPostsCache['index'][0] + post_id = recentPostsCache['index'][0] recentPostsCache['index'].pop(0) - if recentPostsCache['json'].get(postId): - del recentPostsCache['json'][postId] - if recentPostsCache['html'].get(postId): - del recentPostsCache['html'][postId] + if recentPostsCache['json'].get(post_id): + del recentPostsCache['json'][post_id] + if recentPostsCache['html'].get(post_id): + del recentPostsCache['html'][post_id] else: - recentPostsCache['index'] = [postId] + recentPostsCache['index'] = [post_id] recentPostsCache['json'] = {} recentPostsCache['html'] = {} - recentPostsCache['json'][postId] = json.dumps(post_json_object) - recentPostsCache['html'][postId] = htmlStr + recentPostsCache['json'][post_id] = json.dumps(post_json_object) + recentPostsCache['html'][post_id] = htmlStr def fileLastModified(filename: str) -> str: @@ -2555,11 +2555,11 @@ def camelCaseSplit(text: str) -> str: def rejectPostId(base_dir: str, nickname: str, domain: str, - postId: str, recentPostsCache: {}) -> None: + post_id: str, recentPostsCache: {}) -> None: """ Marks the given post as rejected, for example an announce which is too old """ - postFilename = locatePost(base_dir, nickname, domain, postId) + postFilename = locatePost(base_dir, nickname, domain, post_id) if not postFilename: return diff --git a/webapp_calendar.py b/webapp_calendar.py index eed9b2e58..181bd03c9 100644 --- a/webapp_calendar.py +++ b/webapp_calendar.py @@ -33,7 +33,7 @@ from webapp_utils import htmlKeyboardNavigation def htmlCalendarDeleteConfirm(cssCache: {}, translate: {}, base_dir: str, path: str, http_prefix: str, - domain_full: str, postId: str, postTime: str, + domain_full: str, post_id: str, postTime: str, year: int, monthNumber: int, dayNumber: int, calling_domain: str) 
-> str: """Shows a screen asking to confirm the deletion of a calendar event @@ -41,7 +41,7 @@ def htmlCalendarDeleteConfirm(cssCache: {}, translate: {}, base_dir: str, nickname = getNicknameFromActor(path) actor = local_actor_url(http_prefix, nickname, domain_full) domain, port = getDomainFromActor(actor) - messageId = actor + '/statuses/' + postId + messageId = actor + '/statuses/' + post_id postFilename = locatePost(base_dir, nickname, domain, messageId) if not postFilename: @@ -137,15 +137,15 @@ def _htmlCalendarDay(person_cache: {}, cssCache: {}, translate: {}, eventTime = None eventDescription = None eventPlace = None - postId = None + post_id = None senderName = '' senderActor = None eventIsPublic = False # get the time place and description for ev in eventPost: if ev['type'] == 'Event': - if ev.get('postId'): - postId = ev['postId'] + if ev.get('post_id'): + post_id = ev['post_id'] if ev.get('startTime'): eventDate = \ datetime.strptime(ev['startTime'], @@ -184,11 +184,13 @@ def _htmlCalendarDay(person_cache: {}, cssCache: {}, translate: {}, translate['Reminder'] + ': ' + eventDescription deleteButtonStr = '' - if postId: + if post_id: deleteButtonStr = \ '\n' + \
                     translate['Delete this event'] + ' |' + \ '' + \ '' + \ '' + \
         announcesStr + '\n' + \ - ' @unattributed\n' @@ -934,13 +934,13 @@ def _announceWithDisplayNameHtml(translate: {}, announcesStr = 'announces' if translate.get(announcesStr): announcesStr = translate[announcesStr] - postId = removeIdEnding(post_json_object['object']['id']) + post_id = removeIdEnding(post_json_object['object']['id']) return ' ' + \
         announcesStr + '\n' + \ - ' ' + announceDisplayName + '\n' @@ -2251,13 +2251,13 @@ def htmlEmojiReactionPicker(cssCache: {}, reactionsJson = load_json(reactionsFilename) emojiPicksStr = '' baseUrl = '/users/' + nickname - postId = removeIdEnding(post_json_object['id']) + post_id = removeIdEnding(post_json_object['id']) for category, item in reactionsJson.items(): emojiPicksStr += '
\n' for emojiContent in item: emojiContentEncoded = urllib.parse.quote_plus(emojiContent) emojiUrl = \ - baseUrl + '?react=' + postId + \ + baseUrl + '?react=' + post_id + \ '?actor=' + post_json_object['actor'] + \ '?tl=' + boxName + \ '?page=' + str(pageNumber) + \ diff --git a/webapp_search.py b/webapp_search.py index b10806ed6..cb71630f0 100644 --- a/webapp_search.py +++ b/webapp_search.py @@ -818,20 +818,20 @@ def htmlHashtagSearch(cssCache: {}, '">\n \n' index = startIndex while index <= endIndex: - postId = lines[index].strip('\n').strip('\r') - if ' ' not in postId: - nickname = getNicknameFromActor(postId) + post_id = lines[index].strip('\n').strip('\r') + if ' ' not in post_id: + nickname = getNicknameFromActor(post_id) if not nickname: index += 1 continue else: - postFields = postId.split(' ') + postFields = post_id.split(' ') if len(postFields) != 3: index += 1 continue nickname = postFields[1] - postId = postFields[2] - postFilename = locatePost(base_dir, nickname, domain, postId) + post_id = postFields[2] + postFilename = locatePost(base_dir, nickname, domain, post_id) if not postFilename: index += 1 continue @@ -939,24 +939,24 @@ def rssHashtagSearch(nickname: str, domain: str, port: int, hashtagFeed = \ rss2TagHeader(hashtag, http_prefix, domain_full) for index in range(len(lines)): - postId = lines[index].strip('\n').strip('\r') - if ' ' not in postId: - nickname = getNicknameFromActor(postId) + post_id = lines[index].strip('\n').strip('\r') + if ' ' not in post_id: + nickname = getNicknameFromActor(post_id) if not nickname: index += 1 if index >= maxFeedLength: break continue else: - postFields = postId.split(' ') + postFields = post_id.split(' ') if len(postFields) != 3: index += 1 if index >= maxFeedLength: break continue nickname = postFields[1] - postId = postFields[2] - postFilename = locatePost(base_dir, nickname, domain, postId) + post_id = postFields[2] + postFilename = locatePost(base_dir, nickname, domain, post_id) if not postFilename: index += 1 if index >= maxFeedLength: diff --git a/webapp_timeline.py b/webapp_timeline.py index f8920edc0..633680f4e 100644 --- a/webapp_timeline.py +++ b/webapp_timeline.py @@ -876,9 +876,9 @@ def htmlTimeline(cssCache: {}, defaultTimeline: str, # is the post in the memory cache of recent ones? currTlStr = None if boxName != 'tlmedia' and recentPostsCache.get('html'): - postId = removeIdEnding(item['id']).replace('/', '#') - if recentPostsCache['html'].get(postId): - currTlStr = recentPostsCache['html'][postId] + post_id = removeIdEnding(item['id']).replace('/', '#') + if recentPostsCache['html'].get(post_id): + currTlStr = recentPostsCache['html'][post_id] currTlStr = \ preparePostFromHtmlCache(nickname, currTlStr,