diff --git a/auth.py b/auth.py
index b780799d2..d51bb0893 100644
--- a/auth.py
+++ b/auth.py
@@ -275,13 +275,13 @@ def recordLoginFailure(base_dir: str, ipAddress: str,
         writeType = 'a+'
         if not os.path.isfile(failureLog):
             writeType = 'w+'
-        currTime = datetime.datetime.utcnow()
-        currTimeStr = currTime.strftime("%Y-%m-%d %H:%M:%SZ")
+        curr_time = datetime.datetime.utcnow()
+        curr_timeStr = curr_time.strftime("%Y-%m-%d %H:%M:%SZ")
         try:
             with open(failureLog, writeType) as fp:
                 # here we use a similar format to an ssh log, so that
                 # systems such as fail2ban can parse it
-                fp.write(currTimeStr + ' ' +
+                fp.write(curr_timeStr + ' ' +
                          'ip-127-0-0-1 sshd[20710]: ' +
                          'Disconnecting invalid user epicyon ' +
                          ipAddress + ' port 443: ' +
diff --git a/blocking.py b/blocking.py
index 7858116c1..dfc2d11b3 100644
--- a/blocking.py
+++ b/blocking.py
@@ -272,11 +272,11 @@ def updateBlockedCache(base_dir: str,
                        blockedCacheUpdateSecs: int) -> int:
     """Updates the cache of globally blocked domains held in memory
     """
-    currTime = int(time.time())
-    if blockedCacheLastUpdated > currTime:
+    curr_time = int(time.time())
+    if blockedCacheLastUpdated > curr_time:
         print('WARN: Cache updated in the future')
         blockedCacheLastUpdated = 0
-    secondsSinceLastUpdate = currTime - blockedCacheLastUpdated
+    secondsSinceLastUpdate = curr_time - blockedCacheLastUpdated
     if secondsSinceLastUpdate < blockedCacheUpdateSecs:
         return blockedCacheLastUpdated
     globalBlockingFilename = base_dir + '/accounts/blocking.txt'
@@ -293,7 +293,7 @@ def updateBlockedCache(base_dir: str,
             blockedCache += blockedLines
     except OSError as ex:
         print('EX: unable to read ' + globalBlockingFilename + ' ' + str(ex))
-    return currTime
+    return curr_time
 
 
 def _getShortDomain(domain: str) -> str:
@@ -963,8 +963,8 @@ def broch_modeLapses(base_dir: str, lapseDays: int) -> bool:
         return False
     if not modifiedDate:
         return False
-    currTime = datetime.datetime.utcnow()
-    daysSinceBroch = (currTime - modifiedDate).days
+    curr_time = datetime.datetime.utcnow()
+    daysSinceBroch = (curr_time - modifiedDate).days
     if daysSinceBroch >= lapseDays:
         removed = False
         try:
diff --git a/cache.py b/cache.py
index dc805f1b5..26e82845f 100644
--- a/cache.py
+++ b/cache.py
@@ -59,10 +59,10 @@ def storePersonInCache(base_dir: str, personUrl: str,
         # This is not an actor or person account
         return
 
-    currTime = datetime.datetime.utcnow()
+    curr_time = datetime.datetime.utcnow()
     person_cache[personUrl] = {
         "actor": personJson,
-        "timestamp": currTime.strftime("%Y-%m-%dT%H:%M:%SZ")
+        "timestamp": curr_time.strftime("%Y-%m-%dT%H:%M:%SZ")
     }
     if not base_dir:
         return
@@ -98,9 +98,9 @@ def getPersonFromCache(base_dir: str, personUrl: str, person_cache: {},
     if person_cache.get(personUrl):
         if not loadedFromFile:
             # update the timestamp for the last time the actor was retrieved
-            currTime = datetime.datetime.utcnow()
-            currTimeStr = currTime.strftime("%Y-%m-%dT%H:%M:%SZ")
-            person_cache[personUrl]['timestamp'] = currTimeStr
+            curr_time = datetime.datetime.utcnow()
+            curr_timeStr = curr_time.strftime("%Y-%m-%dT%H:%M:%SZ")
+            person_cache[personUrl]['timestamp'] = curr_timeStr
         return person_cache[personUrl]['actor']
     return None
 
@@ -108,12 +108,12 @@ def expirePersonCache(person_cache: {}):
     """Expires old entries from the cache in memory
     """
-    currTime = datetime.datetime.utcnow()
+    curr_time = datetime.datetime.utcnow()
     removals = []
     for personUrl, cacheJson in person_cache.items():
         cacheTime = \
             datetime.datetime.strptime(cacheJson['timestamp'],
                                        "%Y-%m-%dT%H:%M:%SZ")
-        daysSinceCached = (currTime - cacheTime).days
+        daysSinceCached = (curr_time - cacheTime).days
         if daysSinceCached > 2:
             removals.append(personUrl)
     if len(removals) > 0:
diff --git a/categories.py b/categories.py
index eb141a0dc..bf1589dfc 100644
--- a/categories.py
+++ b/categories.py
@@ -42,8 +42,8 @@ def getHashtagCategories(base_dir: str,
     hashtagCategories = {}
 
     if recent:
-        currTime = datetime.datetime.utcnow()
-        daysSinceEpoch = (currTime - datetime.datetime(1970, 1, 1)).days
+        curr_time = datetime.datetime.utcnow()
+        daysSinceEpoch = (curr_time - datetime.datetime(1970, 1, 1)).days
         recently = daysSinceEpoch - 1
 
     for subdir, dirs, files in os.walk(base_dir + '/tags'):
diff --git a/city.py b/city.py
index f8d3f19c7..26c234a28 100644
--- a/city.py
+++ b/city.py
@@ -88,7 +88,7 @@ def _getDecoyCamera(decoySeed: int) -> (str, str, int):
     return cameras[index][0], cameras[index][1], serialNumber
 
 
-def _getCityPulse(currTimeOfDay, decoySeed: int) -> (float, float):
+def _getCityPulse(curr_timeOfDay, decoySeed: int) -> (float, float):
     """This simulates expected average patterns of movement in a city.
     Jane or Joe average lives and works in the city, commuting in
     and out of the central district for work. They have a unique
@@ -101,11 +101,11 @@ def _getCityPulse(currTimeOfDay, decoySeed: int) -> (float, float):
     variance = 3
     busyStates = (PERSON_WORK, PERSON_SHOP, PERSON_PLAY, PERSON_PARTY)
     dataDecoyState = PERSON_SLEEP
-    weekday = currTimeOfDay.weekday()
+    weekday = curr_timeOfDay.weekday()
     minHour = 7 + randint(0, variance)
     maxHour = 17 + randint(0, variance)
-    if currTimeOfDay.hour > minHour:
-        if currTimeOfDay.hour <= maxHour:
+    if curr_timeOfDay.hour > minHour:
+        if curr_timeOfDay.hour <= maxHour:
             if weekday < 5:
                 dataDecoyState = PERSON_WORK
             elif weekday == 5:
@@ -177,7 +177,7 @@ def parseNogoString(nogoLine: str) -> []:
 
 
 def spoofGeolocation(base_dir: str,
-                     city: str, currTime, decoySeed: int,
+                     city: str, curr_time, decoySeed: int,
                      citiesList: [],
                      nogoList: []) -> (float, float, str, str,
                                        str, str, int):
@@ -255,7 +255,7 @@ def spoofGeolocation(base_dir: str,
     approxTimeZone = int(longitude / 15.0)
     if longdirection == 'E':
         approxTimeZone = -approxTimeZone
-    currTimeAdjusted = currTime - \
+    curr_timeAdjusted = curr_time - \
         datetime.timedelta(hours=approxTimeZone)
     camMake, camModel, camSerialNumber = \
         _getDecoyCamera(decoySeed)
@@ -264,7 +264,7 @@ def spoofGeolocation(base_dir: str,
     while not validCoord:
         # patterns of activity change in the city over time
         (distanceFromCityCenter, angleRadians) = \
-            _getCityPulse(currTimeAdjusted, decoySeed + seedOffset)
+            _getCityPulse(curr_timeAdjusted, decoySeed + seedOffset)
         # The city radius value is in longitude and the reference
         # is Manchester. Adjust for the radius of the chosen city.
         if areaKm2 > 1:
diff --git a/daemon.py b/daemon.py
index 68bef6839..dc66fd9f6 100644
--- a/daemon.py
+++ b/daemon.py
@@ -410,28 +410,28 @@ class PubServer(BaseHTTPRequestHandler):
         if not uaStr:
             return
 
-        currTime = int(time.time())
+        curr_time = int(time.time())
         if self.server.knownCrawlers.get(uaStr):
             self.server.knownCrawlers[uaStr]['hits'] += 1
-            self.server.knownCrawlers[uaStr]['lastseen'] = currTime
+            self.server.knownCrawlers[uaStr]['lastseen'] = curr_time
         else:
             self.server.knownCrawlers[uaStr] = {
-                "lastseen": currTime,
+                "lastseen": curr_time,
                 "hits": 1
             }
-        if currTime - self.server.lastKnownCrawler >= 30:
+        if curr_time - self.server.lastKnownCrawler >= 30:
             # remove any old observations
             removeCrawlers = []
             for ua, item in self.server.knownCrawlers.items():
-                if currTime - item['lastseen'] >= 60 * 60 * 24 * 30:
+                if curr_time - item['lastseen'] >= 60 * 60 * 24 * 30:
                     removeCrawlers.append(ua)
             for ua in removeCrawlers:
                 del self.server.knownCrawlers[ua]
             # save the list of crawlers
             saveJson(self.server.knownCrawlers,
                      self.server.base_dir + '/accounts/knownCrawlers.json')
-        self.server.lastKnownCrawler = currTime
+        self.server.lastKnownCrawler = curr_time
 
     def _get_instance_url(self, callingDomain: str) -> str:
         """Returns the URL for this instance
@@ -13039,10 +13039,10 @@ class PubServer(BaseHTTPRequestHandler):
         if not isModerator(base_dir, nickname):
             return False
         crawlersList = []
-        currTime = int(time.time())
+        curr_time = int(time.time())
         recentCrawlers = 60 * 60 * 24 * 30
         for uaStr, item in knownCrawlers.items():
-            if item['lastseen'] - currTime < recentCrawlers:
+            if item['lastseen'] - curr_time < recentCrawlers:
                 hitsStr = str(item['hits']).zfill(8)
                 crawlersList.append(hitsStr + ' ' + uaStr)
         crawlersList.sort(reverse=True)
@@ -14879,16 +14879,16 @@ class PubServer(BaseHTTPRequestHandler):
         # This busy state helps to avoid flooding
         # Resources which are expected to be called from a web page
         # should be above this
-        currTimeGET = int(time.time() * 1000)
+        curr_timeGET = int(time.time() * 1000)
         if self.server.GETbusy:
-            if currTimeGET - self.server.lastGET < 500:
+            if curr_timeGET - self.server.lastGET < 500:
                 if self.server.debug:
                     print('DEBUG: GET Busy')
                 self.send_response(429)
                 self.end_headers()
                 return
         self.server.GETbusy = True
-        self.server.lastGET = currTimeGET
+        self.server.lastGET = curr_timeGET
 
         # returns after this point should set GETbusy to False
@@ -17510,14 +17510,14 @@ class PubServer(BaseHTTPRequestHandler):
             self._400()
             return
 
-        currTimePOST = int(time.time() * 1000)
+        curr_timePOST = int(time.time() * 1000)
         if self.server.POSTbusy:
-            if currTimePOST - self.server.lastPOST < 500:
+            if curr_timePOST - self.server.lastPOST < 500:
                 self.send_response(429)
                 self.end_headers()
                 return
         self.server.POSTbusy = True
-        self.server.lastPOST = currTimePOST
+        self.server.lastPOST = curr_timePOST
 
         uaStr = self._getUserAgent()
diff --git a/desktop_client.py b/desktop_client.py
index 3fd54c12f..137389974 100644
--- a/desktop_client.py
+++ b/desktop_client.py
@@ -1366,7 +1366,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
     _sayCommand(sayStr, sayStr, screenreader,
                 system_language, espeak)
 
-    currTimeline = 'inbox'
+    curr_timeline = 'inbox'
     pageNumber = 1
 
     post_json_object = {}
@@ -1445,7 +1445,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
         boxJson = c2sBoxJson(base_dir, session,
                              nickname, password,
                              domain, port, http_prefix,
-                             currTimeline, pageNumber,
+                             curr_timeline, pageNumber,
                              debug, signing_priv_key_pem)
 
         followRequestsJson = \
@@ -1457,7 +1457,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
                               debug, __version__, signing_priv_key_pem)
 
-        if not (currTimeline == 'inbox' and pageNumber == 1):
+        if not (curr_timeline == 'inbox' and pageNumber == 1):
             # monitor the inbox to generate notifications
             inboxJson = c2sBoxJson(base_dir, session,
                                    nickname, password,
                                    domain, port, http_prefix,
@@ -1492,7 +1492,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
                 if timelineFirstId != prevTimelineFirstId:
                     _desktopClearScreen()
                     _desktopShowBox(indent, followRequestsJson,
-                                    yourActor, currTimeline, boxJson,
+                                    yourActor, curr_timeline, boxJson,
                                     translate, None,
                                     system_language, espeak,
                                     pageNumber,
@@ -1535,15 +1535,15 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
         elif commandStr.startswith('show dm'):
             pageNumber = 1
             prevTimelineFirstId = ''
-            currTimeline = 'dm'
+            curr_timeline = 'dm'
             boxJson = c2sBoxJson(base_dir, session,
                                  nickname, password,
                                  domain, port, http_prefix,
-                                 currTimeline, pageNumber,
+                                 curr_timeline, pageNumber,
                                  debug, signing_priv_key_pem)
             if boxJson:
                 _desktopShowBox(indent, followRequestsJson,
-                                yourActor, currTimeline, boxJson,
+                                yourActor, curr_timeline, boxJson,
                                 translate, screenreader,
                                 system_language, espeak,
                                 pageNumber,
@@ -1552,15 +1552,15 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
         elif commandStr.startswith('show rep'):
            pageNumber = 1
            prevTimelineFirstId = ''
-            currTimeline = 'tlreplies'
+            curr_timeline = 'tlreplies'
            boxJson = c2sBoxJson(base_dir, session,
                                 nickname, password,
                                 domain, port, http_prefix,
-                                 currTimeline, pageNumber,
+                                 curr_timeline, pageNumber,
                                 debug, signing_priv_key_pem)
            if boxJson:
                _desktopShowBox(indent, followRequestsJson,
-                                yourActor, currTimeline, boxJson,
+                                yourActor, curr_timeline, boxJson,
                                translate, screenreader,
                                system_language, espeak,
                                pageNumber,
@@ -1570,15 +1570,15 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
         elif commandStr.startswith('show b'):
             pageNumber = 1
             prevTimelineFirstId = ''
-            currTimeline = 'tlbookmarks'
+            curr_timeline = 'tlbookmarks'
             boxJson = c2sBoxJson(base_dir, session,
                                  nickname, password,
                                  domain, port, http_prefix,
-                                 currTimeline, pageNumber,
+                                 curr_timeline, pageNumber,
                                  debug, signing_priv_key_pem)
             if boxJson:
                 _desktopShowBox(indent, followRequestsJson,
-                                yourActor, currTimeline, boxJson,
+                                yourActor, curr_timeline, boxJson,
                                 translate, screenreader,
                                 system_language, espeak,
                                 pageNumber,
@@ -1589,15 +1589,15 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
               commandStr.startswith('show out')):
             pageNumber = 1
             prevTimelineFirstId = ''
-            currTimeline = 'outbox'
+            curr_timeline = 'outbox'
             boxJson = c2sBoxJson(base_dir, session,
                                  nickname, password,
                                  domain, port, http_prefix,
-                                 currTimeline, pageNumber,
+                                 curr_timeline, pageNumber,
                                  debug, signing_priv_key_pem)
             if boxJson:
                 _desktopShowBox(indent, followRequestsJson,
-                                yourActor, currTimeline, boxJson,
+                                yourActor, curr_timeline, boxJson,
                                 translate, screenreader,
                                 system_language, espeak,
                                 pageNumber,
@@ -1606,7 +1606,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
               commandStr == 'clear'):
             pageNumber = 1
             prevTimelineFirstId = ''
-            currTimeline = 'inbox'
+            curr_timeline = 'inbox'
             refreshTimeline = True
         elif commandStr.startswith('next'):
             pageNumber += 1
@@ -1620,11 +1620,11 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
             boxJson = c2sBoxJson(base_dir, session,
                                  nickname, password,
                                  domain, port, http_prefix,
-                                 currTimeline, pageNumber,
+                                 curr_timeline, pageNumber,
                                  debug, signing_priv_key_pem)
             if boxJson:
                 _desktopShowBox(indent, followRequestsJson,
-                                yourActor, currTimeline, boxJson,
+                                yourActor, curr_timeline, boxJson,
                                 translate, screenreader,
                                 system_language, espeak,
                                 pageNumber,
@@ -1640,7 +1640,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
                 postIndex = int(postIndexStr)
                 post_json_object = \
                     _readLocalBoxPost(session, nickname, domain,
-                                      http_prefix, base_dir, currTimeline,
+                                      http_prefix, base_dir, curr_timeline,
                                       pageNumber, postIndex, boxJson,
                                       system_language, screenreader,
                                       espeak, translate, yourActor,
@@ -1663,7 +1663,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
                 actor_json = \
                     _desktopShowProfile(session, nickname, domain,
                                         http_prefix, base_dir,
-                                        currTimeline,
+                                        curr_timeline,
                                         pageNumber, postIndex, boxJson,
                                         system_language, screenreader,
@@ -1681,7 +1681,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
                     _desktopShowBanner()
                 _desktopShowProfileFromHandle(session, nickname, domain,
                                               http_prefix, base_dir,
-                                              currTimeline,
+                                              curr_timeline,
                                               profileHandle,
                                               system_language, screenreader,
@@ -1703,7 +1703,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
                 actor_json = \
                     _desktopShowProfile(session, nickname, domain,
                                         http_prefix, base_dir,
-                                        currTimeline,
+                                        curr_timeline,
                                         pageNumber, postIndex, boxJson,
                                         system_language, screenreader,
@@ -2511,7 +2511,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
         if refreshTimeline:
             if boxJson:
                 _desktopShowBox(indent, followRequestsJson,
-                                yourActor, currTimeline, boxJson,
+                                yourActor, curr_timeline, boxJson,
                                 translate, screenreader,
                                 system_language, espeak,
                                 pageNumber,
diff --git a/httpsig.py b/httpsig.py
index 6a00a60c0..9408a2c4e 100644
--- a/httpsig.py
+++ b/httpsig.py
@@ -162,12 +162,12 @@ def signPostHeadersNew(dateStr: str, privateKeyPem: str,
     timeFormat = "%a, %d %b %Y %H:%M:%S %Z"
     if not dateStr:
-        currTime = gmtime()
-        dateStr = strftime(timeFormat, currTime)
+        curr_time = gmtime()
+        dateStr = strftime(timeFormat, curr_time)
     else:
-        currTime = datetime.datetime.strptime(dateStr, timeFormat)
+        curr_time = datetime.datetime.strptime(dateStr, timeFormat)
     secondsSinceEpoch = \
-        int((currTime - datetime.datetime(1970, 1, 1)).total_seconds())
+        int((curr_time - datetime.datetime(1970, 1, 1)).total_seconds())
     keyID = local_actor_url(http_prefix, nickname, domain) + '#main-key'
     if not messageBodyJsonStr:
         headers = {
diff --git a/inbox.py b/inbox.py
index 4a8a11a18..fd18a40be 100644
--- a/inbox.py
+++ b/inbox.py
@@ -533,12 +533,12 @@ def savePostToInboxQueue(base_dir: str, http_prefix: str,
             return None
     originalPostId = removeIdEnding(post_json_object['id'])
 
-    currTime = datetime.datetime.utcnow()
+    curr_time = datetime.datetime.utcnow()
 
     postId = None
     if post_json_object.get('id'):
         postId = removeIdEnding(post_json_object['id'])
-        published = currTime.strftime("%Y-%m-%dT%H:%M:%SZ")
+        published = curr_time.strftime("%Y-%m-%dT%H:%M:%SZ")
     if not postId:
         statusNumber, published = getStatusNumber()
         if actor:
@@ -2780,8 +2780,8 @@ def _updateLastSeen(base_dir: str, handle: str, actor: str) -> None:
     if not os.path.isdir(lastSeenPath):
         os.mkdir(lastSeenPath)
     lastSeenFilename = lastSeenPath + '/' + actor.replace('/', '#') + '.txt'
-    currTime = datetime.datetime.utcnow()
-    daysSinceEpoch = (currTime - datetime.datetime(1970, 1, 1)).days
+    curr_time = datetime.datetime.utcnow()
+    daysSinceEpoch = (curr_time - datetime.datetime(1970, 1, 1)).days
     # has the value changed?
     if os.path.isfile(lastSeenFilename):
         with open(lastSeenFilename, 'r') as lastSeenFile:
@@ -2815,12 +2815,12 @@ def _bounceDM(senderPostId: str, session, http_prefix: str,
     # Don't send out bounce messages too frequently.
     # Otherwise an adversary could try to DoS your instance
     # by continuously sending DMs to you
-    currTime = int(time.time())
-    if currTime - lastBounceMessage[0] < 60:
+    curr_time = int(time.time())
+    if curr_time - lastBounceMessage[0] < 60:
         return False
 
     # record the last time that a bounce was generated
-    lastBounceMessage[0] = currTime
+    lastBounceMessage[0] = curr_time
 
     senderNickname = sendingHandle.split('@')[0]
     group_account = False
@@ -4152,15 +4152,15 @@ def runInboxQueue(recentPostsCache: {}, max_recent_posts: int,
                 _restoreQueueItems(base_dir, queue)
             continue
 
-        currTime = int(time.time())
+        curr_time = int(time.time())
 
         # recreate the session periodically
-        if not session or currTime - session_last_update > 21600:
+        if not session or curr_time - session_last_update > 21600:
             print('Regenerating inbox queue session at 6hr interval')
             session = createSession(proxy_type)
             if not session:
                 continue
-            session_last_update = currTime
+            session_last_update = curr_time
 
         # oldest item first
         queue.sort()
@@ -4193,14 +4193,14 @@ def runInboxQueue(recentPostsCache: {}, max_recent_posts: int,
             continue
 
         # clear the daily quotas for maximum numbers of received posts
-        if currTime - quotasLastUpdateDaily > 60 * 60 * 24:
+        if curr_time - quotasLastUpdateDaily > 60 * 60 * 24:
             quotasDaily = {
                 'domains': {},
                 'accounts': {}
             }
-            quotasLastUpdateDaily = currTime
+            quotasLastUpdateDaily = curr_time
 
-        if currTime - quotasLastUpdatePerMin > 60:
+        if curr_time - quotasLastUpdatePerMin > 60:
             # clear the per minute quotas for maximum numbers of received posts
             quotasPerMin = {
                 'domains': {},
@@ -4211,7 +4211,7 @@ def runInboxQueue(recentPostsCache: {}, max_recent_posts: int,
             if verifyAllSigs is not None:
                 verify_all_signatures = verifyAllSigs
             # change the last time that this was done
-            quotasLastUpdatePerMin = currTime
+            quotasLastUpdatePerMin = curr_time
 
         if _inboxQuotaExceeded(queue, queueFilename,
                                queueJson, quotasDaily, quotasPerMin,
diff --git a/media.py b/media.py
index c61b69a2c..a9b8ef65f 100644
--- a/media.py
+++ b/media.py
@@ -131,13 +131,13 @@ def _spoofMetaData(base_dir: str, nickname: str, domain: str,
     if os.path.isfile('/usr/bin/exiftool'):
         print('Spoofing metadata in ' + outputFilename + ' using exiftool')
-        currTimeAdjusted = \
+        curr_timeAdjusted = \
             datetime.datetime.utcnow() - \
             datetime.timedelta(minutes=randint(2, 120))
-        published = currTimeAdjusted.strftime("%Y:%m:%d %H:%M:%S+00:00")
+        published = curr_timeAdjusted.strftime("%Y:%m:%d %H:%M:%S+00:00")
         (latitude, longitude, latitudeRef, longitudeRef,
          camMake, camModel, camSerialNumber) = \
-            spoofGeolocation(base_dir, spoofCity, currTimeAdjusted,
+            spoofGeolocation(base_dir, spoofCity, curr_timeAdjusted,
                              decoySeed, None, None)
         if os.system('exiftool -artist=@"' + nickname + '@' + domain + '" ' +
                      '-Make="' + camMake + '" ' +
@@ -236,8 +236,8 @@ def createMediaDirs(base_dir: str, mediaPath: str) -> None:
 
 
 def getMediaPath() -> str:
-    currTime = datetime.datetime.utcnow()
-    weeksSinceEpoch = int((currTime - datetime.datetime(1970, 1, 1)).days / 7)
+    curr_time = datetime.datetime.utcnow()
+    weeksSinceEpoch = int((curr_time - datetime.datetime(1970, 1, 1)).days / 7)
     return 'media/' + str(weeksSinceEpoch)
@@ -371,8 +371,8 @@ def archiveMedia(base_dir: str, archive_directory: str, maxWeeks: int) -> None:
     if maxWeeks == 0:
         return
 
-    currTime = datetime.datetime.utcnow()
-    weeksSinceEpoch = int((currTime - datetime.datetime(1970, 1, 1)).days/7)
+    curr_time = datetime.datetime.utcnow()
+    weeksSinceEpoch = int((curr_time - datetime.datetime(1970, 1, 1)).days/7)
     minWeek = weeksSinceEpoch - maxWeeks
 
     if archive_directory:
diff --git a/newsdaemon.py b/newsdaemon.py
index d9339a07e..adf18c617 100644
--- a/newsdaemon.py
+++ b/newsdaemon.py
@@ -655,8 +655,8 @@ def _convertRSStoActivityPub(base_dir: str, http_prefix: str,
         blog['news'] = True
 
         # note the time of arrival
-        currTime = datetime.datetime.utcnow()
-        blog['object']['arrived'] = currTime.strftime("%Y-%m-%dT%H:%M:%SZ")
+        curr_time = datetime.datetime.utcnow()
+        blog['object']['arrived'] = curr_time.strftime("%Y-%m-%dT%H:%M:%SZ")
 
         # change the id, based upon the published time
         blog['object']['replies']['id'] = idStr
diff --git a/person.py b/person.py
index e6e3f5b01..bacbe011e 100644
--- a/person.py
+++ b/person.py
@@ -1321,9 +1321,9 @@ def isPersonSnoozed(base_dir: str, nickname: str, domain: str,
             # is there a time appended?
             if snoozedTimeStr.isdigit():
                 snoozedTime = int(snoozedTimeStr)
-                currTime = int(time.time())
+                curr_time = int(time.time())
                 # has the snooze timed out?
-                if int(currTime - snoozedTime) > 60 * 60 * 24:
+                if int(curr_time - snoozedTime) > 60 * 60 * 24:
                     replaceStr = line
             else:
                 replaceStr = line
diff --git a/posts.py b/posts.py
index 1f6c4ecef..6f213e090 100644
--- a/posts.py
+++ b/posts.py
@@ -1925,9 +1925,9 @@ def createQuestionPost(base_dir: str,
     message_json['object']['type'] = 'Question'
     message_json['object']['oneOf'] = []
     message_json['object']['votersCount'] = 0
-    currTime = datetime.datetime.utcnow()
+    curr_time = datetime.datetime.utcnow()
     daysSinceEpoch = \
-        int((currTime - datetime.datetime(1970, 1, 1)).days + durationDays)
+        int((curr_time - datetime.datetime(1970, 1, 1)).days + durationDays)
     endTime = datetime.datetime(1970, 1, 1) + \
         datetime.timedelta(daysSinceEpoch)
     message_json['object']['endTime'] = endTime.strftime("%Y-%m-%dT%H:%M:%SZ")
diff --git a/schedule.py b/schedule.py
index c13154111..6119e20be 100644
--- a/schedule.py
+++ b/schedule.py
@@ -28,8 +28,8 @@ def _updatePostSchedule(base_dir: str, handle: str, httpd,
         return
 
     # get the current time as an int
-    currTime = datetime.datetime.utcnow()
-    daysSinceEpoch = (currTime - datetime.datetime(1970, 1, 1)).days
+    curr_time = datetime.datetime.utcnow()
+    daysSinceEpoch = (curr_time - datetime.datetime(1970, 1, 1)).days
 
     scheduleDir = base_dir + '/accounts/' + handle + '/scheduled/'
     indexLines = []
@@ -64,9 +64,9 @@ def _updatePostSchedule(base_dir: str, handle: str, httpd,
             if daysSinceEpoch < postDaysSinceEpoch:
                 continue
             if daysSinceEpoch == postDaysSinceEpoch:
-                if currTime.time().hour < postTime.time().hour:
+                if curr_time.time().hour < postTime.time().hour:
                     continue
-                if currTime.time().minute < postTime.time().minute:
+                if curr_time.time().minute < postTime.time().minute:
                     continue
             if not os.path.isfile(postFilename):
                 print('WARN: schedule missing postFilename=' + postFilename)
diff --git a/shares.py b/shares.py
index 59638e9d3..865a3541a 100644
--- a/shares.py
+++ b/shares.py
@@ -424,10 +424,10 @@ def _expireSharesForAccount(base_dir: str, nickname: str, domain: str,
     sharesJson = loadJson(sharesFilename, 1, 2)
     if not sharesJson:
         return
-    currTime = int(time.time())
+    curr_time = int(time.time())
     deleteItemID = []
     for itemID, item in sharesJson.items():
-        if currTime > item['expire']:
+        if curr_time > item['expire']:
             deleteItemID.append(itemID)
     if not deleteItemID:
         return
@@ -1639,10 +1639,10 @@ def _generateNextSharesTokenUpdate(base_dir: str,
             if nextUpdateStr:
                 if nextUpdateStr.isdigit():
                     nextUpdateSec = int(nextUpdateStr)
-    currTime = int(time.time())
+    curr_time = int(time.time())
     updated = False
     if nextUpdateSec:
-        if currTime > nextUpdateSec:
+        if curr_time > nextUpdateSec:
             nextUpdateDays = randint(minDays, maxDays)
             nextUpdateInterval = int(60 * 60 * 24 * nextUpdateDays)
             nextUpdateSec += nextUpdateInterval
@@ -1650,7 +1650,7 @@ def _generateNextSharesTokenUpdate(base_dir: str,
     else:
         nextUpdateDays = randint(minDays, maxDays)
         nextUpdateInterval = int(60 * 60 * 24 * nextUpdateDays)
-        nextUpdateSec = currTime + nextUpdateInterval
+        nextUpdateSec = curr_time + nextUpdateInterval
         updated = True
     if updated:
         with open(tokenUpdateFilename, 'w+') as fp:
@@ -1685,8 +1685,8 @@ def _regenerateSharesToken(base_dir: str, domain_full: str,
                 nextUpdateSec = int(nextUpdateStr)
     if not nextUpdateSec:
         return
-    currTime = int(time.time())
-    if currTime <= nextUpdateSec:
+    curr_time = int(time.time())
+    if curr_time <= nextUpdateSec:
         return
     createSharedItemFederationToken(base_dir, domain_full, True, None)
     _generateNextSharesTokenUpdate(base_dir, minDays, maxDays)
@@ -1768,7 +1768,7 @@ def _dfcToSharesFormat(catalogJson: {},
     _loadDfcIds(base_dir, system_language, productType,
                 http_prefix, domain_full)
 
-    currTime = int(time.time())
+    curr_time = int(time.time())
     for item in catalogJson['DFC:supplies']:
         if not item.get('@id') or \
            not item.get('@type') or \
@@ -1793,7 +1793,7 @@ def _dfcToSharesFormat(catalogJson: {},
         expiryTimeSec = dateStringToSeconds(item['DFC:expiryDate'])
         if not expiryTimeSec:
             continue
-        if expiryTimeSec < currTime:
+        if expiryTimeSec < curr_time:
             # has expired
             continue
diff --git a/tests.py b/tests.py
index 214bb9065..3503ff768 100644
--- a/tests.py
+++ b/tests.py
@@ -5248,10 +5248,10 @@ def _testSpoofGeolocation() -> None:
     assert not pointInNogo(testSquare, -5, -5)
     assert not pointInNogo(testSquare, -5, 5)
     nogoList = []
-    currTime = datetime.datetime.utcnow()
+    curr_time = datetime.datetime.utcnow()
     decoySeed = 7634681
     cityRadius = 0.1
-    coords = spoofGeolocation('', 'los angeles', currTime,
+    coords = spoofGeolocation('', 'los angeles', curr_time,
                               decoySeed, citiesList, nogoList)
     assert coords[0] >= 34.0536909 - cityRadius
     assert coords[0] <= 34.0536909 + cityRadius
@@ -5263,7 +5263,7 @@ def _testSpoofGeolocation() -> None:
     assert len(coords[5]) > 4
     assert coords[6] > 0
     nogoList = []
-    coords = spoofGeolocation('', 'unknown', currTime,
+    coords = spoofGeolocation('', 'unknown', curr_time,
                               decoySeed, citiesList, nogoList)
     assert coords[0] >= 51.8744 - cityRadius
     assert coords[0] <= 51.8744 + cityRadius
@@ -5292,8 +5292,8 @@ def _testSpoofGeolocation() -> None:
         if hour < 10:
             hourStr = '0' + hourStr
         dateTimeStr = "2021-05-" + str(dayNumber) + " " + hourStr + ":14"
-        currTime = datetime.datetime.strptime(dateTimeStr, "%Y-%m-%d %H:%M")
-        coords = spoofGeolocation('', 'new york, usa', currTime,
+        curr_time = datetime.datetime.strptime(dateTimeStr, "%Y-%m-%d %H:%M")
+        coords = spoofGeolocation('', 'new york, usa', curr_time,
                                   decoySeed, citiesList, nogoList)
         longitude = coords[1]
         if coords[3] == 'W':
@@ -5329,8 +5329,8 @@ def _testSpoofGeolocation() -> None:
         if hour < 10:
             hourStr = '0' + hourStr
         dateTimeStr = "2021-05-" + str(dayNumber) + " " + hourStr + ":14"
-        currTime = datetime.datetime.strptime(dateTimeStr, "%Y-%m-%d %H:%M")
-        coords = spoofGeolocation('', 'london, england', currTime,
+        curr_time = datetime.datetime.strptime(dateTimeStr, "%Y-%m-%d %H:%M")
+        coords = spoofGeolocation('', 'london, england', curr_time,
                                   decoySeed, citiesList, nogoList)
         longitude = coords[1]
         if coords[3] == 'W':
@@ -5379,8 +5379,8 @@ def _testSpoofGeolocation() -> None:
         if hour < 10:
             hourStr = '0' + hourStr
         dateTimeStr = "2021-05-" + str(dayNumber) + " " + hourStr + ":14"
-        currTime = datetime.datetime.strptime(dateTimeStr, "%Y-%m-%d %H:%M")
-        coords = spoofGeolocation('', 'SAN FRANCISCO, USA', currTime,
+        curr_time = datetime.datetime.strptime(dateTimeStr, "%Y-%m-%d %H:%M")
+        coords = spoofGeolocation('', 'SAN FRANCISCO, USA', curr_time,
                                   decoySeed, citiesList, nogoList)
         longitude = coords[1]
         if coords[3] == 'W':
@@ -5433,8 +5433,8 @@ def _testSpoofGeolocation() -> None:
         if hour < 10:
             hourStr = '0' + hourStr
         dateTimeStr = "2021-05-" + str(dayNumber) + " " + hourStr + ":14"
-        currTime = datetime.datetime.strptime(dateTimeStr, "%Y-%m-%d %H:%M")
-        coords = spoofGeolocation('', 'SEATTLE, USA', currTime,
+        curr_time = datetime.datetime.strptime(dateTimeStr, "%Y-%m-%d %H:%M")
+        coords = spoofGeolocation('', 'SEATTLE, USA', curr_time,
                                   decoySeed, citiesList, nogoList)
         longitude = coords[1]
         if coords[3] == 'W':
diff --git a/threads.py b/threads.py
index 04d3e69f2..fede4f3d5 100644
--- a/threads.py
+++ b/threads.py
@@ -83,7 +83,7 @@ def removeDormantThreads(base_dir: str, threadsList: [], debug: bool,
     timeoutSecs = int(timeoutMins * 60)
     dormantThreads = []
-    currTime = datetime.datetime.utcnow()
+    curr_time = datetime.datetime.utcnow()
     changed = False
 
     # which threads are dormant?
@@ -93,19 +93,19 @@ def removeDormantThreads(base_dir: str, threadsList: [], debug: bool,
         if th.isStarted:
             if not th.is_alive():
-                if (currTime - th.startTime).total_seconds() > 10:
+                if (curr_time - th.startTime).total_seconds() > 10:
                     if debug:
                         print('DEBUG: ' +
                               'thread is not alive ten seconds after start')
                     removeThread = True
             # timeout for started threads
-            if (currTime - th.startTime).total_seconds() > timeoutSecs:
+            if (curr_time - th.startTime).total_seconds() > timeoutSecs:
                 if debug:
                     print('DEBUG: started thread timed out')
                 removeThread = True
         else:
             # timeout for threads which havn't been started
-            if (currTime - th.startTime).total_seconds() > timeoutSecs:
+            if (curr_time - th.startTime).total_seconds() > timeoutSecs:
                 if debug:
                     print('DEBUG: unstarted thread timed out')
                 removeThread = True
@@ -146,7 +146,7 @@ def removeDormantThreads(base_dir: str, threadsList: [], debug: bool,
         sendLogFilename = base_dir + '/send.csv'
         try:
             with open(sendLogFilename, 'a+') as logFile:
-                logFile.write(currTime.strftime("%Y-%m-%dT%H:%M:%SZ") +
+                logFile.write(curr_time.strftime("%Y-%m-%dT%H:%M:%SZ") +
                               ',' + str(noOfActiveThreads) +
                               ',' + str(len(threadsList)) + '\n')
         except OSError:
diff --git a/utils.py b/utils.py
index d689d5b63..085ae8702 100644
--- a/utils.py
+++ b/utils.py
@@ -260,26 +260,26 @@ def is_dormant(base_dir: str, nickname: str, domain: str, actor: str,
     """Is the given followed actor dormant, from the standpoint
     of the given account
     """
-    lastSeenFilename = acct_dir(base_dir, nickname, domain) + \
+    last_seen_filename = acct_dir(base_dir, nickname, domain) + \
         '/lastseen/' + actor.replace('/', '#') + '.txt'
-    if not os.path.isfile(lastSeenFilename):
+    if not os.path.isfile(last_seen_filename):
         return False
-    daysSinceEpochStr = None
+    days_since_epoch_str = None
     try:
-        with open(lastSeenFilename, 'r') as lastSeenFile:
-            daysSinceEpochStr = lastSeenFile.read()
+        with open(last_seen_filename, 'r') as last_seen_file:
+            days_since_epoch_str = last_seen_file.read()
     except OSError:
-        print('EX: failed to read last seen ' + lastSeenFilename)
+        print('EX: failed to read last seen ' + last_seen_filename)
         return False
-    if daysSinceEpochStr:
-        daysSinceEpoch = int(daysSinceEpochStr)
-        currTime = datetime.datetime.utcnow()
-        currDaysSinceEpoch = (currTime - datetime.datetime(1970, 1, 1)).days
+    if days_since_epoch_str:
+        days_since_epoch = int(days_since_epoch_str)
+        curr_time = datetime.datetime.utcnow()
+        currDaysSinceEpoch = (curr_time - datetime.datetime(1970, 1, 1)).days
         timeDiffMonths = \
-            int((currDaysSinceEpoch - daysSinceEpoch) / 30)
+            int((currDaysSinceEpoch - days_since_epoch) / 30)
         if timeDiffMonths >= dormant_months:
             return True
     return False
@@ -716,25 +716,25 @@ def getStatusNumber(publishedStr: str = None) -> (str, str):
     """Returns the status number and published date
     """
     if not publishedStr:
-        currTime = datetime.datetime.utcnow()
+        curr_time = datetime.datetime.utcnow()
     else:
-        currTime = \
+        curr_time = \
             datetime.datetime.strptime(publishedStr, '%Y-%m-%dT%H:%M:%SZ')
-    daysSinceEpoch = (currTime - datetime.datetime(1970, 1, 1)).days
+    days_since_epoch = (curr_time - datetime.datetime(1970, 1, 1)).days
     # status is the number of seconds since epoch
     statusNumber = \
-        str(((daysSinceEpoch * 24 * 60 * 60) +
-             (currTime.hour * 60 * 60) +
-             (currTime.minute * 60) +
-             currTime.second) * 1000 +
-            int(currTime.microsecond / 1000))
+        str(((days_since_epoch * 24 * 60 * 60) +
+             (curr_time.hour * 60 * 60) +
+             (curr_time.minute * 60) +
+             curr_time.second) * 1000 +
+            int(curr_time.microsecond / 1000))
     # See https://github.com/tootsuite/mastodon/blob/
     # 995f8b389a66ab76ec92d9a240de376f1fc13a38/lib/mastodon/snowflake.rb
     # use the leftover microseconds as the sequence number
-    sequenceId = currTime.microsecond % 1000
+    sequenceId = curr_time.microsecond % 1000
     # shift by 16bits "sequence data"
     statusNumber = str((int(statusNumber) << 16) + sequenceId)
-    published = currTime.strftime("%Y-%m-%dT%H:%M:%SZ")
+    published = curr_time.strftime("%Y-%m-%dT%H:%M:%SZ")
     return statusNumber, published
@@ -1920,7 +1920,7 @@ def noOfActiveAccountsMonthly(base_dir: str, months: int) -> bool:
     """Returns the number of accounts on the system this month
     """
     accountCtr = 0
-    currTime = int(time.time())
+    curr_time = int(time.time())
     monthSeconds = int(60*60*24*30*months)
     for subdir, dirs, files in os.walk(base_dir + '/accounts'):
         for account in dirs:
@@ -1933,7 +1933,7 @@ def noOfActiveAccountsMonthly(base_dir: str, months: int) -> bool:
                 with open(lastUsedFilename, 'r') as lastUsedFile:
                     lastUsed = lastUsedFile.read()
                     if lastUsed.isdigit():
-                        timeDiff = (currTime - int(lastUsed))
+                        timeDiff = (curr_time - int(lastUsed))
                         if timeDiff < monthSeconds:
                             accountCtr += 1
                             break
@@ -2518,9 +2518,9 @@ def isRecentPost(post_json_object: {}, maxDays: int) -> bool:
         return False
     if not isinstance(post_json_object['object']['published'], str):
         return False
-    currTime = datetime.datetime.utcnow()
-    daysSinceEpoch = (currTime - datetime.datetime(1970, 1, 1)).days
-    recently = daysSinceEpoch - maxDays
+    curr_time = datetime.datetime.utcnow()
+    days_since_epoch = (curr_time - datetime.datetime(1970, 1, 1)).days
+    recently = days_since_epoch - maxDays
 
     publishedDateStr = post_json_object['object']['published']
     try:
diff --git a/webapp_hashtagswarm.py b/webapp_hashtagswarm.py
index 865412d2c..fb9f8d4a4 100644
--- a/webapp_hashtagswarm.py
+++ b/webapp_hashtagswarm.py
@@ -65,8 +65,8 @@ def htmlHashTagSwarm(base_dir: str, actor: str, translate: {}) -> str:
     """Returns a tag swarm of today's hashtags
     """
    maxTagLength = 42
-    currTime = datetime.utcnow()
-    daysSinceEpoch = (currTime - datetime(1970, 1, 1)).days
+    curr_time = datetime.utcnow()
+    daysSinceEpoch = (curr_time - datetime(1970, 1, 1)).days
     daysSinceEpochStr = str(daysSinceEpoch) + ' '
     daysSinceEpochStr2 = str(daysSinceEpoch - 1) + ' '
     recently = daysSinceEpoch - 1
diff --git a/webapp_login.py b/webapp_login.py
index 9aea8238b..1752a3254 100644
--- a/webapp_login.py
+++ b/webapp_login.py
@@ -28,8 +28,8 @@ def htmlGetLoginCredentials(loginParams: str,
     if not loginParams.startswith('username='):
         return None, None, None
     # minimum time between login attempts
-    currTime = int(time.time())
-    if currTime < last_login_time+10:
+    curr_time = int(time.time())
+    if curr_time < last_login_time+10:
         return None, None, None
     if '&' not in loginParams:
         return None, None, None