mirror of https://gitlab.com/bashrc2/epicyon
Snake case
parent 3a9b6095ac
commit cdf7fbd8ac
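This commit renames local variables from camelCase to snake_case across several modules, for example currTime becomes curr_time and lastSeenFilename becomes last_seen_filename; the rename is applied incrementally, so mixed forms such as curr_timeStr and curr_timeGET remain. As a minimal sketch of the convention being applied (illustration only, not part of the commit; the helper name is hypothetical):

import re

def to_snake_case(name: str) -> str:
    # insert an underscore before each upper-case letter, then lower-case,
    # e.g. 'currTime' -> 'curr_time', 'lastSeenFilename' -> 'last_seen_filename'
    return re.sub(r'(?<!^)(?=[A-Z])', '_', name).lower()

assert to_snake_case('currTime') == 'curr_time'
assert to_snake_case('lastSeenFilename') == 'last_seen_filename'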
auth.py (6 changed lines)

@@ -275,13 +275,13 @@ def recordLoginFailure(base_dir: str, ipAddress: str,
 writeType = 'a+'
 if not os.path.isfile(failureLog):
 writeType = 'w+'
-currTime = datetime.datetime.utcnow()
-currTimeStr = currTime.strftime("%Y-%m-%d %H:%M:%SZ")
+curr_time = datetime.datetime.utcnow()
+curr_timeStr = curr_time.strftime("%Y-%m-%d %H:%M:%SZ")
 try:
 with open(failureLog, writeType) as fp:
 # here we use a similar format to an ssh log, so that
 # systems such as fail2ban can parse it
-fp.write(currTimeStr + ' ' +
+fp.write(curr_timeStr + ' ' +
 'ip-127-0-0-1 sshd[20710]: ' +
 'Disconnecting invalid user epicyon ' +
 ipAddress + ' port 443: ' +

blocking.py (12 changed lines)

@@ -272,11 +272,11 @@ def updateBlockedCache(base_dir: str,
 blockedCacheUpdateSecs: int) -> int:
 """Updates the cache of globally blocked domains held in memory
 """
-currTime = int(time.time())
-if blockedCacheLastUpdated > currTime:
+curr_time = int(time.time())
+if blockedCacheLastUpdated > curr_time:
 print('WARN: Cache updated in the future')
 blockedCacheLastUpdated = 0
-secondsSinceLastUpdate = currTime - blockedCacheLastUpdated
+secondsSinceLastUpdate = curr_time - blockedCacheLastUpdated
 if secondsSinceLastUpdate < blockedCacheUpdateSecs:
 return blockedCacheLastUpdated
 globalBlockingFilename = base_dir + '/accounts/blocking.txt'

@@ -293,7 +293,7 @@ def updateBlockedCache(base_dir: str,
 blockedCache += blockedLines
 except OSError as ex:
 print('EX: unable to read ' + globalBlockingFilename + ' ' + str(ex))
-return currTime
+return curr_time

 def _getShortDomain(domain: str) -> str:

@@ -963,8 +963,8 @@ def broch_modeLapses(base_dir: str, lapseDays: int) -> bool:
 return False
 if not modifiedDate:
 return False
-currTime = datetime.datetime.utcnow()
-daysSinceBroch = (currTime - modifiedDate).days
+curr_time = datetime.datetime.utcnow()
+daysSinceBroch = (curr_time - modifiedDate).days
 if daysSinceBroch >= lapseDays:
 removed = False
 try:

cache.py (14 changed lines)

@@ -59,10 +59,10 @@ def storePersonInCache(base_dir: str, personUrl: str,
 # This is not an actor or person account
 return

-currTime = datetime.datetime.utcnow()
+curr_time = datetime.datetime.utcnow()
 person_cache[personUrl] = {
 "actor": personJson,
-"timestamp": currTime.strftime("%Y-%m-%dT%H:%M:%SZ")
+"timestamp": curr_time.strftime("%Y-%m-%dT%H:%M:%SZ")
 }
 if not base_dir:
 return

@@ -98,9 +98,9 @@ def getPersonFromCache(base_dir: str, personUrl: str, person_cache: {},
 if person_cache.get(personUrl):
 if not loadedFromFile:
 # update the timestamp for the last time the actor was retrieved
-currTime = datetime.datetime.utcnow()
-currTimeStr = currTime.strftime("%Y-%m-%dT%H:%M:%SZ")
-person_cache[personUrl]['timestamp'] = currTimeStr
+curr_time = datetime.datetime.utcnow()
+curr_timeStr = curr_time.strftime("%Y-%m-%dT%H:%M:%SZ")
+person_cache[personUrl]['timestamp'] = curr_timeStr
 return person_cache[personUrl]['actor']
 return None

@@ -108,12 +108,12 @@ def getPersonFromCache(base_dir: str, personUrl: str, person_cache: {},
 def expirePersonCache(person_cache: {}):
 """Expires old entries from the cache in memory
 """
-currTime = datetime.datetime.utcnow()
+curr_time = datetime.datetime.utcnow()
 removals = []
 for personUrl, cacheJson in person_cache.items():
 cacheTime = datetime.datetime.strptime(cacheJson['timestamp'],
 "%Y-%m-%dT%H:%M:%SZ")
-daysSinceCached = (currTime - cacheTime).days
+daysSinceCached = (curr_time - cacheTime).days
 if daysSinceCached > 2:
 removals.append(personUrl)
 if len(removals) > 0:

@@ -42,8 +42,8 @@ def getHashtagCategories(base_dir: str,
 hashtagCategories = {}

 if recent:
-currTime = datetime.datetime.utcnow()
-daysSinceEpoch = (currTime - datetime.datetime(1970, 1, 1)).days
+curr_time = datetime.datetime.utcnow()
+daysSinceEpoch = (curr_time - datetime.datetime(1970, 1, 1)).days
 recently = daysSinceEpoch - 1

 for subdir, dirs, files in os.walk(base_dir + '/tags'):

city.py (14 changed lines)

@@ -88,7 +88,7 @@ def _getDecoyCamera(decoySeed: int) -> (str, str, int):
 return cameras[index][0], cameras[index][1], serialNumber


-def _getCityPulse(currTimeOfDay, decoySeed: int) -> (float, float):
+def _getCityPulse(curr_timeOfDay, decoySeed: int) -> (float, float):
 """This simulates expected average patterns of movement in a city.
 Jane or Joe average lives and works in the city, commuting in
 and out of the central district for work. They have a unique

@@ -101,11 +101,11 @@ def _getCityPulse(currTimeOfDay, decoySeed: int) -> (float, float):
 variance = 3
 busyStates = (PERSON_WORK, PERSON_SHOP, PERSON_PLAY, PERSON_PARTY)
 dataDecoyState = PERSON_SLEEP
-weekday = currTimeOfDay.weekday()
+weekday = curr_timeOfDay.weekday()
 minHour = 7 + randint(0, variance)
 maxHour = 17 + randint(0, variance)
-if currTimeOfDay.hour > minHour:
-if currTimeOfDay.hour <= maxHour:
+if curr_timeOfDay.hour > minHour:
+if curr_timeOfDay.hour <= maxHour:
 if weekday < 5:
 dataDecoyState = PERSON_WORK
 elif weekday == 5:

@@ -177,7 +177,7 @@ def parseNogoString(nogoLine: str) -> []:


 def spoofGeolocation(base_dir: str,
-city: str, currTime, decoySeed: int,
+city: str, curr_time, decoySeed: int,
 citiesList: [],
 nogoList: []) -> (float, float, str, str,
 str, str, int):

@@ -255,7 +255,7 @@ def spoofGeolocation(base_dir: str,
 approxTimeZone = int(longitude / 15.0)
 if longdirection == 'E':
 approxTimeZone = -approxTimeZone
-currTimeAdjusted = currTime - \
+curr_timeAdjusted = curr_time - \
 datetime.timedelta(hours=approxTimeZone)
 camMake, camModel, camSerialNumber = \
 _getDecoyCamera(decoySeed)

@@ -264,7 +264,7 @@ def spoofGeolocation(base_dir: str,
 while not validCoord:
 # patterns of activity change in the city over time
 (distanceFromCityCenter, angleRadians) = \
-_getCityPulse(currTimeAdjusted, decoySeed + seedOffset)
+_getCityPulse(curr_timeAdjusted, decoySeed + seedOffset)
 # The city radius value is in longitude and the reference
 # is Manchester. Adjust for the radius of the chosen city.
 if areaKm2 > 1:

daemon.py (28 changed lines)

@@ -410,28 +410,28 @@ class PubServer(BaseHTTPRequestHandler):
 if not uaStr:
 return

-currTime = int(time.time())
+curr_time = int(time.time())
 if self.server.knownCrawlers.get(uaStr):
 self.server.knownCrawlers[uaStr]['hits'] += 1
-self.server.knownCrawlers[uaStr]['lastseen'] = currTime
+self.server.knownCrawlers[uaStr]['lastseen'] = curr_time
 else:
 self.server.knownCrawlers[uaStr] = {
-"lastseen": currTime,
+"lastseen": curr_time,
 "hits": 1
 }

-if currTime - self.server.lastKnownCrawler >= 30:
+if curr_time - self.server.lastKnownCrawler >= 30:
 # remove any old observations
 removeCrawlers = []
 for ua, item in self.server.knownCrawlers.items():
-if currTime - item['lastseen'] >= 60 * 60 * 24 * 30:
+if curr_time - item['lastseen'] >= 60 * 60 * 24 * 30:
 removeCrawlers.append(ua)
 for ua in removeCrawlers:
 del self.server.knownCrawlers[ua]
 # save the list of crawlers
 saveJson(self.server.knownCrawlers,
 self.server.base_dir + '/accounts/knownCrawlers.json')
-self.server.lastKnownCrawler = currTime
+self.server.lastKnownCrawler = curr_time

 def _get_instance_url(self, callingDomain: str) -> str:
 """Returns the URL for this instance

@@ -13039,10 +13039,10 @@ class PubServer(BaseHTTPRequestHandler):
 if not isModerator(base_dir, nickname):
 return False
 crawlersList = []
-currTime = int(time.time())
+curr_time = int(time.time())
 recentCrawlers = 60 * 60 * 24 * 30
 for uaStr, item in knownCrawlers.items():
-if item['lastseen'] - currTime < recentCrawlers:
+if item['lastseen'] - curr_time < recentCrawlers:
 hitsStr = str(item['hits']).zfill(8)
 crawlersList.append(hitsStr + ' ' + uaStr)
 crawlersList.sort(reverse=True)

@@ -14879,16 +14879,16 @@ class PubServer(BaseHTTPRequestHandler):
 # This busy state helps to avoid flooding
 # Resources which are expected to be called from a web page
 # should be above this
-currTimeGET = int(time.time() * 1000)
+curr_timeGET = int(time.time() * 1000)
 if self.server.GETbusy:
-if currTimeGET - self.server.lastGET < 500:
+if curr_timeGET - self.server.lastGET < 500:
 if self.server.debug:
 print('DEBUG: GET Busy')
 self.send_response(429)
 self.end_headers()
 return
 self.server.GETbusy = True
-self.server.lastGET = currTimeGET
+self.server.lastGET = curr_timeGET

 # returns after this point should set GETbusy to False

@@ -17510,14 +17510,14 @@ class PubServer(BaseHTTPRequestHandler):
 self._400()
 return

-currTimePOST = int(time.time() * 1000)
+curr_timePOST = int(time.time() * 1000)
 if self.server.POSTbusy:
-if currTimePOST - self.server.lastPOST < 500:
+if curr_timePOST - self.server.lastPOST < 500:
 self.send_response(429)
 self.end_headers()
 return
 self.server.POSTbusy = True
-self.server.lastPOST = currTimePOST
+self.server.lastPOST = curr_timePOST

 uaStr = self._getUserAgent()

@@ -1366,7 +1366,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
 _sayCommand(sayStr, sayStr, screenreader,
 system_language, espeak)

-currTimeline = 'inbox'
+curr_timeline = 'inbox'
 pageNumber = 1

 post_json_object = {}

@@ -1445,7 +1445,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
 boxJson = c2sBoxJson(base_dir, session,
 nickname, password,
 domain, port, http_prefix,
-currTimeline, pageNumber,
+curr_timeline, pageNumber,
 debug, signing_priv_key_pem)

 followRequestsJson = \

@@ -1457,7 +1457,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
 debug, __version__,
 signing_priv_key_pem)

-if not (currTimeline == 'inbox' and pageNumber == 1):
+if not (curr_timeline == 'inbox' and pageNumber == 1):
 # monitor the inbox to generate notifications
 inboxJson = c2sBoxJson(base_dir, session,
 nickname, password,

@@ -1492,7 +1492,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
 if timelineFirstId != prevTimelineFirstId:
 _desktopClearScreen()
 _desktopShowBox(indent, followRequestsJson,
-yourActor, currTimeline, boxJson,
+yourActor, curr_timeline, boxJson,
 translate,
 None, system_language, espeak,
 pageNumber,

@@ -1535,15 +1535,15 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
 elif commandStr.startswith('show dm'):
 pageNumber = 1
 prevTimelineFirstId = ''
-currTimeline = 'dm'
+curr_timeline = 'dm'
 boxJson = c2sBoxJson(base_dir, session,
 nickname, password,
 domain, port, http_prefix,
-currTimeline, pageNumber,
+curr_timeline, pageNumber,
 debug, signing_priv_key_pem)
 if boxJson:
 _desktopShowBox(indent, followRequestsJson,
-yourActor, currTimeline, boxJson,
+yourActor, curr_timeline, boxJson,
 translate,
 screenreader, system_language, espeak,
 pageNumber,

@@ -1552,15 +1552,15 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
 elif commandStr.startswith('show rep'):
 pageNumber = 1
 prevTimelineFirstId = ''
-currTimeline = 'tlreplies'
+curr_timeline = 'tlreplies'
 boxJson = c2sBoxJson(base_dir, session,
 nickname, password,
 domain, port, http_prefix,
-currTimeline, pageNumber,
+curr_timeline, pageNumber,
 debug, signing_priv_key_pem)
 if boxJson:
 _desktopShowBox(indent, followRequestsJson,
-yourActor, currTimeline, boxJson,
+yourActor, curr_timeline, boxJson,
 translate,
 screenreader, system_language, espeak,
 pageNumber,

@@ -1570,15 +1570,15 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
 elif commandStr.startswith('show b'):
 pageNumber = 1
 prevTimelineFirstId = ''
-currTimeline = 'tlbookmarks'
+curr_timeline = 'tlbookmarks'
 boxJson = c2sBoxJson(base_dir, session,
 nickname, password,
 domain, port, http_prefix,
-currTimeline, pageNumber,
+curr_timeline, pageNumber,
 debug, signing_priv_key_pem)
 if boxJson:
 _desktopShowBox(indent, followRequestsJson,
-yourActor, currTimeline, boxJson,
+yourActor, curr_timeline, boxJson,
 translate,
 screenreader, system_language, espeak,
 pageNumber,

@@ -1589,15 +1589,15 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
 commandStr.startswith('show out')):
 pageNumber = 1
 prevTimelineFirstId = ''
-currTimeline = 'outbox'
+curr_timeline = 'outbox'
 boxJson = c2sBoxJson(base_dir, session,
 nickname, password,
 domain, port, http_prefix,
-currTimeline, pageNumber,
+curr_timeline, pageNumber,
 debug, signing_priv_key_pem)
 if boxJson:
 _desktopShowBox(indent, followRequestsJson,
-yourActor, currTimeline, boxJson,
+yourActor, curr_timeline, boxJson,
 translate,
 screenreader, system_language, espeak,
 pageNumber,

@@ -1606,7 +1606,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
 commandStr == 'clear'):
 pageNumber = 1
 prevTimelineFirstId = ''
-currTimeline = 'inbox'
+curr_timeline = 'inbox'
 refreshTimeline = True
 elif commandStr.startswith('next'):
 pageNumber += 1

@@ -1620,11 +1620,11 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
 boxJson = c2sBoxJson(base_dir, session,
 nickname, password,
 domain, port, http_prefix,
-currTimeline, pageNumber,
+curr_timeline, pageNumber,
 debug, signing_priv_key_pem)
 if boxJson:
 _desktopShowBox(indent, followRequestsJson,
-yourActor, currTimeline, boxJson,
+yourActor, curr_timeline, boxJson,
 translate,
 screenreader, system_language, espeak,
 pageNumber,

@@ -1640,7 +1640,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
 postIndex = int(postIndexStr)
 post_json_object = \
 _readLocalBoxPost(session, nickname, domain,
-http_prefix, base_dir, currTimeline,
+http_prefix, base_dir, curr_timeline,
 pageNumber, postIndex, boxJson,
 system_language, screenreader,
 espeak, translate, yourActor,

@@ -1663,7 +1663,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
 actor_json = \
 _desktopShowProfile(session, nickname, domain,
 http_prefix, base_dir,
-currTimeline,
+curr_timeline,
 pageNumber, postIndex,
 boxJson,
 system_language, screenreader,

@@ -1681,7 +1681,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
 _desktopShowBanner()
 _desktopShowProfileFromHandle(session, nickname, domain,
 http_prefix, base_dir,
-currTimeline,
+curr_timeline,
 profileHandle,
 system_language,
 screenreader,

@@ -1703,7 +1703,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
 actor_json = \
 _desktopShowProfile(session, nickname, domain,
 http_prefix, base_dir,
-currTimeline,
+curr_timeline,
 pageNumber, postIndex,
 boxJson,
 system_language, screenreader,

@@ -2511,7 +2511,7 @@ def runDesktopClient(base_dir: str, proxy_type: str, http_prefix: str,
 if refreshTimeline:
 if boxJson:
 _desktopShowBox(indent, followRequestsJson,
-yourActor, currTimeline, boxJson,
+yourActor, curr_timeline, boxJson,
 translate,
 screenreader, system_language,
 espeak, pageNumber,

@@ -162,12 +162,12 @@ def signPostHeadersNew(dateStr: str, privateKeyPem: str,

 timeFormat = "%a, %d %b %Y %H:%M:%S %Z"
 if not dateStr:
-currTime = gmtime()
-dateStr = strftime(timeFormat, currTime)
+curr_time = gmtime()
+dateStr = strftime(timeFormat, curr_time)
 else:
-currTime = datetime.datetime.strptime(dateStr, timeFormat)
+curr_time = datetime.datetime.strptime(dateStr, timeFormat)
 secondsSinceEpoch = \
-int((currTime - datetime.datetime(1970, 1, 1)).total_seconds())
+int((curr_time - datetime.datetime(1970, 1, 1)).total_seconds())
 keyID = local_actor_url(http_prefix, nickname, domain) + '#main-key'
 if not messageBodyJsonStr:
 headers = {

inbox.py (28 changed lines)

@@ -533,12 +533,12 @@ def savePostToInboxQueue(base_dir: str, http_prefix: str,
 return None
 originalPostId = removeIdEnding(post_json_object['id'])

-currTime = datetime.datetime.utcnow()
+curr_time = datetime.datetime.utcnow()

 postId = None
 if post_json_object.get('id'):
 postId = removeIdEnding(post_json_object['id'])
-published = currTime.strftime("%Y-%m-%dT%H:%M:%SZ")
+published = curr_time.strftime("%Y-%m-%dT%H:%M:%SZ")
 if not postId:
 statusNumber, published = getStatusNumber()
 if actor:

@@ -2780,8 +2780,8 @@ def _updateLastSeen(base_dir: str, handle: str, actor: str) -> None:
 if not os.path.isdir(lastSeenPath):
 os.mkdir(lastSeenPath)
 lastSeenFilename = lastSeenPath + '/' + actor.replace('/', '#') + '.txt'
-currTime = datetime.datetime.utcnow()
-daysSinceEpoch = (currTime - datetime.datetime(1970, 1, 1)).days
+curr_time = datetime.datetime.utcnow()
+daysSinceEpoch = (curr_time - datetime.datetime(1970, 1, 1)).days
 # has the value changed?
 if os.path.isfile(lastSeenFilename):
 with open(lastSeenFilename, 'r') as lastSeenFile:

@@ -2815,12 +2815,12 @@ def _bounceDM(senderPostId: str, session, http_prefix: str,
 # Don't send out bounce messages too frequently.
 # Otherwise an adversary could try to DoS your instance
 # by continuously sending DMs to you
-currTime = int(time.time())
-if currTime - lastBounceMessage[0] < 60:
+curr_time = int(time.time())
+if curr_time - lastBounceMessage[0] < 60:
 return False

 # record the last time that a bounce was generated
-lastBounceMessage[0] = currTime
+lastBounceMessage[0] = curr_time

 senderNickname = sendingHandle.split('@')[0]
 group_account = False

@@ -4152,15 +4152,15 @@ def runInboxQueue(recentPostsCache: {}, max_recent_posts: int,
 _restoreQueueItems(base_dir, queue)
 continue

-currTime = int(time.time())
+curr_time = int(time.time())

 # recreate the session periodically
-if not session or currTime - session_last_update > 21600:
+if not session or curr_time - session_last_update > 21600:
 print('Regenerating inbox queue session at 6hr interval')
 session = createSession(proxy_type)
 if not session:
 continue
-session_last_update = currTime
+session_last_update = curr_time

 # oldest item first
 queue.sort()

@@ -4193,14 +4193,14 @@ def runInboxQueue(recentPostsCache: {}, max_recent_posts: int,
 continue

 # clear the daily quotas for maximum numbers of received posts
-if currTime - quotasLastUpdateDaily > 60 * 60 * 24:
+if curr_time - quotasLastUpdateDaily > 60 * 60 * 24:
 quotasDaily = {
 'domains': {},
 'accounts': {}
 }
-quotasLastUpdateDaily = currTime
+quotasLastUpdateDaily = curr_time

-if currTime - quotasLastUpdatePerMin > 60:
+if curr_time - quotasLastUpdatePerMin > 60:
 # clear the per minute quotas for maximum numbers of received posts
 quotasPerMin = {
 'domains': {},

@@ -4211,7 +4211,7 @@ def runInboxQueue(recentPostsCache: {}, max_recent_posts: int,
 if verifyAllSigs is not None:
 verify_all_signatures = verifyAllSigs
 # change the last time that this was done
-quotasLastUpdatePerMin = currTime
+quotasLastUpdatePerMin = curr_time

 if _inboxQuotaExceeded(queue, queueFilename,
 queueJson, quotasDaily, quotasPerMin,

media.py (14 changed lines)

@@ -131,13 +131,13 @@ def _spoofMetaData(base_dir: str, nickname: str, domain: str,

 if os.path.isfile('/usr/bin/exiftool'):
 print('Spoofing metadata in ' + outputFilename + ' using exiftool')
-currTimeAdjusted = \
+curr_timeAdjusted = \
 datetime.datetime.utcnow() - \
 datetime.timedelta(minutes=randint(2, 120))
-published = currTimeAdjusted.strftime("%Y:%m:%d %H:%M:%S+00:00")
+published = curr_timeAdjusted.strftime("%Y:%m:%d %H:%M:%S+00:00")
 (latitude, longitude, latitudeRef, longitudeRef,
 camMake, camModel, camSerialNumber) = \
-spoofGeolocation(base_dir, spoofCity, currTimeAdjusted,
+spoofGeolocation(base_dir, spoofCity, curr_timeAdjusted,
 decoySeed, None, None)
 if os.system('exiftool -artist=@"' + nickname + '@' + domain + '" ' +
 '-Make="' + camMake + '" ' +

@@ -236,8 +236,8 @@ def createMediaDirs(base_dir: str, mediaPath: str) -> None:


 def getMediaPath() -> str:
-currTime = datetime.datetime.utcnow()
-weeksSinceEpoch = int((currTime - datetime.datetime(1970, 1, 1)).days / 7)
+curr_time = datetime.datetime.utcnow()
+weeksSinceEpoch = int((curr_time - datetime.datetime(1970, 1, 1)).days / 7)
 return 'media/' + str(weeksSinceEpoch)

@@ -371,8 +371,8 @@ def archiveMedia(base_dir: str, archive_directory: str, maxWeeks: int) -> None:
 if maxWeeks == 0:
 return

-currTime = datetime.datetime.utcnow()
-weeksSinceEpoch = int((currTime - datetime.datetime(1970, 1, 1)).days/7)
+curr_time = datetime.datetime.utcnow()
+weeksSinceEpoch = int((curr_time - datetime.datetime(1970, 1, 1)).days/7)
 minWeek = weeksSinceEpoch - maxWeeks

 if archive_directory:

@@ -655,8 +655,8 @@ def _convertRSStoActivityPub(base_dir: str, http_prefix: str,
 blog['news'] = True

 # note the time of arrival
-currTime = datetime.datetime.utcnow()
-blog['object']['arrived'] = currTime.strftime("%Y-%m-%dT%H:%M:%SZ")
+curr_time = datetime.datetime.utcnow()
+blog['object']['arrived'] = curr_time.strftime("%Y-%m-%dT%H:%M:%SZ")

 # change the id, based upon the published time
 blog['object']['replies']['id'] = idStr

@@ -1321,9 +1321,9 @@ def isPersonSnoozed(base_dir: str, nickname: str, domain: str,
 # is there a time appended?
 if snoozedTimeStr.isdigit():
 snoozedTime = int(snoozedTimeStr)
-currTime = int(time.time())
+curr_time = int(time.time())
 # has the snooze timed out?
-if int(currTime - snoozedTime) > 60 * 60 * 24:
+if int(curr_time - snoozedTime) > 60 * 60 * 24:
 replaceStr = line
 else:
 replaceStr = line

posts.py (4 changed lines)

@@ -1925,9 +1925,9 @@ def createQuestionPost(base_dir: str,
 message_json['object']['type'] = 'Question'
 message_json['object']['oneOf'] = []
 message_json['object']['votersCount'] = 0
-currTime = datetime.datetime.utcnow()
+curr_time = datetime.datetime.utcnow()
 daysSinceEpoch = \
-int((currTime - datetime.datetime(1970, 1, 1)).days + durationDays)
+int((curr_time - datetime.datetime(1970, 1, 1)).days + durationDays)
 endTime = datetime.datetime(1970, 1, 1) + \
 datetime.timedelta(daysSinceEpoch)
 message_json['object']['endTime'] = endTime.strftime("%Y-%m-%dT%H:%M:%SZ")

@@ -28,8 +28,8 @@ def _updatePostSchedule(base_dir: str, handle: str, httpd,
 return

 # get the current time as an int
-currTime = datetime.datetime.utcnow()
-daysSinceEpoch = (currTime - datetime.datetime(1970, 1, 1)).days
+curr_time = datetime.datetime.utcnow()
+daysSinceEpoch = (curr_time - datetime.datetime(1970, 1, 1)).days

 scheduleDir = base_dir + '/accounts/' + handle + '/scheduled/'
 indexLines = []

@@ -64,9 +64,9 @@ def _updatePostSchedule(base_dir: str, handle: str, httpd,
 if daysSinceEpoch < postDaysSinceEpoch:
 continue
 if daysSinceEpoch == postDaysSinceEpoch:
-if currTime.time().hour < postTime.time().hour:
+if curr_time.time().hour < postTime.time().hour:
 continue
-if currTime.time().minute < postTime.time().minute:
+if curr_time.time().minute < postTime.time().minute:
 continue
 if not os.path.isfile(postFilename):
 print('WARN: schedule missing postFilename=' + postFilename)

shares.py (18 changed lines)

@@ -424,10 +424,10 @@ def _expireSharesForAccount(base_dir: str, nickname: str, domain: str,
 sharesJson = loadJson(sharesFilename, 1, 2)
 if not sharesJson:
 return
-currTime = int(time.time())
+curr_time = int(time.time())
 deleteItemID = []
 for itemID, item in sharesJson.items():
-if currTime > item['expire']:
+if curr_time > item['expire']:
 deleteItemID.append(itemID)
 if not deleteItemID:
 return

@@ -1639,10 +1639,10 @@ def _generateNextSharesTokenUpdate(base_dir: str,
 if nextUpdateStr:
 if nextUpdateStr.isdigit():
 nextUpdateSec = int(nextUpdateStr)
-currTime = int(time.time())
+curr_time = int(time.time())
 updated = False
 if nextUpdateSec:
-if currTime > nextUpdateSec:
+if curr_time > nextUpdateSec:
 nextUpdateDays = randint(minDays, maxDays)
 nextUpdateInterval = int(60 * 60 * 24 * nextUpdateDays)
 nextUpdateSec += nextUpdateInterval

@@ -1650,7 +1650,7 @@ def _generateNextSharesTokenUpdate(base_dir: str,
 else:
 nextUpdateDays = randint(minDays, maxDays)
 nextUpdateInterval = int(60 * 60 * 24 * nextUpdateDays)
-nextUpdateSec = currTime + nextUpdateInterval
+nextUpdateSec = curr_time + nextUpdateInterval
 updated = True
 if updated:
 with open(tokenUpdateFilename, 'w+') as fp:

@@ -1685,8 +1685,8 @@ def _regenerateSharesToken(base_dir: str, domain_full: str,
 nextUpdateSec = int(nextUpdateStr)
 if not nextUpdateSec:
 return
-currTime = int(time.time())
-if currTime <= nextUpdateSec:
+curr_time = int(time.time())
+if curr_time <= nextUpdateSec:
 return
 createSharedItemFederationToken(base_dir, domain_full, True, None)
 _generateNextSharesTokenUpdate(base_dir, minDays, maxDays)

@@ -1768,7 +1768,7 @@ def _dfcToSharesFormat(catalogJson: {},
 _loadDfcIds(base_dir, system_language, productType,
 http_prefix, domain_full)

-currTime = int(time.time())
+curr_time = int(time.time())
 for item in catalogJson['DFC:supplies']:
 if not item.get('@id') or \
 not item.get('@type') or \

@@ -1793,7 +1793,7 @@ def _dfcToSharesFormat(catalogJson: {},
 expiryTimeSec = dateStringToSeconds(item['DFC:expiryDate'])
 if not expiryTimeSec:
 continue
-if expiryTimeSec < currTime:
+if expiryTimeSec < curr_time:
 # has expired
 continue

tests.py (22 changed lines)

@@ -5248,10 +5248,10 @@ def _testSpoofGeolocation() -> None:
 assert not pointInNogo(testSquare, -5, -5)
 assert not pointInNogo(testSquare, -5, 5)
 nogoList = []
-currTime = datetime.datetime.utcnow()
+curr_time = datetime.datetime.utcnow()
 decoySeed = 7634681
 cityRadius = 0.1
-coords = spoofGeolocation('', 'los angeles', currTime,
+coords = spoofGeolocation('', 'los angeles', curr_time,
 decoySeed, citiesList, nogoList)
 assert coords[0] >= 34.0536909 - cityRadius
 assert coords[0] <= 34.0536909 + cityRadius

@@ -5263,7 +5263,7 @@ def _testSpoofGeolocation() -> None:
 assert len(coords[5]) > 4
 assert coords[6] > 0
 nogoList = []
-coords = spoofGeolocation('', 'unknown', currTime,
+coords = spoofGeolocation('', 'unknown', curr_time,
 decoySeed, citiesList, nogoList)
 assert coords[0] >= 51.8744 - cityRadius
 assert coords[0] <= 51.8744 + cityRadius

@@ -5292,8 +5292,8 @@ def _testSpoofGeolocation() -> None:
 if hour < 10:
 hourStr = '0' + hourStr
 dateTimeStr = "2021-05-" + str(dayNumber) + " " + hourStr + ":14"
-currTime = datetime.datetime.strptime(dateTimeStr, "%Y-%m-%d %H:%M")
-coords = spoofGeolocation('', 'new york, usa', currTime,
+curr_time = datetime.datetime.strptime(dateTimeStr, "%Y-%m-%d %H:%M")
+coords = spoofGeolocation('', 'new york, usa', curr_time,
 decoySeed, citiesList, nogoList)
 longitude = coords[1]
 if coords[3] == 'W':

@@ -5329,8 +5329,8 @@ def _testSpoofGeolocation() -> None:
 if hour < 10:
 hourStr = '0' + hourStr
 dateTimeStr = "2021-05-" + str(dayNumber) + " " + hourStr + ":14"
-currTime = datetime.datetime.strptime(dateTimeStr, "%Y-%m-%d %H:%M")
-coords = spoofGeolocation('', 'london, england', currTime,
+curr_time = datetime.datetime.strptime(dateTimeStr, "%Y-%m-%d %H:%M")
+coords = spoofGeolocation('', 'london, england', curr_time,
 decoySeed, citiesList, nogoList)
 longitude = coords[1]
 if coords[3] == 'W':

@@ -5379,8 +5379,8 @@ def _testSpoofGeolocation() -> None:
 if hour < 10:
 hourStr = '0' + hourStr
 dateTimeStr = "2021-05-" + str(dayNumber) + " " + hourStr + ":14"
-currTime = datetime.datetime.strptime(dateTimeStr, "%Y-%m-%d %H:%M")
-coords = spoofGeolocation('', 'SAN FRANCISCO, USA', currTime,
+curr_time = datetime.datetime.strptime(dateTimeStr, "%Y-%m-%d %H:%M")
+coords = spoofGeolocation('', 'SAN FRANCISCO, USA', curr_time,
 decoySeed, citiesList, nogoList)
 longitude = coords[1]
 if coords[3] == 'W':

@@ -5433,8 +5433,8 @@ def _testSpoofGeolocation() -> None:
 if hour < 10:
 hourStr = '0' + hourStr
 dateTimeStr = "2021-05-" + str(dayNumber) + " " + hourStr + ":14"
-currTime = datetime.datetime.strptime(dateTimeStr, "%Y-%m-%d %H:%M")
-coords = spoofGeolocation('', 'SEATTLE, USA', currTime,
+curr_time = datetime.datetime.strptime(dateTimeStr, "%Y-%m-%d %H:%M")
+coords = spoofGeolocation('', 'SEATTLE, USA', curr_time,
 decoySeed, citiesList, nogoList)
 longitude = coords[1]
 if coords[3] == 'W':

threads.py (10 changed lines)

@@ -83,7 +83,7 @@ def removeDormantThreads(base_dir: str, threadsList: [], debug: bool,

 timeoutSecs = int(timeoutMins * 60)
 dormantThreads = []
-currTime = datetime.datetime.utcnow()
+curr_time = datetime.datetime.utcnow()
 changed = False

 # which threads are dormant?

@@ -93,19 +93,19 @@ def removeDormantThreads(base_dir: str, threadsList: [], debug: bool,

 if th.isStarted:
 if not th.is_alive():
-if (currTime - th.startTime).total_seconds() > 10:
+if (curr_time - th.startTime).total_seconds() > 10:
 if debug:
 print('DEBUG: ' +
 'thread is not alive ten seconds after start')
 removeThread = True
 # timeout for started threads
-if (currTime - th.startTime).total_seconds() > timeoutSecs:
+if (curr_time - th.startTime).total_seconds() > timeoutSecs:
 if debug:
 print('DEBUG: started thread timed out')
 removeThread = True
 else:
 # timeout for threads which havn't been started
-if (currTime - th.startTime).total_seconds() > timeoutSecs:
+if (curr_time - th.startTime).total_seconds() > timeoutSecs:
 if debug:
 print('DEBUG: unstarted thread timed out')
 removeThread = True

@@ -146,7 +146,7 @@ def removeDormantThreads(base_dir: str, threadsList: [], debug: bool,
 sendLogFilename = base_dir + '/send.csv'
 try:
 with open(sendLogFilename, 'a+') as logFile:
-logFile.write(currTime.strftime("%Y-%m-%dT%H:%M:%SZ") +
+logFile.write(curr_time.strftime("%Y-%m-%dT%H:%M:%SZ") +
 ',' + str(noOfActiveThreads) +
 ',' + str(len(threadsList)) + '\n')
 except OSError:

utils.py (52 changed lines)

@@ -260,26 +260,26 @@ def is_dormant(base_dir: str, nickname: str, domain: str, actor: str,
 """Is the given followed actor dormant, from the standpoint
 of the given account
 """
-lastSeenFilename = acct_dir(base_dir, nickname, domain) + \
+last_seen_filename = acct_dir(base_dir, nickname, domain) + \
 '/lastseen/' + actor.replace('/', '#') + '.txt'

-if not os.path.isfile(lastSeenFilename):
+if not os.path.isfile(last_seen_filename):
 return False

-daysSinceEpochStr = None
+days_since_epoch_str = None
 try:
-with open(lastSeenFilename, 'r') as lastSeenFile:
-daysSinceEpochStr = lastSeenFile.read()
+with open(last_seen_filename, 'r') as last_seen_file:
+days_since_epoch_str = last_seen_file.read()
 except OSError:
-print('EX: failed to read last seen ' + lastSeenFilename)
+print('EX: failed to read last seen ' + last_seen_filename)
 return False

-if daysSinceEpochStr:
-daysSinceEpoch = int(daysSinceEpochStr)
-currTime = datetime.datetime.utcnow()
-currDaysSinceEpoch = (currTime - datetime.datetime(1970, 1, 1)).days
+if days_since_epoch_str:
+days_since_epoch = int(days_since_epoch_str)
+curr_time = datetime.datetime.utcnow()
+currDaysSinceEpoch = (curr_time - datetime.datetime(1970, 1, 1)).days
 timeDiffMonths = \
-int((currDaysSinceEpoch - daysSinceEpoch) / 30)
+int((currDaysSinceEpoch - days_since_epoch) / 30)
 if timeDiffMonths >= dormant_months:
 return True
 return False

@@ -716,25 +716,25 @@ def getStatusNumber(publishedStr: str = None) -> (str, str):
 """Returns the status number and published date
 """
 if not publishedStr:
-currTime = datetime.datetime.utcnow()
+curr_time = datetime.datetime.utcnow()
 else:
-currTime = \
+curr_time = \
 datetime.datetime.strptime(publishedStr, '%Y-%m-%dT%H:%M:%SZ')
-daysSinceEpoch = (currTime - datetime.datetime(1970, 1, 1)).days
+days_since_epoch = (curr_time - datetime.datetime(1970, 1, 1)).days
 # status is the number of seconds since epoch
 statusNumber = \
-str(((daysSinceEpoch * 24 * 60 * 60) +
-(currTime.hour * 60 * 60) +
-(currTime.minute * 60) +
-currTime.second) * 1000 +
-int(currTime.microsecond / 1000))
+str(((days_since_epoch * 24 * 60 * 60) +
+(curr_time.hour * 60 * 60) +
+(curr_time.minute * 60) +
+curr_time.second) * 1000 +
+int(curr_time.microsecond / 1000))
 # See https://github.com/tootsuite/mastodon/blob/
 # 995f8b389a66ab76ec92d9a240de376f1fc13a38/lib/mastodon/snowflake.rb
 # use the leftover microseconds as the sequence number
-sequenceId = currTime.microsecond % 1000
+sequenceId = curr_time.microsecond % 1000
 # shift by 16bits "sequence data"
 statusNumber = str((int(statusNumber) << 16) + sequenceId)
-published = currTime.strftime("%Y-%m-%dT%H:%M:%SZ")
+published = curr_time.strftime("%Y-%m-%dT%H:%M:%SZ")
 return statusNumber, published

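For reference, the identifier assembled in getStatusNumber above is milliseconds since the Unix epoch shifted into the high bits, with the leftover microseconds used as a low 16-bit sequence field, in the style of the Mastodon snowflake id it cites. A condensed sketch of the same arithmetic (illustration only, not part of this commit; the function name is hypothetical):

import datetime

def status_number_sketch(curr_time: datetime.datetime) -> int:
    # milliseconds since the Unix epoch, built the same way as above
    days = (curr_time - datetime.datetime(1970, 1, 1)).days
    msec = ((days * 24 * 60 * 60) +
            (curr_time.hour * 60 * 60) +
            (curr_time.minute * 60) +
            curr_time.second) * 1000 + int(curr_time.microsecond / 1000)
    # leftover microseconds become the low 16-bit sequence field
    sequence_id = curr_time.microsecond % 1000
    return (msec << 16) + sequence_id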
@@ -1920,7 +1920,7 @@ def noOfActiveAccountsMonthly(base_dir: str, months: int) -> bool:
 """Returns the number of accounts on the system this month
 """
 accountCtr = 0
-currTime = int(time.time())
+curr_time = int(time.time())
 monthSeconds = int(60*60*24*30*months)
 for subdir, dirs, files in os.walk(base_dir + '/accounts'):
 for account in dirs:

@@ -1933,7 +1933,7 @@ def noOfActiveAccountsMonthly(base_dir: str, months: int) -> bool:
 with open(lastUsedFilename, 'r') as lastUsedFile:
 lastUsed = lastUsedFile.read()
 if lastUsed.isdigit():
-timeDiff = (currTime - int(lastUsed))
+timeDiff = (curr_time - int(lastUsed))
 if timeDiff < monthSeconds:
 accountCtr += 1
 break

@@ -2518,9 +2518,9 @@ def isRecentPost(post_json_object: {}, maxDays: int) -> bool:
 return False
 if not isinstance(post_json_object['object']['published'], str):
 return False
-currTime = datetime.datetime.utcnow()
-daysSinceEpoch = (currTime - datetime.datetime(1970, 1, 1)).days
-recently = daysSinceEpoch - maxDays
+curr_time = datetime.datetime.utcnow()
+days_since_epoch = (curr_time - datetime.datetime(1970, 1, 1)).days
+recently = days_since_epoch - maxDays

 publishedDateStr = post_json_object['object']['published']
 try:

@@ -65,8 +65,8 @@ def htmlHashTagSwarm(base_dir: str, actor: str, translate: {}) -> str:
 """Returns a tag swarm of today's hashtags
 """
 maxTagLength = 42
-currTime = datetime.utcnow()
-daysSinceEpoch = (currTime - datetime(1970, 1, 1)).days
+curr_time = datetime.utcnow()
+daysSinceEpoch = (curr_time - datetime(1970, 1, 1)).days
 daysSinceEpochStr = str(daysSinceEpoch) + ' '
 daysSinceEpochStr2 = str(daysSinceEpoch - 1) + ' '
 recently = daysSinceEpoch - 1

@@ -28,8 +28,8 @@ def htmlGetLoginCredentials(loginParams: str,
 if not loginParams.startswith('username='):
 return None, None, None
 # minimum time between login attempts
-currTime = int(time.time())
-if currTime < last_login_time+10:
+curr_time = int(time.time())
+if curr_time < last_login_time+10:
 return None, None, None
 if '&' not in loginParams:
 return None, None, None