mirror of https://gitlab.com/bashrc2/epicyon

Preparing for utcnow deprecation

parent 122644688c
commit d40fde1136
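Background, not part of the commit itself: Python 3.12 deprecates datetime.utcnow() and datetime.utcfromtimestamp() because they return naive datetimes with no tzinfo attached. The replacement pattern used throughout this commit is an offset-aware UTC datetime, wrapped in the date_utcnow() helper added to utils.py below. A minimal sketch of the two styles:

from datetime import datetime, timezone

# deprecated since Python 3.12: result is naive (tzinfo is None)
naive_now = datetime.utcnow()

# the pattern this commit adopts (see date_utcnow() in utils.py):
aware_now = datetime.now(timezone.utc)
print(naive_now.tzinfo, aware_now.tzinfo)  # None timezone.utc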
auth.py (4 changed lines)

@@ -12,12 +12,12 @@ import hashlib
 import binascii
 import os
 import secrets
-import datetime
 from utils import is_system_account
 from utils import is_memorial_account
 from utils import has_users_path
 from utils import text_in_file
 from utils import remove_eol
+from utils import date_utcnow


 def _hash_password(password: str) -> str:

@@ -295,7 +295,7 @@ def record_login_failure(base_dir: str, ip_address: str,
     write_type = 'a+'
     if not os.path.isfile(failure_log):
         write_type = 'w+'
-    curr_time = datetime.datetime.utcnow()
+    curr_time = date_utcnow()
     curr_time_str = curr_time.strftime("%Y-%m-%d %H:%M:%SZ")
     try:
         with open(failure_log, write_type, encoding='utf-8') as fp_fail:
blocking.py (15 changed lines)

@@ -10,8 +10,9 @@ __module_group__ = "Core"
 import os
 import json
 import time
-from datetime import datetime
 from session import get_json_valid
+from utils import date_from_string_format
+from utils import date_utcnow
 from utils import remove_eol
 from utils import has_object_string
 from utils import has_object_string_object

@@ -1540,16 +1541,12 @@ def broch_modeLapses(base_dir: str, lapseDays: int) -> bool:
     if not os.path.isfile(allow_filename):
         return False
     last_modified = file_last_modified(allow_filename)
-    modified_date = None
-    try:
-        modified_date = \
-            datetime.strptime(last_modified, "%Y-%m-%dT%H:%M:%SZ")
-    except BaseException:
+    modified_date = \
+        date_from_string_format(last_modified, ["%Y-%m-%dT%H:%M:%S%z"])
+    if not modified_date:
+        print('EX: broch_modeLapses date not parsed ' + str(last_modified))
+        return False
     if not modified_date:
         return False
-    curr_time = datetime.datetime.utcnow()
+    curr_time = date_utcnow()
     days_since_broch = (curr_time - modified_date).days
     if days_since_broch >= lapseDays:
         removed = False
blog.py (8 changed lines)

@@ -8,7 +8,6 @@ __status__ = "Production"
 __module_group__ = "ActivityPub"

 import os
-from datetime import datetime

 from content import replace_emoji_from_tags
 from webapp_utils import html_header_with_external_style

@@ -17,6 +16,7 @@ from webapp_utils import html_footer
 from webapp_utils import get_post_attachments_as_html
 from webapp_utils import edit_text_area
 from webapp_media import add_embedded_elements
+from utils import date_from_string_format
 from utils import get_attributed_to
 from utils import remove_eol
 from utils import text_in_file

@@ -381,7 +381,8 @@ def _html_blog_post_rss2(domain: str, post_json_object: {},
     if post_json_object['object'].get('summary') and \
        post_json_object['object'].get('published'):
         published = post_json_object['object']['published']
-        pub_date = datetime.strptime(published, "%Y-%m-%dT%H:%M:%SZ")
+        pub_date = date_from_string_format(published,
+                                           ["%Y-%m-%dT%H:%M:%S%z"])
         title_str = escape_text(post_json_object['object']['summary'])
         rss_date_str = pub_date.strftime("%a, %d %b %Y %H:%M:%S UT")
         content = \

@@ -414,7 +415,8 @@ def _html_blog_post_rss3(domain: str, post_json_object: {},
     if post_json_object['object'].get('summary') and \
        post_json_object['object'].get('published'):
         published = post_json_object['object']['published']
-        pub_date = datetime.strptime(published, "%Y-%m-%dT%H:%M:%SZ")
+        pub_date = date_from_string_format(published,
+                                           ["%Y-%m-%dT%H:%M:%S%z"])
         title_str = post_json_object['object']['summary']
         rss_date_str = pub_date.strftime("%a, %d %b %Y %H:%M:%S UT")
         content = \
cache.py (13 changed lines)

@@ -8,7 +8,6 @@ __status__ = "Production"
 __module_group__ = "Core"

 import os
-import datetime
 from session import url_exists
 from session import get_json
 from session import get_json_valid

@@ -16,6 +15,8 @@ from utils import load_json
 from utils import save_json
 from utils import get_file_case_insensitive
 from utils import get_user_paths
+from utils import date_utcnow
+from utils import date_from_string_format


 def remove_person_from_cache(base_dir: str, person_url: str,

@@ -85,7 +86,7 @@ def store_person_in_cache(base_dir: str, person_url: str,
         # This is not an actor or person account
         return

-    curr_time = datetime.datetime.utcnow()
+    curr_time = date_utcnow()
     person_cache[person_url] = {
         "actor": person_json,
         "timestamp": curr_time.strftime("%Y-%m-%dT%H:%M:%SZ")

@@ -124,7 +125,7 @@ def get_person_from_cache(base_dir: str, person_url: str,
     if person_cache.get(person_url):
         if not loaded_from_file:
             # update the timestamp for the last time the actor was retrieved
-            curr_time = datetime.datetime.utcnow()
+            curr_time = date_utcnow()
             curr_time_str = curr_time.strftime("%Y-%m-%dT%H:%M:%SZ")
             person_cache[person_url]['timestamp'] = curr_time_str
         return person_cache[person_url]['actor']

@@ -134,11 +135,11 @@ def get_person_from_cache(base_dir: str, person_url: str,
 def expire_person_cache(person_cache: {}):
     """Expires old entries from the cache in memory
     """
-    curr_time = datetime.datetime.utcnow()
+    curr_time = date_utcnow()
     removals = []
     for person_url, cache_json in person_cache.items():
-        cache_time = datetime.datetime.strptime(cache_json['timestamp'],
-                                                "%Y-%m-%dT%H:%M:%SZ")
+        cache_time = date_from_string_format(cache_json['timestamp'],
+                                             ["%Y-%m-%dT%H:%M:%S%z"])
         days_since_cached = (curr_time - cache_time).days
         if days_since_cached > 2:
             removals.append(person_url)
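Aside on why every datetime.datetime(1970, 1, 1) baseline is replaced along with utcnow(): Python refuses to mix naive and aware datetimes in arithmetic, so once curr_time is offset-aware the epoch baseline must be offset-aware too, which is what the new date_epoch() helper in utils.py provides. A quick illustration, not part of the commit:

from datetime import datetime, timezone

aware_now = datetime.now(timezone.utc)
aware_epoch = datetime(1970, 1, 1, tzinfo=timezone.utc)
naive_epoch = datetime(1970, 1, 1)

print((aware_now - aware_epoch).days)  # fine: both operands offset-aware
try:
    print((aware_now - naive_epoch).days)
except TypeError as ex:
    print(ex)  # can't subtract offset-naive and offset-aware datetimes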
@@ -9,6 +9,8 @@ __module_group__ = "RSS Feeds"

 import os
 import datetime
+from utils import date_utcnow
+from utils import date_epoch

 MAX_TAG_LENGTH = 42


@@ -117,8 +119,8 @@ def get_hashtag_categories(base_dir: str,
     hashtag_categories = {}

     if recent:
-        curr_time = datetime.datetime.utcnow()
-        days_since_epoch = (curr_time - datetime.datetime(1970, 1, 1)).days
+        curr_time = date_utcnow()
+        days_since_epoch = (curr_time - date_epoch()).days
         recently = days_since_epoch - 1

     for _, _, files in os.walk(base_dir + '/tags'):

@@ -151,10 +153,10 @@ def get_hashtag_categories(base_dir: str,
                 mod_time_since_epoc = \
                     os.path.getmtime(tags_filename)
                 last_modified_date = \
-                    datetime.datetime.fromtimestamp(mod_time_since_epoc)
+                    datetime.datetime.fromtimestamp(mod_time_since_epoc,
+                                                    datetime.timezone.utc)
                 file_days_since_epoch = \
-                    (last_modified_date -
-                     datetime.datetime(1970, 1, 1)).days
+                    (last_modified_date - date_epoch()).days
                 if file_days_since_epoch < recently:
                     continue
daemon.py (15 changed lines)

@@ -300,6 +300,7 @@ from languages import set_actor_languages
 from languages import get_understood_languages
 from like import update_likes_collection
 from reaction import update_reaction_collection
+from utils import date_from_string_format
 from utils import corp_servers
 from utils import get_attributed_to
 from utils import get_memorials

@@ -6396,8 +6397,8 @@ class PubServer(BaseHTTPRequestHandler):
                 # update newswire
                 pub_date = post_json_object['object']['published']
                 published_date = \
-                    datetime.datetime.strptime(pub_date,
-                                               "%Y-%m-%dT%H:%M:%SZ")
+                    date_from_string_format(pub_date,
+                                            ["%Y-%m-%dT%H:%M:%S%z"])
                 if self.server.newswire.get(str(published_date)):
                     self.server.newswire[published_date][0] = \
                         news_post_title

@@ -9305,7 +9306,9 @@ class PubServer(BaseHTTPRequestHandler):
        media_file_type = media_file_mime_type(media_filename)

        media_tm = os.path.getmtime(media_filename)
-       last_modified_time = datetime.datetime.fromtimestamp(media_tm)
+       last_modified_time = \
+           datetime.datetime.fromtimestamp(media_tm,
+                                           datetime.timezone.utc)
        last_modified_time_str = \
            last_modified_time.strftime('%a, %d %b %Y %H:%M:%S GMT')

@@ -16743,7 +16746,8 @@ class PubServer(BaseHTTPRequestHandler):
            return True

        avatar_tm = os.path.getmtime(avatar_filename)
-       last_modified_time = datetime.datetime.fromtimestamp(avatar_tm)
+       last_modified_time = \
+           datetime.datetime.fromtimestamp(avatar_tm, datetime.timezone.utc)
        last_modified_time_str = \
            last_modified_time.strftime('%a, %d %b %Y %H:%M:%S GMT')

@@ -21490,7 +21494,8 @@ class PubServer(BaseHTTPRequestHandler):
        file_length = os.path.getsize(media_filename)
        media_tm = os.path.getmtime(media_filename)
        last_modified_time = \
-           datetime.datetime.fromtimestamp(media_tm)
+           datetime.datetime.fromtimestamp(media_tm,
+                                           datetime.timezone.utc)
        time_format_str = '%a, %d %b %Y %H:%M:%S GMT'
        last_modified_time_str = \
            last_modified_time.strftime(time_format_str)
@@ -20,6 +20,7 @@ from utils import locate_post
 from utils import delete_post
 from utils import remove_moderation_post_from_index
 from utils import local_actor_url
+from utils import date_utcnow
 from session import post_json
 from webfinger import webfinger_handle
 from auth import create_basic_auth_header

@@ -182,7 +183,7 @@ def remove_old_hashtags(base_dir: str, max_months: int) -> str:
     """
     max_months = min(max_months, 11)
     max_days_since_epoch = \
-        (datetime.utcnow() - datetime(1970, 1 + max_months, 1)).days
+        (date_utcnow() - datetime(1970, 1 + max_months, 1)).days
     remove_hashtags = []

     for _, _, files in os.walk(base_dir + '/tags'):

@@ -192,7 +193,9 @@ def remove_old_hashtags(base_dir: str, max_months: int) -> str:
             continue
         # get last modified datetime
         mod_time_since_epoc = os.path.getmtime(tags_filename)
-        last_modified_date = datetime.fromtimestamp(mod_time_since_epoc)
+        last_modified_date = \
+            datetime.fromtimestamp(mod_time_since_epoc,
+                                   datetime.timezone.utc)
         file_days_since_epoch = \
             (last_modified_date - datetime(1970, 1, 1)).days
happening.py (33 changed lines)

@@ -13,6 +13,7 @@ from hashlib import md5
 from datetime import datetime
 from datetime import timedelta

+from utils import date_from_string_format
 from utils import acct_handle_dir
 from utils import is_public_post
 from utils import load_json

@@ -103,8 +104,8 @@ def save_event_post(base_dir: str, handle: str, post_id: str,
         os.mkdir(calendar_path)

     # get the year, month and day from the event
-    event_time = datetime.strptime(event_json['startTime'],
-                                   "%Y-%m-%dT%H:%M:%S%z")
+    event_time = date_from_string_format(event_json['startTime'],
+                                         ["%Y-%m-%dT%H:%M:%S%z"])
     event_year = int(event_time.strftime("%Y"))
     if event_year < 2020 or event_year >= 2100:
         return False

@@ -243,8 +244,8 @@ def _sort_todays_events(post_events_list: []) -> []:
             # only check events (not places)
             if tag['type'] == 'Event':
                 event_time = \
-                    datetime.strptime(tag['startTime'],
-                                      "%Y-%m-%dT%H:%M:%S%z")
+                    date_from_string_format(tag['startTime'],
+                                            ["%Y-%m-%dT%H:%M:%S%z"])
                 post_events_dict[event_time] = post_event
                 break

@@ -327,8 +328,8 @@ def get_todays_events(base_dir: str, nickname: str, domain: str,
             if not tag.get('startTime'):
                 continue
             event_time = \
-                datetime.strptime(tag['startTime'],
-                                  "%Y-%m-%dT%H:%M:%S%z")
+                date_from_string_format(tag['startTime'],
+                                        ["%Y-%m-%dT%H:%M:%S%z"])
             if int(event_time.strftime("%Y")) == year and \
                int(event_time.strftime("%m")) == month_number and \
                int(event_time.strftime("%d")) == day_number:

@@ -407,12 +408,12 @@ def _icalendar_day(base_dir: str, nickname: str, domain: str,
         post_id = evnt['id']
         if evnt.get('startTime'):
             event_start = \
-                datetime.strptime(evnt['startTime'],
-                                  "%Y-%m-%dT%H:%M:%S%z")
+                date_from_string_format(evnt['startTime'],
+                                        ["%Y-%m-%dT%H:%M:%S%z"])
         if evnt.get('endTime'):
             event_end = \
-                datetime.strptime(evnt['startTime'],
-                                  "%Y-%m-%dT%H:%M:%S%z")
+                date_from_string_format(evnt['startTime'],
+                                        ["%Y-%m-%dT%H:%M:%S%z"])
         if 'public' in evnt:
             if evnt['public'] is True:
                 event_is_public = True

@@ -608,8 +609,8 @@ def day_events_check(base_dir: str, nickname: str, domain: str,
             if not tag.get('startTime'):
                 continue
             event_time = \
-                datetime.strptime(tag['startTime'],
-                                  "%Y-%m-%dT%H:%M:%S%z")
+                date_from_string_format(tag['startTime'],
+                                        ["%Y-%m-%dT%H:%M:%S%z"])
             if int(event_time.strftime("%d")) != day_number:
                 continue
             if int(event_time.strftime("%m")) != month_number:

@@ -666,8 +667,8 @@ def get_this_weeks_events(base_dir: str, nickname: str, domain: str) -> {}:
             if not tag.get('startTime'):
                 continue
             event_time = \
-                datetime.strptime(tag['startTime'],
-                                  "%Y-%m-%dT%H:%M:%S%z")
+                date_from_string_format(tag['startTime'],
+                                        ["%Y-%m-%dT%H:%M:%S%z"])
             if now <= event_time <= end_of_week:
                 week_day_index = (event_time - now).days()
                 post_event.append(tag)

@@ -742,8 +743,8 @@ def get_calendar_events(base_dir: str, nickname: str, domain: str,
             if not tag.get('startTime'):
                 continue
             event_time = \
-                datetime.strptime(tag['startTime'],
-                                  "%Y-%m-%dT%H:%M:%S%z")
+                date_from_string_format(tag['startTime'],
+                                        ["%Y-%m-%dT%H:%M:%S%z"])
             if int(event_time.strftime("%Y")) == year and \
                int(event_time.strftime("%m")) == month_number:
                 day_of_month = str(int(event_time.strftime("%d")))
httpsig.py (20 changed lines)

@@ -21,11 +21,13 @@ from cryptography.hazmat.primitives import hashes
 from cryptography.hazmat.primitives.asymmetric import utils as hazutils
 import base64
 from time import gmtime, strftime
-import datetime
 from utils import get_full_domain
 from utils import get_sha_256
 from utils import get_sha_512
 from utils import local_actor_url
+from utils import date_utcnow
+from utils import date_epoch
+from utils import date_from_string_format


 def message_content_digest(message_body_json_str: str,

@@ -167,9 +169,9 @@ def sign_post_headers_new(date_str: str, private_key_pem: str,
         curr_time = gmtime()
         date_str = strftime(time_format, curr_time)
     else:
-        curr_time = datetime.datetime.strptime(date_str, time_format)
+        curr_time = date_from_string_format(date_str, [time_format])
     seconds_since_epoch = \
-        int((curr_time - datetime.datetime(1970, 1, 1)).total_seconds())
+        int((curr_time - date_epoch()).total_seconds())
     key_id = local_actor_url(http_prefix, nickname, domain) + '#main-key'
     if not message_body_json_str:
         headers = {

@@ -299,19 +301,13 @@ def _verify_recent_signature(signed_date_str: str) -> bool:
     """Checks whether the given time taken from the header is within
     12 hours of the current time
     """
-    curr_date = datetime.datetime.utcnow()
+    curr_date = date_utcnow()
     formats = ("%a, %d %b %Y %H:%M:%S %Z",
                "%a, %d %b %Y %H:%M:%S %z")
-    signed_date = None
-    for date_format in formats:
-        try:
-            signed_date = \
-                datetime.datetime.strptime(signed_date_str, date_format)
-        except BaseException:
-            continue
-        break
+    signed_date = date_from_string_format(signed_date_str, formats)
     if not signed_date:
         return False

     time_diff_sec = (curr_date - signed_date).total_seconds()
     # 12 hours tollerance
     if time_diff_sec > 43200:
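The strptime loop deleted from _verify_recent_signature above is exactly what the new date_from_string_format() helper (added to utils.py later in this diff) encapsulates: it tries each format in turn and returns None instead of raising. A usage sketch mirroring the new _test_dateformat test in tests.py:

from utils import date_from_string_format  # helper added by this commit

formats = ("%a, %d %b %Y %H:%M:%S %Z",
           "%a, %d %b %Y %H:%M:%S %z")
signed_date = date_from_string_format('Mon, 20 Nov 2023 16:51:15 GMT', formats)
assert signed_date and signed_date.tzinfo  # offset-aware on success

# unparseable input yields None rather than an exception
assert date_from_string_format('not a date', formats) is None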
inbox.py (15 changed lines)

@@ -18,6 +18,9 @@ from languages import understood_post_language
 from like import update_likes_collection
 from reaction import update_reaction_collection
 from reaction import valid_emoji_content
+from utils import date_from_string_format
+from utils import date_epoch
+from utils import date_utcnow
 from utils import contains_statuses
 from utils import get_actor_from_post_id
 from utils import contains_invalid_actor_url_chars

@@ -295,12 +298,12 @@ def _update_cached_hashtag_swarm(base_dir: str, nickname: str, domain: str,
     modified_date = None
     try:
         modified_date = \
-            datetime.datetime.strptime(last_modified, "%Y-%m-%dT%H:%M:%SZ")
+            date_from_string_format(last_modified, ["%Y-%m-%dT%H:%M:%S%z"])
     except BaseException:
         print('EX: unable to parse last modified cache date ' +
               str(last_modified))
     if modified_date:
-        curr_date = datetime.datetime.utcnow()
+        curr_date = date_utcnow()
         time_diff = curr_date - modified_date
         diff_mins = int(time_diff.total_seconds() / 60)
         if diff_mins < 30:

@@ -391,7 +394,7 @@ def store_hash_tags(base_dir: str, nickname: str, domain: str,
         if not valid_hash_tag(tag_name):
             continue
         tags_filename = tags_dir + '/' + tag_name + '.txt'
-        days_diff = datetime.datetime.utcnow() - datetime.datetime(1970, 1, 1)
+        days_diff = date_utcnow() - date_epoch()
         days_since_epoch = days_diff.days
         tag_line = \
             str(days_since_epoch) + ' ' + nickname + ' ' + post_url + '\n'

@@ -804,7 +807,7 @@ def save_post_to_inbox_queue(base_dir: str, http_prefix: str,
         return None
     original_post_id = remove_id_ending(post_json_object['id'])

-    curr_time = datetime.datetime.utcnow()
+    curr_time = date_utcnow()

     post_id = None
     if post_json_object.get('id'):

@@ -3999,8 +4002,8 @@ def _update_last_seen(base_dir: str, handle: str, actor: str) -> None:
         os.mkdir(last_seen_path)
     last_seen_filename = \
         last_seen_path + '/' + actor.replace('/', '#') + '.txt'
-    curr_time = datetime.datetime.utcnow()
-    days_since_epoch = (curr_time - datetime.datetime(1970, 1, 1)).days
+    curr_time = date_utcnow()
+    days_since_epoch = (curr_time - date_epoch()).days
     # has the value changed?
     if os.path.isfile(last_seen_filename):
         with open(last_seen_filename, 'r',
@@ -12,7 +12,6 @@ __module_group__ = "Security"
 import random
 import base64
 import hashlib
-from datetime import datetime
 from cryptography.hazmat.backends import default_backend
 from cryptography.hazmat.primitives.serialization import load_pem_private_key
 from cryptography.hazmat.primitives.serialization import load_pem_public_key

@@ -22,6 +21,7 @@ from cryptography.hazmat.primitives.asymmetric import utils as hazutils
 from pyjsonld import normalize
 from context import has_valid_context
 from utils import get_sha_256
+from utils import date_utcnow


 def _options_hash(doc: {}) -> str:

@@ -103,7 +103,7 @@ def generate_json_signature(doc: {}, private_key_pem: str) -> None:
         "type": "RsaSignature2017",
         "nonce": '%030x' % random.randrange(16**64),
         "creator": doc["actor"] + "#main-key",
-        "created": datetime.utcnow().replace(microsecond=0).isoformat() + "Z",
+        "created": date_utcnow().replace(microsecond=0).isoformat() + "Z",
     }
     doc["signature"] = options
     to_be_signed = _options_hash(doc) + _doc_hash(doc)
maps.py (12 changed lines)

@@ -9,7 +9,6 @@ __module_group__ = "Core"


 import os
-import datetime
 from utils import is_float
 from utils import acct_dir
 from utils import load_json

@@ -17,6 +16,9 @@ from utils import save_json
 from utils import locate_post
 from utils import remove_html
 from utils import has_object_dict
+from utils import date_utcnow
+from utils import date_epoch
+from utils import date_from_string_format


 def _geocoords_to_osm_link(osm_domain: str, zoom: int,

@@ -490,8 +492,8 @@ def add_tag_map_links(tag_maps_dir: str, tag_name: str,

     # combine map links with the existing list
     secs_since_epoch = \
-        int((datetime.datetime.strptime(published, '%Y-%m-%dT%H:%M:%SZ') -
-             datetime.datetime(1970, 1, 1)).total_seconds())
+        int((date_from_string_format(published, ['%Y-%m-%dT%H:%M:%S%z']) -
+             date_epoch()).total_seconds())
     links_changed = False
     for link in map_links:
         line = str(secs_since_epoch) + ' ' + link + ' ' + post_url

@@ -633,8 +635,8 @@ def _hashtag_map_within_hours(base_dir: str, tag_name: str,
     last number of hours
     """
     secs_since_epoch = \
-        int((datetime.datetime.utcnow() -
-             datetime.datetime(1970, 1, 1)).total_seconds())
+        int((date_utcnow() -
+             date_epoch()).total_seconds())
     curr_hours_since_epoch = int(secs_since_epoch / (60 * 60))
     start_hours_since_epoch = curr_hours_since_epoch - abs(hours)
     end_hours_since_epoch = curr_hours_since_epoch + 2
media.py (12 changed lines)

@@ -15,6 +15,8 @@ import random
 from random import randint
 from hashlib import sha1
 from auth import create_password
+from utils import date_epoch
+from utils import date_utcnow
 from utils import safe_system_string
 from utils import get_base_content_from_post
 from utils import get_full_domain

@@ -336,7 +338,7 @@ def _spoof_meta_data(base_dir: str, nickname: str, domain: str,
     if os.path.isfile('/usr/bin/exiftool'):
         print('Spoofing metadata in ' + output_filename + ' using exiftool')
         curr_time_adjusted = \
-            datetime.datetime.utcnow() - \
+            date_utcnow() - \
             datetime.timedelta(minutes=randint(2, 120))
         published = curr_time_adjusted.strftime("%Y:%m:%d %H:%M:%S+00:00")
         (latitude, longitude, latitude_ref, longitude_ref,

@@ -491,9 +493,9 @@ def create_media_dirs(base_dir: str, media_path: str) -> None:
 def get_media_path() -> str:
     """Returns the path for stored media
     """
-    curr_time = datetime.datetime.utcnow()
+    curr_time = date_utcnow()
     weeks_since_epoch = \
-        int((curr_time - datetime.datetime(1970, 1, 1)).days / 7)
+        int((curr_time - date_epoch()).days / 7)
     return 'media/' + str(weeks_since_epoch)


@@ -676,8 +678,8 @@ def archive_media(base_dir: str, archive_directory: str,
     if max_weeks == 0:
         return

-    curr_time = datetime.datetime.utcnow()
-    weeks_since_epoch = int((curr_time - datetime.datetime(1970, 1, 1)).days/7)
+    curr_time = date_utcnow()
+    weeks_since_epoch = int((curr_time - date_epoch()).days/7)
     min_week = weeks_since_epoch - max_weeks

     if archive_directory:
@@ -15,7 +15,6 @@ __module_group__ = "Web Interface Columns"

 import os
 import time
-import datetime
 import html
 from shutil import rmtree
 from subprocess import Popen

@@ -24,6 +23,8 @@ from newswire import get_dict_from_newswire
 # from posts import send_signed_json
 from posts import create_news_post
 from posts import archive_posts_for_person
+from utils import date_from_string_format
+from utils import date_utcnow
 from utils import valid_hash_tag
 from utils import get_base_content_from_post
 from utils import remove_html

@@ -572,7 +573,7 @@ def _convert_rss_to_activitypub(base_dir: str, http_prefix: str,
         else:
             try:
                 date_str_with_offset = \
-                    datetime.datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S%z")
+                    date_from_string_format(date_str, ["%Y-%m-%d %H:%M:%S%z"])
             except BaseException:
                 print('EX: Newswire strptime failed ' + str(date_str))
                 continue

@@ -667,7 +668,7 @@ def _convert_rss_to_activitypub(base_dir: str, http_prefix: str,
         blog['news'] = True

         # note the time of arrival
-        curr_time = datetime.datetime.utcnow()
+        curr_time = date_utcnow()
         blog['object']['arrived'] = curr_time.strftime("%Y-%m-%dT%H:%M:%SZ")

         # change the id, based upon the published time
newswire.py (15 changed lines)

@@ -14,12 +14,12 @@ import random
 import time
 from socket import error as SocketError
 import errno
-from datetime import datetime
 from datetime import timedelta
 from datetime import timezone
 from collections import OrderedDict
 from utils import valid_post_date
 from categories import set_hashtag_category
+from utils import date_from_string_format
 from utils import acct_handle_dir
 from utils import remove_eol
 from utils import get_domain_from_actor

@@ -314,8 +314,8 @@ def parse_feed_date(pub_date: str, unique_string_identifier: str) -> str:
                "%a, %d %b %Y %H:%M:%S AKST",
                "%a, %d %b %Y %H:%M:%S HST",
                "%a, %d %b %Y %H:%M:%S UT",
-               "%Y-%m-%dT%H:%M:%SZ",
-               "%Y-%m-%dT%H:%M:%S%z")
+               "%Y-%m-%dT%H:%M:%S%z",
+               "%Y-%m-%dT%H:%M:%S%Z")
     published_date = None
     for date_format in formats:
         if ',' in pub_date and ',' not in date_format:

@@ -348,7 +348,7 @@ def parse_feed_date(pub_date: str, unique_string_identifier: str) -> str:
             timezone_str = '-' + ending.split('-')[1]
             pub_date2 = pub_date2.split('.')[0] + timezone_str
         try:
-            published_date = datetime.strptime(pub_date2, date_format)
+            published_date = date_from_string_format(pub_date2, [date_format])
         except BaseException:
             continue

@@ -1420,10 +1420,11 @@ def get_rss_from_dict(base_dir: str, newswire: {},
             published = published.replace(' ', 'T')
         else:
             published_with_offset = \
-                datetime.strptime(published, "%Y-%m-%d %H:%M:%S%z")
-            published = published_with_offset.strftime("%Y-%m-%dT%H:%M:%SZ")
+                date_from_string_format(published, ["%Y-%m-%d %H:%M:%S%z"])
+            published = published_with_offset.strftime("%Y-%m-%dT%H:%M:%S%z")
         try:
-            pub_date = datetime.strptime(published, "%Y-%m-%dT%H:%M:%SZ")
+            pub_date = date_from_string_format(published,
+                                               ["%Y-%m-%dT%H:%M:%S%z"])
         except BaseException as ex:
             print('WARN: Unable to convert date ' + published + ' ' + str(ex))
             continue
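A note on the format strings changed in parse_feed_date above: %z matches a numeric UTC offset and, since Python 3.7, also a literal 'Z', while %Z matches a timezone name such as GMT or UTC. That is why the literal-Z pattern "%Y-%m-%dT%H:%M:%SZ" can be dropped in favour of "%Y-%m-%dT%H:%M:%S%z". A small check, assuming CPython 3.7 or later:

from datetime import datetime

fmt = "%Y-%m-%dT%H:%M:%S%z"
print(datetime.strptime("2023-11-20T16:51:15Z", fmt))      # literal 'Z' accepted
print(datetime.strptime("2023-11-20T16:51:15+0200", fmt))  # numeric offset

fmt_name = "%a, %d %b %Y %H:%M:%S %Z"
print(datetime.strptime("Mon, 20 Nov 2023 16:51:15 GMT", fmt_name))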
@@ -11,7 +11,6 @@ import time
 import os
 import subprocess
 import shutil
-import datetime
 import pyqrcode
 from random import randint
 from pathlib import Path

@@ -38,6 +37,7 @@ from roles import set_role
 from roles import actor_roles_from_list
 from roles import get_actor_roles_list
 from media import process_meta_data
+from utils import date_utcnow
 from utils import get_memorials
 from utils import is_account_dir
 from utils import valid_hash_tag

@@ -1852,7 +1852,7 @@ def get_person_avatar_url(base_dir: str, person_url: str,
 def add_actor_update_timestamp(actor_json: {}) -> None:
     """Adds 'updated' fields with a timestamp
     """
-    updated_time = datetime.datetime.utcnow()
+    updated_time = date_utcnow()
     curr_date_str = updated_time.strftime("%Y-%m-%dT%H:%M:%SZ")
     actor_json['updated'] = curr_date_str
     # add updated timestamp to avatar and banner
posts.py (34 changed lines)

@@ -34,6 +34,9 @@ from webfinger import webfinger_handle
 from httpsig import create_signed_header
 from siteactive import site_is_active
 from languages import understood_post_language
+from utils import date_from_string_format
+from utils import date_epoch
+from utils import date_utcnow
 from utils import get_attributed_to
 from utils import contains_statuses
 from utils import contains_invalid_actor_url_chars

@@ -1050,7 +1053,7 @@ def _update_hashtags_index(base_dir: str, tag: {}, new_post_id: str,
     new_post_id = new_post_id.replace('/', '#')

     if not os.path.isfile(tags_filename):
-        days_diff = datetime.datetime.utcnow() - datetime.datetime(1970, 1, 1)
+        days_diff = date_utcnow() - date_epoch()
         days_since_epoch = days_diff.days
         tag_line = \
             str(days_since_epoch) + ' ' + nickname + ' ' + \

@@ -1065,8 +1068,7 @@ def _update_hashtags_index(base_dir: str, tag: {}, new_post_id: str,
     else:
         # prepend to tags index file
         if not text_in_file(new_post_id, tags_filename):
-            days_diff = \
-                datetime.datetime.utcnow() - datetime.datetime(1970, 1, 1)
+            days_diff = date_utcnow() - date_epoch()
             days_since_epoch = days_diff.days
             tag_line = \
                 str(days_since_epoch) + ' ' + nickname + ' ' + \

@@ -2287,10 +2289,10 @@ def create_question_post(base_dir: str,
     message_json['object']['type'] = 'Question'
     message_json['object']['oneOf'] = []
     message_json['object']['votersCount'] = 0
-    curr_time = datetime.datetime.utcnow()
+    curr_time = date_utcnow()
     days_since_epoch = \
-        int((curr_time - datetime.datetime(1970, 1, 1)).days + duration_days)
-    end_time = datetime.datetime(1970, 1, 1) + \
+        int((curr_time - date_epoch()).days + duration_days)
+    end_time = date_epoch() + \
         datetime.timedelta(days_since_epoch)
     message_json['object']['endTime'] = end_time.strftime("%Y-%m-%dT%H:%M:%SZ")
     for question_option in q_options:

@@ -3812,7 +3814,7 @@ def send_to_followers(server, session, session_onion, session_i2p,
     elif domain.endswith('.i2p'):
         curr_proxy_type = 'i2p'

-    sending_start_time = datetime.datetime.utcnow()
+    sending_start_time = date_utcnow()
     print('Sending post to followers begins ' +
           sending_start_time.strftime("%Y-%m-%dT%H:%M:%SZ"))
     sending_ctr = 0

@@ -3997,7 +3999,7 @@ def send_to_followers(server, session, session_onion, session_i2p,
     if debug:
         print('DEBUG: End of send_to_followers')

-    sending_end_time = datetime.datetime.utcnow()
+    sending_end_time = date_utcnow()
     sending_mins = \
         int((sending_end_time - sending_start_time).total_seconds() / 60)
     print('Sending post to followers ends ' + str(sending_mins) + ' mins')

@@ -4380,7 +4382,7 @@ def _passed_newswire_voting(newswire_votes_threshold: int,
     if not arrival_date:
         return True
     # how long has elapsed since this post arrived?
-    curr_date = datetime.datetime.utcnow()
+    curr_date = date_utcnow()
     time_diff_mins = \
         int((curr_date - arrival_date).total_seconds() / 60)
     # has the voting time elapsed?

@@ -6363,17 +6365,15 @@ def c2s_box_json(session, nickname: str, password: str,
 def seconds_between_published(published1: str, published2: str) -> int:
     """Returns the number of seconds between two published dates
     """
-    try:
-        published1_time = \
-            datetime.datetime.strptime(published1, '%Y-%m-%dT%H:%M:%SZ')
-    except BaseException:
+    published1_time = \
+        date_from_string_format(published1, ['%Y-%m-%dT%H:%M:%S%z'])
+    if not published1_time:
         print('EX: seconds_between_published unable to parse date 1 ' +
               str(published1))
         return -1
-    try:
-        published2_time = \
-            datetime.datetime.strptime(published2, '%Y-%m-%dT%H:%M:%SZ')
-    except BaseException:
+    published2_time = \
+        date_from_string_format(published2, ['%Y-%m-%dT%H:%M:%S%z'])
+    if not published2_time:
         print('EX: seconds_between_published unable to parse date 2 ' +
               str(published2))
         return -1
schedule.py (12 changed lines)

@@ -9,7 +9,8 @@ __module_group__ = "Calendar"

 import os
 import time
-import datetime
+from utils import date_from_string_format
+from utils import date_epoch
 from utils import acct_handle_dir
 from utils import has_object_dict
 from utils import get_status_number

@@ -17,6 +18,7 @@ from utils import load_json
 from utils import is_account_dir
 from utils import acct_dir
 from utils import remove_eol
+from utils import date_utcnow
 from outbox import post_message_to_outbox
 from session import create_session
 from threads import begin_thread

@@ -34,8 +36,8 @@ def _update_post_schedule(base_dir: str, handle: str, httpd,
         return

     # get the current time as an int
-    curr_time = datetime.datetime.utcnow()
-    days_since_epoch = (curr_time - datetime.datetime(1970, 1, 1)).days
+    curr_time = date_utcnow()
+    days_since_epoch = (curr_time - date_epoch()).days

     schedule_dir = acct_handle_dir(base_dir, handle) + '/scheduled/'
     index_lines = []

@@ -64,10 +66,10 @@ def _update_post_schedule(base_dir: str, handle: str, httpd,
             index_lines.append(line)
             # convert string date to int
             post_time = \
-                datetime.datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S%z")
+                date_from_string_format(date_str, ["%Y-%m-%dT%H:%M:%S%z"])
             post_time = post_time.replace(tzinfo=None)
             post_days_since_epoch = \
-                (post_time - datetime.datetime(1970, 1, 1)).days
+                (post_time - date_epoch()).days
             if days_since_epoch < post_days_since_epoch:
                 continue
             if days_since_epoch == post_days_since_epoch:
@@ -23,6 +23,7 @@ from session import post_json
 from session import post_image
 from session import create_session
 from session import get_json_valid
+from utils import date_utcnow
 from utils import dangerous_markup
 from utils import remove_html
 from utils import get_media_extensions

@@ -1303,7 +1304,7 @@ def shares_catalog_account_endpoint(base_dir: str, http_prefix: str,
         "DFC:supplies": []
     }

-    curr_date = datetime.datetime.utcnow()
+    curr_date = date_utcnow()
     curr_date_str = curr_date.strftime("%Y-%m-%d")

     shares_filename = \

@@ -1340,7 +1341,8 @@ def shares_catalog_account_endpoint(base_dir: str, http_prefix: str,
         if not re.match(match_pattern, description):
             continue

-        expire_date = datetime.datetime.fromtimestamp(item['expire'])
+        expire_date = datetime.datetime.fromtimestamp(item['expire'],
+                                                      datetime.timezone.utc)
         expire_date_str = expire_date.strftime("%Y-%m-%dT%H:%M:%SZ")

         share_id = _get_valid_shared_item_id(owner, item['displayName'])

@@ -1390,7 +1392,7 @@ def shares_catalog_endpoint(base_dir: str, http_prefix: str,
         "DFC:supplies": []
     }

-    curr_date = datetime.datetime.utcnow()
+    curr_date = date_utcnow()
     curr_date_str = curr_date.strftime("%Y-%m-%d")

     for _, dirs, _ in os.walk(base_dir + '/accounts'):
tests.py (35 changed lines)

@@ -56,6 +56,8 @@ from follow import clear_followers
 from follow import send_follow_request_via_server
 from follow import send_unfollow_request_via_server
 from siteactive import site_is_active
+from utils import date_from_string_format
+from utils import date_utcnow
 from utils import is_right_to_left_text
 from utils import remove_markup_tag
 from utils import remove_style_within_html

@@ -6415,7 +6417,7 @@ def _test_spoofed_geolocation() -> None:
     assert not point_in_nogo(test_square, -5, -5)
     assert not point_in_nogo(test_square, -5, 5)
     nogo_list = []
-    curr_time = datetime.datetime.utcnow()
+    curr_time = date_utcnow()
     decoy_seed = 7634681
     city_radius = 0.1
     coords = spoof_geolocation('', 'los angeles', curr_time,

@@ -6459,7 +6461,7 @@ def _test_spoofed_geolocation() -> None:
     if hour < 10:
         hour_str = '0' + hour_str
     date_time_str = "2021-05-" + str(day_number) + " " + hour_str + ":14"
-    curr_time = datetime.datetime.strptime(date_time_str, "%Y-%m-%d %H:%M")
+    curr_time = date_from_string_format(date_time_str, ["%Y-%m-%d %H:%M"])
     coords = spoof_geolocation('', 'new york, usa', curr_time,
                                decoy_seed, cities_list, nogo_list)
     longitude = coords[1]

@@ -6496,7 +6498,7 @@ def _test_spoofed_geolocation() -> None:
     if hour < 10:
         hour_str = '0' + hour_str
     date_time_str = "2021-05-" + str(day_number) + " " + hour_str + ":14"
-    curr_time = datetime.datetime.strptime(date_time_str, "%Y-%m-%d %H:%M")
+    curr_time = date_from_string_format(date_time_str, ["%Y-%m-%d %H:%M"])
     coords = spoof_geolocation('', 'london, england', curr_time,
                                decoy_seed, cities_list, nogo_list)
     longitude = coords[1]

@@ -6546,7 +6548,7 @@ def _test_spoofed_geolocation() -> None:
     if hour < 10:
         hour_str = '0' + hour_str
     date_time_str = "2021-05-" + str(day_number) + " " + hour_str + ":14"
-    curr_time = datetime.datetime.strptime(date_time_str, "%Y-%m-%d %H:%M")
+    curr_time = date_from_string_format(date_time_str, ["%Y-%m-%d %H:%M"])
     coords = spoof_geolocation('', 'SAN FRANCISCO, USA', curr_time,
                                decoy_seed, cities_list, nogo_list)
     longitude = coords[1]

@@ -6600,7 +6602,7 @@ def _test_spoofed_geolocation() -> None:
     if hour < 10:
         hour_str = '0' + hour_str
     date_time_str = "2021-05-" + str(day_number) + " " + hour_str + ":14"
-    curr_time = datetime.datetime.strptime(date_time_str, "%Y-%m-%d %H:%M")
+    curr_time = date_from_string_format(date_time_str, ["%Y-%m-%d %H:%M"])
     coords = spoof_geolocation('', 'SEATTLE, USA', curr_time,
                                decoy_seed, cities_list, nogo_list)
     longitude = coords[1]

@@ -6847,7 +6849,15 @@ def _test_date_conversions() -> None:
     print('test_date_conversions')
     date_str = "2021-05-16T14:37:41Z"
     date_sec = date_string_to_seconds(date_str)
+    date_str2 = "2021-05-16T14:38:44Z"
+    date_sec2 = date_string_to_seconds(date_str2)
+    sec_diff = date_sec2 - date_sec
+    if sec_diff != 63:
+        print('seconds diff = ' + str(sec_diff))
+    assert sec_diff == 63
     date_str2 = date_seconds_to_string(date_sec)
+    if date_str != date_str2:
+        print(str(date_sec) + ' ' + str(date_str) + ' != ' + str(date_str2))
     assert date_str == date_str2


@@ -7443,7 +7453,7 @@ def _test_published_to_local_timezone() -> None:
     published_str = '2022-02-25T20:15:00Z'
     timezone = 'Europe/Berlin'
     published = \
-        datetime.datetime.strptime(published_str, "%Y-%m-%dT%H:%M:%SZ")
+        date_from_string_format(published_str, ["%Y-%m-%dT%H:%M:%S%z"])
     datetime_object = \
         convert_published_to_local_timezone(published, timezone)
     local_time_str = datetime_object.strftime("%a %b %d, %H:%M")

@@ -7451,7 +7461,7 @@ def _test_published_to_local_timezone() -> None:

     timezone = 'Asia/Seoul'
     published = \
-        datetime.datetime.strptime(published_str, "%Y-%m-%dT%H:%M:%SZ")
+        date_from_string_format(published_str, ["%Y-%m-%dT%H:%M:%S%z"])
     datetime_object = \
         convert_published_to_local_timezone(published, timezone)
     local_time_str = datetime_object.strftime("%a %b %d, %H:%M")

@@ -8200,6 +8210,16 @@ def _test_format_mixed_rtl() -> None:
     assert result == expected


+def _test_dateformat():
+    print('dateformat')
+    date_str = 'Mon, 20 Nov 2023 16:51:15 GMT'
+    formats = ("%a, %d %b %Y %H:%M:%S %Z",
+               "%a, %d %b %Y %H:%M:%S %z")
+    dtime = date_from_string_format(date_str, formats)
+    print(str(dtime))
+    assert dtime.tzinfo
+
+
 def run_all_tests():
     base_dir = os.getcwd()
     print('Running tests...')

@@ -8217,6 +8237,7 @@ def run_all_tests():
     _test_checkbox_names()
     _test_thread_functions()
     _test_functions()
+    _test_dateformat()
     _test_is_right_to_left()
     _test_format_mixed_rtl()
     _test_remove_tag()
@@ -10,13 +10,13 @@ __module_group__ = "Core"
 import threading
 import sys
 import time
-import datetime
 from socket import error as SocketError
+from utils import date_utcnow


 class thread_with_trace(threading.Thread):
     def __init__(self, *args, **keywords):
-        self.start_time = datetime.datetime.utcnow()
+        self.start_time = date_utcnow()
         self.is_started = False
         tries = 0
         while tries < 3:

@@ -96,7 +96,7 @@ def remove_dormant_threads(base_dir: str, threads_list: [], debug: bool,

     timeout_secs = int(timeout_mins * 60)
     dormant_threads = []
-    curr_time = datetime.datetime.utcnow()
+    curr_time = date_utcnow()
     changed = False

     # which threads are dormant?
utils.py (136 changed lines)

@@ -46,6 +46,64 @@ INVALID_ACTOR_URL_CHARACTERS = (
 )


+def _utc_mktime(utc_tuple):
+    """Returns number of seconds elapsed since epoch
+    Note that no timezone are taken into consideration.
+    utc tuple must be: (year, month, day, hour, minute, second)
+    """
+
+    if len(utc_tuple) == 6:
+        utc_tuple += (0, 0, 0)
+    return time.mktime(utc_tuple) - time.mktime((1970, 1, 1, 0, 0, 0, 0, 0, 0))
+
+
+def _datetime_to_timestamp(dtime):
+    """Converts a datetime object to UTC timestamp"""
+    return int(_utc_mktime(dtime.timetuple()))
+
+
+def date_utcnow():
+    """returns the time now
+    """
+    return datetime.datetime.now(datetime.timezone.utc)
+
+
+def _date_from_numbers(year: int, month: int, day: int,
+                       hour: int, mins: int):
+    """returns an offset-aware datetime
+    """
+    return datetime.datetime(year, month, day, hour, mins, 0,
+                             tzinfo=datetime.timezone.utc)
+
+
+def date_from_string_format(date_str: str, formats: []):
+    """returns an offset-aware datetime from a string date
+    """
+    if not formats:
+        formats = ("%a, %d %b %Y %H:%M:%S %Z",
+                   "%a, %d %b %Y %H:%M:%S %z",
+                   "%Y-%m-%dT%H:%M:%S%z")
+    dtime = None
+    for date_format in formats:
+        try:
+            dtime = \
+                datetime.datetime.strptime(date_str, date_format)
+        except BaseException:
+            continue
+        break
+    if not dtime:
+        return None
+    if not dtime.tzinfo:
+        dtime = dtime.replace(tzinfo=datetime.timezone.utc)
+    return dtime
+
+
+def date_epoch():
+    """returns an offset-aware version of epoch
+    """
+    return _date_from_numbers(1970, 1, 1, 0, 0)
+
+
 def get_attributed_to(field) -> str:
     """Returns the actor
     """

@@ -461,15 +519,14 @@ def has_users_path(path_str: str) -> bool:
 def valid_post_date(published: str, max_age_days: int, debug: bool) -> bool:
     """Returns true if the published date is recent and is not in the future
     """
-    baseline_time = datetime.datetime(1970, 1, 1)
+    baseline_time = date_epoch()

-    days_diff = datetime.datetime.utcnow() - baseline_time
+    days_diff = date_utcnow() - baseline_time
     now_days_since_epoch = days_diff.days

-    try:
-        post_time_object = \
-            datetime.datetime.strptime(published, "%Y-%m-%dT%H:%M:%SZ")
-    except BaseException:
+    post_time_object = \
+        date_from_string_format(published, ["%Y-%m-%dT%H:%M:%S%z"])
+    if not post_time_object:
         if debug:
             print('EX: valid_post_date invalid published date ' +
                   str(published))

@@ -524,9 +581,9 @@ def is_dormant(base_dir: str, nickname: str, domain: str, actor: str,

     if days_since_epoch_str:
         days_since_epoch = int(days_since_epoch_str)
-        curr_time = datetime.datetime.utcnow()
+        curr_time = date_utcnow()
         curr_days_since_epoch = \
-            (curr_time - datetime.datetime(1970, 1, 1)).days
+            (curr_time - date_epoch()).days
         time_diff_months = \
             int((curr_days_since_epoch - days_since_epoch) / 30)
         if time_diff_months >= dormant_months:

@@ -1066,11 +1123,11 @@ def get_status_number(published_str: str = None) -> (str, str):
     """Returns the status number and published date
     """
     if not published_str:
-        curr_time = datetime.datetime.utcnow()
+        curr_time = date_utcnow()
     else:
         curr_time = \
-            datetime.datetime.strptime(published_str, '%Y-%m-%dT%H:%M:%SZ')
-    days_since_epoch = (curr_time - datetime.datetime(1970, 1, 1)).days
+            date_from_string_format(published_str, ['%Y-%m-%dT%H:%M:%S%z'])
+    days_since_epoch = (curr_time - date_epoch()).days
     # status is the number of seconds since epoch
     status_number = \
         str(((days_since_epoch * 24 * 60 * 60) +

@@ -1806,8 +1863,8 @@ def locate_news_arrival(base_dir: str, domain: str,
             arrival = arrival_file.read()
             if arrival:
                 arrival_date = \
-                    datetime.datetime.strptime(arrival,
-                                               "%Y-%m-%dT%H:%M:%SZ")
+                    date_from_string_format(arrival,
+                                            ["%Y-%m-%dT%H:%M:%S%z"])
                 return arrival_date

     return None

@@ -1958,18 +2015,17 @@ def can_reply_to(base_dir: str, nickname: str, domain: str,
     published = _get_published_date(post_json_object)
     if not published:
         return False
-    try:
-        pub_date = datetime.datetime.strptime(published, '%Y-%m-%dT%H:%M:%SZ')
-    except BaseException:
+    pub_date = date_from_string_format(published, ['%Y-%m-%dT%H:%M:%S%z'])
+    if not pub_date:
         print('EX: can_reply_to unrecognized published date ' + str(published))
         return False
     if not curr_date_str:
-        curr_date = datetime.datetime.utcnow()
+        curr_date = date_utcnow()
     else:
-        try:
-            curr_date = \
-                datetime.datetime.strptime(curr_date_str, '%Y-%m-%dT%H:%M:%SZ')
-        except BaseException:
+        curr_date = \
+            date_from_string_format(curr_date_str, ['%Y-%m-%dT%H:%M:%S%z'])
+        if not curr_date:
             print('EX: can_reply_to unrecognized current date ' +
                   str(curr_date_str))
             return False

@@ -2786,7 +2842,8 @@ def file_last_modified(filename: str) -> str:
     """Returns the date when a file was last modified
     """
     time_val = os.path.getmtime(filename)
-    modified_time = datetime.datetime.fromtimestamp(time_val)
+    modified_time = \
+        datetime.datetime.fromtimestamp(time_val, datetime.timezone.utc)
     return modified_time.strftime("%Y-%m-%dT%H:%M:%SZ")


@@ -3219,7 +3276,7 @@ def week_day_of_month_start(month_number: int, year: int) -> int:
     """Gets the day number of the first day of the month
     1=sun, 7=sat
     """
-    first_day_of_month = datetime.datetime(year, month_number, 1, 0, 0)
+    first_day_of_month = _date_from_numbers(year, month_number, 1, 0, 0)
     return int(first_day_of_month.strftime("%w")) + 1


@@ -3267,24 +3324,24 @@ def is_recent_post(post_json_object: {}, max_days: int) -> bool:
         return False
     if not isinstance(post_json_object['object']['published'], str):
         return False
-    curr_time = datetime.datetime.utcnow()
-    days_since_epoch = (curr_time - datetime.datetime(1970, 1, 1)).days
+    curr_time = date_utcnow()
+    days_since_epoch = (curr_time - date_epoch()).days
     recently = days_since_epoch - max_days

     published_date_str = post_json_object['object']['published']
     if '.' in published_date_str:
         published_date_str = published_date_str.split('.')[0] + 'Z'
-    try:
-        published_date = \
-            datetime.datetime.strptime(published_date_str,
-                                       "%Y-%m-%dT%H:%M:%SZ")
-    except BaseException:
+    published_date = \
+        date_from_string_format(published_date_str,
+                                ["%Y-%m-%dT%H:%M:%S%z"])
+    if not published_date:
         print('EX: is_recent_post unrecognized published date ' +
               str(published_date_str))
         return False

     published_days_since_epoch = \
-        (published_date - datetime.datetime(1970, 1, 1)).days
+        (published_date - date_epoch()).days
     if published_days_since_epoch < recently:
         return False
     return True

@@ -3725,21 +3782,24 @@ def is_float(value) -> bool:
 def date_string_to_seconds(date_str: str) -> int:
     """Converts a date string (eg "published") into seconds since epoch
     """
-    try:
-        expiry_time = \
-            datetime.datetime.strptime(date_str, '%Y-%m-%dT%H:%M:%SZ')
-    except BaseException:
+    expiry_time = \
+        date_from_string_format(date_str, ['%Y-%m-%dT%H:%M:%S%z'])
+    if not expiry_time:
         print('EX: date_string_to_seconds unable to parse date ' +
               str(date_str))
         return None
-    return int(datetime.datetime.timestamp(expiry_time))
+    return _datetime_to_timestamp(expiry_time)


 def date_seconds_to_string(date_sec: int) -> str:
     """Converts a date in seconds since epoch to a string
     """
-    this_date = datetime.datetime.fromtimestamp(date_sec)
-    return this_date.strftime("%Y-%m-%dT%H:%M:%SZ")
+    this_date = \
+        datetime.datetime.fromtimestamp(date_sec, datetime.timezone.utc)
+    if not this_date.tzinfo:
+        this_date = this_date.replace(tzinfo=datetime.timezone.utc)
+    this_date_tz = this_date.astimezone(datetime.timezone.utc)
+    return this_date_tz.strftime("%Y-%m-%dT%H:%M:%SZ")


 def has_group_type(base_dir: str, actor: str, person_cache: {},
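The rewritten conversion helpers above are expected to round-trip, which is what the extended _test_date_conversions in tests.py (earlier in this diff) exercises. A sketch of the intended behaviour, assuming Python 3.7+ where %z accepts a literal 'Z':

from utils import date_string_to_seconds, date_seconds_to_string

date_sec = date_string_to_seconds("2021-05-16T14:37:41Z")
date_sec2 = date_string_to_seconds("2021-05-16T14:38:44Z")
assert date_sec2 - date_sec == 63  # interval arithmetic in whole seconds

# seconds-since-epoch converts back to the same UTC string
assert date_seconds_to_string(date_sec) == "2021-05-16T14:37:41Z"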
@@ -24,6 +24,7 @@ from utils import acct_dir
 from utils import local_actor_url
 from utils import replace_users_with_at
 from utils import language_right_to_left
+from utils import date_from_string_format
 from happening import get_todays_events
 from happening import get_calendar_events
 from happening import get_todays_events_icalendar

@@ -197,14 +198,14 @@ def _html_calendar_day(person_cache: {}, translate: {},
         if evnt.get('startTime'):
             start_time_str = evnt['startTime']
             event_date = \
-                datetime.strptime(start_time_str,
-                                  "%Y-%m-%dT%H:%M:%S%z")
+                date_from_string_format(start_time_str,
+                                        ["%Y-%m-%dT%H:%M:%S%z"])
             event_time = event_date.strftime("%H:%M").strip()
         if evnt.get('endTime'):
             end_time_str = evnt['endTime']
             event_end_date = \
-                datetime.strptime(end_time_str,
-                                  "%Y-%m-%dT%H:%M:%S%z")
+                date_from_string_format(end_time_str,
+                                        ["%Y-%m-%dT%H:%M:%S%z"])
             event_end_time = \
                 event_end_date.strftime("%H:%M").strip()
         if 'public' in evnt:
@@ -8,7 +8,6 @@ __status__ = "Production"
 __module_group__ = "Web Interface Columns"

 import os
-from datetime import datetime
 from content import remove_long_words
 from content import limit_repeated_words
 from utils import get_fav_filename_from_url

@@ -22,6 +21,7 @@ from utils import is_editor
 from utils import get_config_param
 from utils import remove_domain_port
 from utils import acct_dir
+from utils import date_from_string_format
 from posts import is_moderator
 from newswire import get_newswire_favicon_url
 from webapp_utils import get_right_image_file

@@ -234,10 +234,10 @@ def _html_newswire(base_dir: str, newswire: {}, nickname: str, moderator: bool,
             item[0] = item[0].split('CDATA[')[1]
             if ']' in item[0]:
                 item[0] = item[0].split(']')[0]
-    try:
-        published_date = \
-            datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S%z")
-    except BaseException:
+    published_date = \
+        date_from_string_format(date_str, ["%Y-%m-%d %H:%M:%S%z"])
+    if not published_date:
         print('EX: _html_newswire bad date format ' + date_str)
         continue
     date_shown = published_date.strftime("%Y-%m-%d %H:%M")

@@ -448,7 +448,7 @@ def html_citations(base_dir: str, nickname: str, domain: str,
             selected_str = ' checked'

         published_date = \
-            datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S%z")
+            date_from_string_format(date_str, ["%Y-%m-%d %H:%M:%S%z"])
         date_shown = published_date.strftime("%Y-%m-%d %H:%M")

         title = remove_long_words(item[0], 16, []).replace('\n', '<br>')
@@ -12,6 +12,7 @@ from datetime import datetime
 from utils import get_nickname_from_actor
 from utils import get_config_param
 from utils import escape_text
+from utils import date_utcnow
 from categories import get_hashtag_categories
 from categories import get_hashtag_category
 from webapp_utils import set_custom_background

@@ -37,7 +38,7 @@ def get_hashtag_categories_feed(base_dir: str,
         '    <title>#categories</title>\n'

     rss_date_str = \
-        datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S UT")
+        date_utcnow().strftime("%a, %d %b %Y %H:%M:%S UT")

     for category_str, hashtag_list in hashtag_categories.items():
         rss_str += \

@@ -67,7 +68,7 @@ def html_hash_tag_swarm(base_dir: str, actor: str, translate: {}) -> str:
     """Returns a tag swarm of today's hashtags
     """
     max_tag_length = 42
-    curr_time = datetime.utcnow()
+    curr_time = date_utcnow()
     days_since_epoch = (curr_time - datetime(1970, 1, 1)).days
     days_since_epoch_str = str(days_since_epoch) + ' '
     days_since_epoch_str2 = str(days_since_epoch - 1) + ' '

@@ -95,7 +96,9 @@ def html_hash_tag_swarm(base_dir: str, actor: str, translate: {}) -> str:

         # get last modified datetime
         mod_time_since_epoc = os.path.getmtime(tags_filename)
-        last_modified_date = datetime.fromtimestamp(mod_time_since_epoc)
+        last_modified_date = \
+            datetime.fromtimestamp(mod_time_since_epoc,
+                                   datetime.timezone.utc)
         file_days_since_epoch = \
             (last_modified_date - datetime(1970, 1, 1)).days
@@ -13,7 +13,6 @@ import urllib.parse
 from dateutil.parser import parse
 from auth import create_password
 from git import is_git_patch
-from datetime import datetime
 from cache import get_person_from_cache
 from bookmarks import bookmarked_by_person
 from announce import announced_by_person

@@ -25,6 +24,7 @@ from posts import post_is_muted
 from posts import get_person_box
 from posts import download_announce
 from posts import populate_replies_json
+from utils import date_from_string_format
 from utils import remove_markup_tag
 from utils import ap_proxy_type
 from utils import remove_style_within_html

@@ -1146,11 +1146,12 @@ def _get_published_date_str(post_json_object: {},
     if '.' not in published_str:
         if '+' not in published_str:
             datetime_object = \
-                datetime.strptime(published_str, "%Y-%m-%dT%H:%M:%SZ")
+                date_from_string_format(published_str, ["%Y-%m-%dT%H:%M:%S%z"])
         else:
+            pub_str = published_str.split('+')[0] + 'Z'
             datetime_object = \
-                datetime.strptime(published_str.split('+')[0] + 'Z',
-                                  "%Y-%m-%dT%H:%M:%SZ")
+                date_from_string_format(pub_str,
+                                        ["%Y-%m-%dT%H:%M:%S%z"])
     else:
         published_str = \
             published_str.replace('T', ' ').split('.')[0]
@@ -10,7 +10,7 @@ __module_group__ = "Web Interface"
 import os
 from shutil import copyfile
 import urllib.parse
-from datetime import datetime
+from utils import date_from_string_format
 from utils import get_attributed_to
 from utils import get_actor_from_post_id
 from utils import remove_html

@@ -1348,7 +1348,8 @@ def rss_hashtag_search(nickname: str, domain: str, port: int,
            post_json_object['object'].get('attributedTo') and \
            post_json_object['object'].get('published'):
         published = post_json_object['object']['published']
-        pub_date = datetime.strptime(published, "%Y-%m-%dT%H:%M:%SZ")
+        pub_date = date_from_string_format(published,
+                                           ["%Y-%m-%dT%H:%M:%S%z"])
         rss_date_str = pub_date.strftime("%a, %d %b %Y %H:%M:%S UT")
         hashtag_feed += ' <item>'
         attrib_field = post_json_object['object']['attributedTo']