mirror of https://gitlab.com/bashrc2/epicyon

commit 3b6a014e07
Merge branch 'main' of gitlab.com:bashrc2/epicyon
@@ -15710,6 +15710,8 @@ class PubServer(BaseHTTPRequestHandler):
             if html_getreq and not graph.endswith('.json'):
                 if graph == 'post':
                     graph = '_POST'
+                elif graph == 'inbox':
+                    graph = 'INBOX'
                 elif graph == 'get':
                     graph = '_GET'
                 msg = \
@@ -15728,6 +15730,8 @@ class PubServer(BaseHTTPRequestHandler):
             graph = graph.replace('.json', '')
             if graph == 'post':
                 graph = '_POST'
+            elif graph == 'inbox':
+                graph = 'INBOX'
             elif graph == 'get':
                 graph = '_GET'
             watch_points_json = \
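
These two hunks extend the performance graph selector with the new INBOX option: the ?graph= query value is normalized into the key prefix under which watch points are recorded, alongside the existing GET and POST graphs. A minimal sketch of that mapping in isolation (the helper name is hypothetical, not part of the commit):

    def _graph_to_fitness_id(graph: str) -> str:
        # map the ?graph= query value onto the watch point prefix
        # used when recording fitness data
        if graph == 'post':
            return '_POST'
        if graph == 'inbox':
            return 'INBOX'
        if graph == 'get':
            return '_GET'
        return graph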
@@ -20800,6 +20804,9 @@ def run_daemon(preferred_podcast_formats: [],
         'Reminder': 'r'
     }
 
+    # timeout used when getting rss feeds
+    httpd.rss_timeout_sec = 20
+
     # timeout used when checking for actor changes when clicking an avatar
     # and entering person options screen
     if check_actor_timeout < 2:
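
This 20 second default is threaded through to the feed fetches further down in this commit: run_newswire_daemon passes httpd.rss_timeout_sec into get_dict_from_newswire, which hands it to get_rss as timeout_sec.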
@@ -1162,10 +1162,11 @@ if podcast_formats_str:
         preferred_podcast_formats.append(pod_format)
 
 if args.rss:
+    timeout_sec = 20
     session = create_session(None)
     testRSS = get_rss(base_dir, domain, session, args.rss,
                       False, False, 1000, 1000, 1000, 1000, debug,
-                      preferred_podcast_formats)
+                      preferred_podcast_formats, timeout_sec)
     pprint(testRSS)
     sys.exit()
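
Assuming the argument parser maps args.rss to an --rss option (the option definition is not part of this diff), a single feed can now be fetched with the 20 second timeout from the command line, for example:

    python epicyon.py --rss https://example.com/feed.xml

which pretty-prints the parsed feed dict and exits.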
@@ -59,8 +59,8 @@ def sorted_watch_points(fitness: {}, fitness_id: str) -> []:
     for watch_point, item in fitness['performance'][fitness_id].items():
         if not item.get('total'):
             continue
-        average_time = item['total'] * 1000 / item['ctr']
-        average_time_str = str(average_time).zfill(8)
+        average_time = int(item['total'] * 1000 / item['ctr'])
+        average_time_str = str(average_time).zfill(16)
         result.append(average_time_str + ' ' + watch_point)
     result.sort(reverse=True)
     return result
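
This fix matters because the watch points are sorted as strings: zfill(8) on a float such as str(123.456) yields values whose lexicographic order does not match their numeric order, and large values overflow the 8 character field. Converting to int and padding to a fixed 16 characters makes string order agree with numeric order. A small illustration:

    # zero-padded integers of equal width sort in numeric order
    times = [7, 1500, 42]
    padded = [str(t).zfill(16) for t in times]
    assert sorted(padded) == [str(t).zfill(16) for t in sorted(times)]

    # unpadded strings do not: '1500' sorts before '42'
    assert sorted(['1500', '42']) == ['1500', '42']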
diff --git a/inbox.py b/inbox.py
@@ -125,6 +125,7 @@ from notifyOnPost import notify_when_person_posts
 from conversation import update_conversation
 from webapp_hashtagswarm import html_hash_tag_swarm
 from person import valid_sending_actor
+from fitnessFunctions import fitness_performance
 
 
 def _store_last_post_id(base_dir: str, nickname: str, domain: str,
@@ -178,10 +179,11 @@ def _update_cached_hashtag_swarm(base_dir: str, nickname: str, domain: str,
         curr_date = datetime.datetime.utcnow()
         time_diff = curr_date - modified_date
         diff_mins = int(time_diff.total_seconds() / 60)
-        if diff_mins < 10:
+        if diff_mins < 30:
             # was saved recently, so don't save again
             # This avoids too much disk I/O
             save_swarm = False
+            print('Not updating hashtag swarm')
         else:
             print('Updating cached hashtag swarm, last changed ' +
                   str(diff_mins) + ' minutes ago')
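
Raising the threshold from 10 to 30 minutes trades freshness for less disk I/O: the cached swarm is now rewritten at most twice per hour. The same throttling idea can be expressed directly from the cache file's modification time; a minimal sketch under that assumption (names are hypothetical):

    import os
    import time

    def cache_is_fresh(cache_filename: str, max_age_mins: int = 30) -> bool:
        # True if the cache exists and was written recently enough
        # that rewriting it would only add disk I/O
        if not os.path.isfile(cache_filename):
            return False
        age_mins = (time.time() - os.path.getmtime(cache_filename)) / 60
        return age_mins < max_age_mins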
@@ -198,6 +200,7 @@ def _update_cached_hashtag_swarm(base_dir: str, nickname: str, domain: str,
         except OSError:
             print('EX: unable to write cached hashtag swarm ' +
                   cached_hashtag_swarm_filename)
+    remove_old_hashtags(base_dir, 3)
     return False
 
 
@@ -224,8 +227,6 @@ def store_hash_tags(base_dir: str, nickname: str, domain: str,
             print('Creating tags directory')
             os.mkdir(tags_dir)
 
-    hashtag_categories = get_hashtag_categories(base_dir)
-
     hashtags_ctr = 0
     for tag in post_json_object['object']['tag']:
         if not tag.get('type'):
@@ -246,33 +247,43 @@ def store_hash_tags(base_dir: str, nickname: str, domain: str,
         days_since_epoch = days_diff.days
         tag_line = \
             str(days_since_epoch) + ' ' + nickname + ' ' + post_url + '\n'
-        hashtags_ctr += 1
+        hashtag_added = False
         if not os.path.isfile(tags_filename):
             try:
                 with open(tags_filename, 'w+') as tags_file:
                     tags_file.write(tag_line)
+                    hashtag_added = True
             except OSError:
                 print('EX: unable to write ' + tags_filename)
         else:
-            if post_url not in open(tags_filename).read():
-                try:
-                    with open(tags_filename, 'r+') as tags_file:
-                        content = tags_file.read()
-                        if tag_line not in content:
-                            tags_file.seek(0, 0)
-                            tags_file.write(tag_line + content)
+            content = ''
+            try:
+                with open(tags_filename, 'r') as tags_file:
+                    content = tags_file.read()
+            except OSError:
+                pass
+            if post_url not in content:
+                content = tag_line + content
+                try:
+                    with open(tags_filename, 'w+') as tags_file:
+                        tags_file.write(content)
+                        hashtag_added = True
                 except OSError as ex:
                     print('EX: Failed to write entry to tags file ' +
                           tags_filename + ' ' + str(ex))
             remove_old_hashtags(base_dir, 3)
 
-        # automatically assign a category to the tag if possible
-        category_filename = tags_dir + '/' + tag_name + '.category'
-        if not os.path.isfile(category_filename):
-            category_str = \
-                guess_hashtag_category(tag_name, hashtag_categories)
-            if category_str:
-                set_hashtag_category(base_dir, tag_name, category_str, False)
+        if hashtag_added:
+            hashtags_ctr += 1
+            # automatically assign a category to the tag if possible
+            category_filename = tags_dir + '/' + tag_name + '.category'
+            if not os.path.isfile(category_filename):
+                hashtag_categories = get_hashtag_categories(base_dir)
+                category_str = \
+                    guess_hashtag_category(tag_name, hashtag_categories)
+                if category_str:
+                    set_hashtag_category(base_dir, tag_name,
+                                         category_str, False)
 
     # if some hashtags were found then recalculate the swarm
     # ready for later display
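
The rewrite separates the read from the write and only counts a hashtag once it has actually been stored: the file is read defensively, the new line is prepended in memory when the post URL is absent, and the buffer is written back in one pass, with hashtag_added gating both the counter and the category guess. A condensed sketch of this read-modify-write pattern, assuming the same one-entry-per-line format:

    def prepend_if_absent(filename: str, line: str, key: str) -> bool:
        # returns True if the line was newly added at the top of the file
        content = ''
        try:
            with open(filename, 'r') as fp:
                content = fp.read()
        except OSError:
            pass
        if key in content:
            return False
        try:
            with open(filename, 'w+') as fp:
                fp.write(line + content)
        except OSError:
            return False
        return True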
@@ -3526,7 +3537,7 @@ def _check_for_git_patches(base_dir: str, nickname: str, domain: str,
     return 0
 
 
-def _inbox_after_initial(server,
+def _inbox_after_initial(server, inbox_start_time,
                          recent_posts_cache: {}, max_recent_posts: int,
                          session, session_onion, session_i2p,
                          key_id: str, handle: str, message_json: {},
@@ -3577,6 +3588,10 @@ def _inbox_after_initial(server,
 
     post_is_dm = False
     is_group = _group_handle(base_dir, handle)
+    fitness_performance(inbox_start_time, server.fitness,
+                        'INBOX', '_group_handle',
+                        debug)
+    inbox_start_time = time.time()
 
     handle_name = handle.split('@')[0]
 
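
From this point on, nearly every stage of _inbox_after_initial is bracketed by the same two-line pattern: record the time elapsed since inbox_start_time under an 'INBOX' watch point, then reset the clock, so each stage shows up as a separate entry on the new INBOX performance graph. The real recorder is fitness_performance in fitnessFunctions.py; a sketch of the accumulator shape it implies (an assumption inferred from the sorted_watch_points hunk above, which reads item['total'] and item['ctr']):

    import time

    def record_watch_point(start_time: float, fitness: {},
                           fitness_id: str, watch_point: str) -> None:
        # accumulate total elapsed seconds and a call counter per watch point
        elapsed = time.time() - start_time
        perf = fitness.setdefault('performance', {})
        points = perf.setdefault(fitness_id, {})
        item = points.setdefault(watch_point, {'total': 0.0, 'ctr': 0})
        item['total'] += elapsed
        item['ctr'] += 1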
@@ -3602,6 +3617,10 @@ def _inbox_after_initial(server,
                      bold_reading):
         if debug:
             print('DEBUG: Like accepted from ' + actor)
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', '_receive_like',
+                            debug)
+        inbox_start_time = time.time()
         return False
 
     if _receive_undo_like(recent_posts_cache,
@@ -3625,6 +3644,10 @@ def _inbox_after_initial(server,
                           bold_reading):
         if debug:
             print('DEBUG: Undo like accepted from ' + actor)
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', '_receive_undo_like',
+                            debug)
+        inbox_start_time = time.time()
         return False
 
     if _receive_reaction(recent_posts_cache,
@@ -3649,6 +3672,10 @@ def _inbox_after_initial(server,
                          bold_reading):
         if debug:
             print('DEBUG: Reaction accepted from ' + actor)
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', '_receive_reaction',
+                            debug)
+        inbox_start_time = time.time()
         return False
 
     if _receive_undo_reaction(recent_posts_cache,
@@ -3672,6 +3699,10 @@ def _inbox_after_initial(server,
                               bold_reading):
         if debug:
             print('DEBUG: Undo reaction accepted from ' + actor)
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', '_receive_undo_reaction',
+                            debug)
+        inbox_start_time = time.time()
         return False
 
     if _receive_bookmark(recent_posts_cache,
@@ -3695,6 +3726,10 @@ def _inbox_after_initial(server,
                          bold_reading):
         if debug:
             print('DEBUG: Bookmark accepted from ' + actor)
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', '_receive_bookmark',
+                            debug)
+        inbox_start_time = time.time()
         return False
 
     if _receive_undo_bookmark(recent_posts_cache,
@@ -3718,10 +3753,18 @@ def _inbox_after_initial(server,
                               bold_reading):
         if debug:
             print('DEBUG: Undo bookmark accepted from ' + actor)
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', '_receive_undo_bookmark',
+                            debug)
+        inbox_start_time = time.time()
         return False
 
     if is_create_inside_announce(message_json):
         message_json = message_json['object']
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', 'is_create_inside_announce',
+                            debug)
+        inbox_start_time = time.time()
 
     if _receive_announce(recent_posts_cache,
                          session, handle, is_group,
@@ -3745,6 +3788,10 @@ def _inbox_after_initial(server,
                          bold_reading):
         if debug:
             print('DEBUG: Announce accepted from ' + actor)
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', '_receive_announce',
+                            debug)
+        inbox_start_time = time.time()
 
     if _receive_undo_announce(recent_posts_cache,
                               session, handle, is_group,
@@ -3758,6 +3805,10 @@ def _inbox_after_initial(server,
                               debug):
         if debug:
             print('DEBUG: Undo announce accepted from ' + actor)
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', '_receive_undo_announce',
+                            debug)
+        inbox_start_time = time.time()
         return False
 
     if _receive_delete(session, handle, is_group,
@@ -3772,6 +3823,10 @@ def _inbox_after_initial(server,
                        recent_posts_cache):
         if debug:
             print('DEBUG: Delete accepted from ' + actor)
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', '_receive_delete',
+                            debug)
+        inbox_start_time = time.time()
         return False
 
     if debug:
@@ -3795,6 +3850,10 @@ def _inbox_after_initial(server,
                            allow_local_network_access, debug,
                            system_language, http_prefix,
                            domain_full, person_cache):
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', '_valid_post_content',
+                            debug)
+        inbox_start_time = time.time()
         # is the sending actor valid?
         if not valid_sending_actor(session, base_dir, nickname, domain,
                                    person_cache, post_json_object,
@@ -3802,7 +3861,15 @@ def _inbox_after_initial(server,
             if debug:
                 print('Inbox sending actor is not valid ' +
                       str(post_json_object))
+            fitness_performance(inbox_start_time, server.fitness,
+                                'INBOX', 'not_valid_sending_actor',
+                                debug)
+            inbox_start_time = time.time()
             return False
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', 'valid_sending_actor',
+                            debug)
+        inbox_start_time = time.time()
 
     if post_json_object.get('object'):
         json_obj = post_json_object['object']
@@ -3813,19 +3880,35 @@ def _inbox_after_initial(server,
 
     if _check_for_git_patches(base_dir, nickname, domain,
                               handle, json_obj) == 2:
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', '_check_for_git_patches',
+                            debug)
+        inbox_start_time = time.time()
         return False
 
     # replace YouTube links, so they get less tracking data
     replace_you_tube(post_json_object, yt_replace_domain, system_language)
+    fitness_performance(inbox_start_time, server.fitness,
+                        'INBOX', 'replace_you_tube',
+                        debug)
+    inbox_start_time = time.time()
     # replace twitter link domains, so that you can view twitter posts
     # without having an account
     replace_twitter(post_json_object, twitter_replacement_domain,
                     system_language)
+    fitness_performance(inbox_start_time, server.fitness,
+                        'INBOX', 'replace_you_twitter',
+                        debug)
+    inbox_start_time = time.time()
 
     # list of indexes to be updated
     update_index_list = ['inbox']
     populate_replies(base_dir, http_prefix, domain, post_json_object,
                      max_replies, debug)
+    fitness_performance(inbox_start_time, server.fitness,
+                        'INBOX', 'populate_replies',
+                        debug)
+    inbox_start_time = time.time()
 
     _receive_question_vote(server, base_dir, nickname, domain,
                            http_prefix, handle, debug,
@@ -3845,6 +3928,10 @@ def _inbox_after_initial(server,
                            max_like_count,
                            cw_lists, lists_enabled,
                            bold_reading)
+    fitness_performance(inbox_start_time, server.fitness,
+                        'INBOX', '_receive_question_vote',
+                        debug)
+    inbox_start_time = time.time()
 
     is_reply_to_muted_post = False
 
@@ -3870,9 +3957,17 @@ def _inbox_after_initial(server,
             if debug:
                 print('Invalid DM ' + str(post_json_object))
             return False
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', '_is_valid_dm',
+                            debug)
+        inbox_start_time = time.time()
 
     # get the actor being replied to
     actor = local_actor_url(http_prefix, nickname, domain_full)
+    fitness_performance(inbox_start_time, server.fitness,
+                        'INBOX', 'local_actor_url',
+                        debug)
+    inbox_start_time = time.time()
 
     # create a reply notification file if needed
     is_reply_to_muted_post = \
@@ -3881,6 +3976,10 @@ def _inbox_after_initial(server,
                                        post_json_object, actor,
                                        update_index_list, http_prefix,
                                        default_reply_interval_hrs)
+    fitness_performance(inbox_start_time, server.fitness,
+                        'INBOX', '_create_reply_notification_file',
+                        debug)
+    inbox_start_time = time.time()
 
     if is_image_media(session, base_dir, http_prefix,
                       nickname, domain, post_json_object,
@@ -3893,6 +3992,10 @@ def _inbox_after_initial(server,
                       bold_reading):
         # media index will be updated
         update_index_list.append('tlmedia')
+    fitness_performance(inbox_start_time, server.fitness,
+                        'INBOX', 'is_image_media',
+                        debug)
+    inbox_start_time = time.time()
     if is_blog_post(post_json_object):
         # blogs index will be updated
         update_index_list.append('tlblogs')
@@ -3903,9 +4006,17 @@ def _inbox_after_initial(server,
                                   onion_domain, i2p_domain,
                                   person_cache, post_json_object, debug,
                                   signing_priv_key_pem)
+    fitness_performance(inbox_start_time, server.fitness,
+                        'INBOX', '_obtain_avatar_for_reply_post',
+                        debug)
+    inbox_start_time = time.time()
 
     # save the post to file
     if save_json(post_json_object, destination_filename):
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', 'save_json',
+                            debug)
+        inbox_start_time = time.time()
         if mitm:
             # write a file to indicate that this post was delivered
             # via a third party
@@ -3920,6 +4031,10 @@ def _inbox_after_initial(server,
             _low_frequency_post_notification(base_dir, http_prefix,
                                              nickname, domain, port,
                                              handle, post_is_dm, json_obj)
+            fitness_performance(inbox_start_time, server.fitness,
+                                'INBOX', '_low_frequency_post_notification',
+                                debug)
+            inbox_start_time = time.time()
 
         # If this is a reply to a muted post then also mute it.
         # This enables you to ignore a thread that's getting boring
@@ -3939,6 +4054,10 @@ def _inbox_after_initial(server,
                 edited_post_filename(base_dir, handle_name, domain,
                                      post_json_object, debug, 300,
                                      system_language)
+            fitness_performance(inbox_start_time, server.fitness,
+                                'INBOX', 'edited_post_filename',
+                                debug)
+            inbox_start_time = time.time()
 
             # If this was an edit then update the edits json file and
             # delete the previous version of the post
@@ -3979,8 +4098,18 @@ def _inbox_after_initial(server,
 
     # update the indexes for different timelines
     for boxname in update_index_list:
+        fitness_performance(inbox_start_time,
+                            server.fitness,
+                            'INBOX', 'box_' + boxname,
+                            debug)
+        inbox_start_time = time.time()
         if not inbox_update_index(boxname, base_dir, handle,
                                   destination_filename, debug):
+            fitness_performance(inbox_start_time,
+                                server.fitness,
+                                'INBOX', 'inbox_update_index',
+                                debug)
+            inbox_start_time = time.time()
             print('ERROR: unable to update ' + boxname + ' index')
         else:
             if boxname == 'inbox':
@@ -3990,6 +4119,11 @@ def _inbox_after_initial(server,
                                    nickname, domain, domain_full,
                                    post_json_object, person_cache,
                                    translate, None, theme_name)
+                fitness_performance(inbox_start_time,
+                                    server.fitness,
+                                    'INBOX', 'update_speaker',
+                                    debug)
+                inbox_start_time = time.time()
                 if not unit_test:
                     if debug:
                         print('Saving inbox post as html to cache')
@@ -3999,6 +4133,11 @@ def _inbox_after_initial(server,
                     show_pub_date_only = show_published_date_only
                     timezone = \
                         get_account_timezone(base_dir, handle_name, domain)
+                    fitness_performance(inbox_start_time,
+                                        server.fitness,
+                                        'INBOX', 'get_account_timezone',
+                                        debug)
+                    inbox_start_time = time.time()
                     _inbox_store_post_to_html_cache(recent_posts_cache,
                                                     max_recent_posts,
                                                     translate, base_dir,
@@ -4022,6 +4161,12 @@ def _inbox_after_initial(server,
                                                     lists_enabled,
                                                     timezone, mitm,
                                                     bold_reading)
+                    fitness_performance(inbox_start_time,
+                                        server.fitness,
+                                        'INBOX',
+                                        '_inbox_store_post_to_html_cache',
+                                        debug)
+                    inbox_start_time = time.time()
                     if debug:
                         time_diff = \
                             str(int((time.time() - html_cache_start_time) *
@@ -4032,15 +4177,35 @@ def _inbox_after_initial(server,
 
     update_conversation(base_dir, handle_name, domain,
                         post_json_object)
+    fitness_performance(inbox_start_time,
+                        server.fitness,
+                        'INBOX', 'update_conversation',
+                        debug)
+    inbox_start_time = time.time()
 
     # store the id of the last post made by this actor
    _store_last_post_id(base_dir, nickname, domain, post_json_object)
+    fitness_performance(inbox_start_time,
+                        server.fitness,
+                        'INBOX', '_store_last_post_id',
+                        debug)
+    inbox_start_time = time.time()
 
     _inbox_update_calendar(base_dir, handle, post_json_object)
+    fitness_performance(inbox_start_time,
+                        server.fitness,
+                        'INBOX', '_inbox_update_calendar',
+                        debug)
+    inbox_start_time = time.time()
 
     store_hash_tags(base_dir, handle_name, domain,
                     http_prefix, domain_full,
                     post_json_object, translate)
+    fitness_performance(inbox_start_time,
+                        server.fitness,
+                        'INBOX', 'store_hash_tags',
+                        debug)
+    inbox_start_time = time.time()
 
     # send the post out to group members
     if is_group:
@@ -4055,15 +4220,29 @@ def _inbox_after_initial(server,
                                debug, system_language,
                                domain, onion_domain, i2p_domain,
                                signing_priv_key_pem)
+        fitness_performance(inbox_start_time,
+                            server.fitness,
+                            'INBOX', '_send_to_group_members',
+                            debug)
+        inbox_start_time = time.time()
     else:
         if debug:
             print("Inbox post is not valid " + str(post_json_object))
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', 'invalid_post',
+                            debug)
+        inbox_start_time = time.time()
 
     # if the post wasn't saved
     if not os.path.isfile(destination_filename):
         if debug:
             print("Inbox post was not saved " + destination_filename)
         return False
+    fitness_performance(inbox_start_time,
+                        server.fitness,
+                        'INBOX', 'end_after_initial',
+                        debug)
+    inbox_start_time = time.time()
 
     return True
 
@@ -4588,7 +4767,12 @@ def run_inbox_queue(server,
     """Processes received items and moves them to the appropriate
     directories
     """
+    inbox_start_time = time.time()
     print('Starting new session when starting inbox queue')
+    fitness_performance(inbox_start_time, server.fitness,
+                        'INBOX', 'start', debug)
+    inbox_start_time = time.time()
+
     curr_session_time = int(time.time())
     session_last_update = 0
     session = create_session(proxy_type)
@@ -4619,6 +4803,9 @@ def run_inbox_queue(server,
     # if queue processing was interrupted (eg server crash)
     # then this loads any outstanding items back into the queue
     _restore_queue_items(base_dir, queue)
+    fitness_performance(inbox_start_time, server.fitness,
+                        'INBOX', '_restore_queue_items', debug)
+    inbox_start_time = time.time()
 
     # keep track of numbers of incoming posts per day
     quotas_last_update_daily = int(time.time())
@@ -4643,8 +4830,15 @@ def run_inbox_queue(server,
     # how long it takes for broch mode to lapse
     broch_lapse_days = random.randrange(7, 14)
 
+    fitness_performance(inbox_start_time, server.fitness,
+                        'INBOX', 'while_loop_start', debug)
+    inbox_start_time = time.time()
     while True:
         time.sleep(1)
+        inbox_start_time = time.time()
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', 'while_loop_itteration', debug)
+        inbox_start_time = time.time()
 
         # heartbeat to monitor whether the inbox queue is running
         heart_beat_ctr += 1
@@ -4652,6 +4846,9 @@ def run_inbox_queue(server,
             # turn off broch mode after it has timed out
             if broch_modeLapses(base_dir, broch_lapse_days):
                 broch_lapse_days = random.randrange(7, 14)
+            fitness_performance(inbox_start_time, server.fitness,
+                                'INBOX', 'broch_modeLapses', debug)
+            inbox_start_time = time.time()
             print('>>> Heartbeat Q:' + str(len(queue)) + ' ' +
                   '{:%F %T}'.format(datetime.datetime.now()))
             heart_beat_ctr = 0
@@ -4662,6 +4859,9 @@ def run_inbox_queue(server,
             if queue_restore_ctr >= 30:
                 queue_restore_ctr = 0
                 _restore_queue_items(base_dir, queue)
+            fitness_performance(inbox_start_time, server.fitness,
+                                'INBOX', 'restore_queue', debug)
+            inbox_start_time = time.time()
             continue
 
         # oldest item first
@@ -4679,6 +4879,9 @@ def run_inbox_queue(server,
 
         # Load the queue json
         queue_json = load_json(queue_filename, 1)
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', 'load_queue_json', debug)
+        inbox_start_time = time.time()
         if not queue_json:
             print('Queue: run_inbox_queue failed to load inbox queue item ' +
                   queue_filename)
@@ -4722,6 +4925,9 @@ def run_inbox_queue(server,
                                  domain_max_posts_per_day,
                                  account_max_posts_per_day, debug):
             continue
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', '_inbox_quota_exceeded', debug)
+        inbox_start_time = time.time()
 
         # recreate the session periodically
         if not session or curr_time - session_last_update > 21600:
@@ -4751,6 +4957,9 @@ def run_inbox_queue(server,
                 else:
                     print('WARN: inbox i2p session not created')
                     continue
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', 'recreate_session', debug)
+        inbox_start_time = time.time()
 
         curr_session = session
         curr_proxy_type = proxy_type
@@ -4769,6 +4978,9 @@ def run_inbox_queue(server,
         if debug and queue_json.get('actor'):
             print('Obtaining public key for actor ' + queue_json['actor'])
 
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', 'start_get_pubkey', debug)
+        inbox_start_time = time.time()
         # Try a few times to obtain the public key
         pub_key = None
         key_id = None
@@ -4793,6 +5005,9 @@ def run_inbox_queue(server,
                                     project_version, http_prefix,
                                     domain, onion_domain, i2p_domain,
                                     signing_priv_key_pem)
+            fitness_performance(inbox_start_time, server.fitness,
+                                'INBOX', 'get_person_pub_key', debug)
+            inbox_start_time = time.time()
             if pub_key:
                 if debug:
                     print('DEBUG: public key: ' + str(pub_key))
@@ -4817,6 +5032,9 @@ def run_inbox_queue(server,
             continue
 
         # check the http header signature
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', 'begin_check_signature', debug)
+        inbox_start_time = time.time()
         if debug:
             print('DEBUG: checking http header signature')
             pprint(queue_json['httpHeaders'])
@@ -4833,10 +5051,16 @@ def run_inbox_queue(server,
         else:
             if debug:
                 print('DEBUG: http header signature check success')
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', 'verify_post_headers', debug)
+        inbox_start_time = time.time()
 
         # check if a json signature exists on this post
         has_json_signature, jwebsig_type = \
             _check_json_signature(base_dir, queue_json)
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', '_check_json_signature', debug)
+        inbox_start_time = time.time()
 
         # strict enforcement of json signatures
         if not has_json_signature:
@@ -4887,6 +5111,10 @@ def run_inbox_queue(server,
                           str(queue_filename))
                 if len(queue) > 0:
                     queue.pop(0)
+                fitness_performance(inbox_start_time, server.fitness,
+                                    'INBOX', 'not_verify_signature',
+                                    debug)
+                inbox_start_time = time.time()
                 continue
             else:
                 if http_signature_failed:
@@ -4894,6 +5122,10 @@ def run_inbox_queue(server,
                           'via relay ' + key_id)
                 else:
                     print('jsonld inbox signature check success ' + key_id)
+                fitness_performance(inbox_start_time, server.fitness,
+                                    'INBOX', 'verify_signature_success',
+                                    debug)
+                inbox_start_time = time.time()
 
         # set the id to the same as the post filename
         # This makes the filename and the id consistent
@@ -4917,6 +5149,10 @@ def run_inbox_queue(server,
                       str(queue_filename))
             if len(queue) > 0:
                 queue.pop(0)
+            fitness_performance(inbox_start_time, server.fitness,
+                                'INBOX', '_receive_undo',
+                                debug)
+            inbox_start_time = time.time()
             continue
 
         if debug:
@@ -4942,6 +5178,10 @@ def run_inbox_queue(server,
                 queue.pop(0)
                 print('Queue: Follow activity for ' + key_id +
                       ' removed from queue')
+                fitness_performance(inbox_start_time, server.fitness,
+                                    'INBOX', '_receive_follow_request',
+                                    debug)
+                inbox_start_time = time.time()
                 continue
             else:
                 if debug:
@@ -4963,6 +5203,10 @@ def run_inbox_queue(server,
                       str(queue_filename))
             if len(queue) > 0:
                 queue.pop(0)
+            fitness_performance(inbox_start_time, server.fitness,
+                                'INBOX', 'receive_accept_reject',
+                                debug)
+            inbox_start_time = time.time()
             continue
 
         if _receive_update_activity(recent_posts_cache, curr_session,
@@ -4997,6 +5241,10 @@ def run_inbox_queue(server,
                       str(queue_filename))
             if len(queue) > 0:
                 queue.pop(0)
+            fitness_performance(inbox_start_time, server.fitness,
+                                'INBOX', '_receive_update_activity',
+                                debug)
+            inbox_start_time = time.time()
             continue
 
         # get recipients list
@@ -5018,6 +5266,10 @@ def run_inbox_queue(server,
             if len(queue) > 0:
                 queue.pop(0)
             continue
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', '_post_recipients',
+                            debug)
+        inbox_start_time = time.time()
 
         # if there are only a small number of followers then
         # process them as if they were specifically
@@ -5050,10 +5302,19 @@ def run_inbox_queue(server,
                 queue_json['destination'].replace(inbox_handle, inbox_handle)
             if not os.path.isfile(shared_inbox_post_filename):
                 save_json(queue_json['post'], shared_inbox_post_filename)
+            fitness_performance(inbox_start_time, server.fitness,
+                                'INBOX', 'shared_inbox_save',
+                                debug)
+            inbox_start_time = time.time()
 
         lists_enabled = get_config_param(base_dir, "listsEnabled")
         content_license_url = get_config_param(base_dir, "contentLicenseUrl")
 
+        fitness_performance(inbox_start_time, server.fitness,
+                            'INBOX', 'distribute_post',
+                            debug)
+        inbox_start_time = time.time()
+
         # for posts addressed to specific accounts
         for handle, _ in recipients_dict.items():
             destination = \
@@ -5067,7 +5328,7 @@ def run_inbox_queue(server,
                 base_dir + '/accounts/' + handle + '/.boldReading'
             if os.path.isfile(bold_reading_filename):
                 bold_reading = True
-            _inbox_after_initial(server,
+            _inbox_after_initial(server, inbox_start_time,
                                  recent_posts_cache,
                                  max_recent_posts,
                                  session, session_onion, session_i2p,
@@ -5100,6 +5361,10 @@ def run_inbox_queue(server,
                                  content_license_url,
                                  languages_understood, mitm,
                                  bold_reading)
+            fitness_performance(inbox_start_time, server.fitness,
+                                'INBOX', 'handle_after_initial',
+                                debug)
+            inbox_start_time = time.time()
             if debug:
                 pprint(queue_json['post'])
                 print('Queue: Queue post accepted')
@@ -818,7 +818,8 @@ def run_newswire_daemon(base_dir: str, httpd,
                                   httpd.maxCategoriesFeedItemSizeKb,
                                   httpd.system_language,
                                   httpd.debug,
-                                  httpd.preferred_podcast_formats)
+                                  httpd.preferred_podcast_formats,
+                                  httpd.rss_timeout_sec)
 
         if not httpd.newswire:
             print('Newswire feeds not updated')
@@ -867,7 +868,7 @@ def run_newswire_daemon(base_dir: str, httpd,
                               httpd.max_news_posts)
 
         # wait a while before the next feeds update
-        for _ in range(120):
+        for _ in range(360):
             time.sleep(10)
             # if a new blog post has been created then stop
             # waiting and recalculate the newswire
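
With 10 second sleeps, this raises the pause between newswire refreshes from 120 x 10 s = 20 minutes to 360 x 10 s = one hour.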
diff --git a/newswire.py b/newswire.py
@@ -1278,7 +1278,8 @@ def get_rss(base_dir: str, domain: str, session, url: str,
             max_posts_per_source: int, max_feed_size_kb: int,
             max_feed_item_size_kb: int,
             max_categories_feedItem_size_kb: int, debug: bool,
-            preferred_podcast_formats: []) -> {}:
+            preferred_podcast_formats: [],
+            timeout_sec: int) -> {}:
     """Returns an RSS url as a dict
     """
     if not isinstance(url, str):
@@ -1302,7 +1303,10 @@ def get_rss(base_dir: str, domain: str, session, url: str,
         url = _yt_channel_to_atom_feed(url)
     try:
         result = \
-            session.get(url, headers=session_headers, params=session_params)
+            session.get(url, headers=session_headers,
+                        params=session_params,
+                        timeout=timeout_sec,
+                        allow_redirects=False)
         if result:
             if int(len(result.text) / 1024) < max_feed_size_kb and \
                not contains_invalid_chars(result.text):
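
Both new arguments go straight to requests' Session.get: timeout bounds how long a slow feed can stall the newswire thread, and allow_redirects=False stops a subscribed URL from silently redirecting elsewhere. A hedged sketch of the same guarded fetch in isolation (the function name, headers and size cap are illustrative, not the ones the real code builds):

    import requests

    def fetch_feed_text(url: str, timeout_sec: int = 20,
                        max_feed_size_kb: int = 10240) -> str:
        session = requests.Session()
        try:
            result = session.get(url,
                                 headers={'Accept': 'application/rss+xml'},
                                 timeout=timeout_sec,
                                 allow_redirects=False)
        except requests.exceptions.RequestException:
            return ''
        if not result or result.status_code != 200:
            return ''
        if int(len(result.text) / 1024) >= max_feed_size_kb:
            return ''
        return result.text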
@@ -1558,7 +1562,8 @@ def get_dict_from_newswire(session, base_dir: str, domain: str,
                            max_newswire_posts: int,
                            max_categories_feedItem_size_kb: int,
                            system_language: str, debug: bool,
-                           preferred_podcast_formats: []) -> {}:
+                           preferred_podcast_formats: [],
+                           timeout_sec: int) -> {}:
     """Gets rss feeds as a dictionary from newswire file
     """
     subscriptions_filename = base_dir + '/accounts/newswire.txt'
@@ -1600,7 +1605,8 @@ def get_dict_from_newswire(session, base_dir: str, domain: str,
                         max_posts_per_source, max_feed_size_kb,
                         max_feed_item_size_kb,
                         max_categories_feedItem_size_kb, debug,
-                        preferred_podcast_formats)
+                        preferred_podcast_formats,
+                        timeout_sec)
         if items_list:
             for date_str, item in items_list.items():
                 result[date_str] = item
diff --git a/session.py b/session.py
@@ -71,7 +71,8 @@ def url_exists(session, url: str, timeout_sec: int = 3,
     try:
         result = session.get(url, headers=session_headers,
                              params=session_params,
-                             timeout=timeout_sec)
+                             timeout=timeout_sec,
+                             allow_redirects=False)
         if result:
             if result.status_code == 200 or \
                result.status_code == 304:
@@ -91,7 +92,8 @@ def _get_json_request(session, url: str, domain_full: str, session_headers: {},
     """
     try:
         result = session.get(url, headers=session_headers,
-                             params=session_params, timeout=timeout_sec)
+                             params=session_params, timeout=timeout_sec,
+                             allow_redirects=False)
         if result.status_code != 200:
             if result.status_code == 401:
                 print("WARN: get_json " + url + ' rejected by secure mode')
@@ -284,7 +286,8 @@ def get_vcard(xml_format: bool,
 
     try:
         result = session.get(url, headers=session_headers,
-                             params=session_params, timeout=timeout_sec)
+                             params=session_params, timeout=timeout_sec,
+                             allow_redirects=False)
         if result.status_code != 200:
             if result.status_code == 401:
                 print("WARN: get_vcard " + url + ' rejected by secure mode')
@@ -592,7 +595,8 @@ def download_image(session, base_dir: str, url: str,
             print('Downloading image url: ' + url)
         result = session.get(url,
                              headers=session_headers,
-                             params=None)
+                             params=None,
+                             allow_redirects=False)
         if result.status_code < 200 or \
            result.status_code > 202:
             if debug:
@@ -635,7 +639,9 @@ def download_image_any_mime_type(session, url: str,
         'Accept': 'image/x-icon, image/png, image/webp, image/jpeg, image/gif'
     }
     try:
-        result = session.get(url, headers=session_headers, timeout=timeout_sec)
+        result = session.get(url, headers=session_headers,
+                             timeout=timeout_sec,
+                             allow_redirects=False)
     except requests.exceptions.RequestException as ex:
         print('EX: download_image_any_mime_type failed1: ' +
               str(url) + ', ' + str(ex))
@@ -1551,6 +1551,8 @@ def _html_system_monitor(nickname: str, translate: {}) -> str:
     system_monitor_str = begin_edit_section(translate['System Monitor'])
     system_monitor_str += '<p><a href="/users/' + nickname + \
         '/performance?graph=get">📊 GET</a></p>'
+    system_monitor_str += '<p><a href="/users/' + nickname + \
+        '/performance?graph=inbox">📊 INBOX</a></p>'
     system_monitor_str += '<p><a href="/users/' + nickname + \
         '/performance?graph=post">📊 POST</a></p>'
     system_monitor_str += end_edit_section()
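
The graph=inbox query string in this new link is what the daemon.py hunks at the top of this commit normalize to the 'INBOX' fitness id, so the watch points recorded throughout inbox.py become visible from the system monitor page.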
@@ -277,7 +277,8 @@ def update_avatar_image_cache(signing_priv_key_pem: str,
             print('avatar image url: ' + avatar_url)
         result = session.get(avatar_url,
                              headers=session_headers,
-                             params=None)
+                             params=None,
+                             allow_redirects=False)
         if result.status_code < 200 or \
            result.status_code > 202:
             if debug: