Debug for thread events

main
Bob Mottram 2022-03-13 11:01:07 +00:00
parent 6a59a9b66c
commit d611889d76
8 changed files with 34 additions and 18 deletions

@@ -1605,6 +1605,7 @@ class PubServer(BaseHTTPRequestHandler):
             print('Creating outbox thread ' +
                   account_outbox_thread_name + '/' +
                   str(self.server.outbox_thread_index[account_outbox_thread_name]))
+            print('THREAD: _post_to_outbox')
             self.server.outboxThread[account_outbox_thread_name][index] = \
                 thread_with_trace(target=self._post_to_outbox,
                                   args=(message_json.copy(),
@@ -20184,7 +20185,7 @@ def run_shares_expire(version_number: str, base_dir: str) -> None:
 def run_posts_watchdog(project_version: str, httpd) -> None:
     """This tries to keep the posts thread running even if it dies
     """
-    print('Starting posts queue watchdog')
+    print('THREAD: Starting posts queue watchdog')
     posts_queue_original = httpd.thrPostsQueue.clone(run_posts_queue)
     httpd.thrPostsQueue.start()
     while True:
@@ -20192,6 +20193,7 @@ def run_posts_watchdog(project_version: str, httpd) -> None:
         if httpd.thrPostsQueue.is_alive():
             continue
         httpd.thrPostsQueue.kill()
+        print('THREAD: restarting posts queue')
         httpd.thrPostsQueue = posts_queue_original.clone(run_posts_queue)
         httpd.thrPostsQueue.start()
         print('Restarting posts queue...')
@@ -20200,7 +20202,7 @@ def run_shares_expire_watchdog(project_version: str, httpd) -> None:
 def run_shares_expire_watchdog(project_version: str, httpd) -> None:
     """This tries to keep the shares expiry thread running even if it dies
     """
-    print('Starting shares expiry watchdog')
+    print('THREAD: Starting shares expiry watchdog')
     shares_expire_original = httpd.thrSharesExpire.clone(run_shares_expire)
     httpd.thrSharesExpire.start()
     while True:
@@ -20208,6 +20210,7 @@ def run_shares_expire_watchdog(project_version: str, httpd) -> None:
         if httpd.thrSharesExpire.is_alive():
             continue
         httpd.thrSharesExpire.kill()
+        print('THREAD: restarting shares watchdog')
         httpd.thrSharesExpire = shares_expire_original.clone(run_shares_expire)
         httpd.thrSharesExpire.start()
         print('Restarting shares expiry...')
@@ -20688,13 +20691,13 @@ def run_daemon(crawlers_allowed: [],
         print('Creating shared item files directory')
         os.mkdir(base_dir + '/sharefiles')
 
-    print('Creating fitness thread')
+    print('THREAD: Creating fitness thread')
     httpd.thrFitness = \
         thread_with_trace(target=fitness_thread,
                           args=(base_dir, httpd.fitness), daemon=True)
     httpd.thrFitness.start()
 
-    print('Creating cache expiry thread')
+    print('THREAD: Creating cache expiry thread')
     httpd.thrCache = \
         thread_with_trace(target=expire_cache,
                           args=(base_dir, httpd.person_cache,
@@ -20706,12 +20709,13 @@ def run_daemon(crawlers_allowed: [],
     # number of mins after which sending posts or updates will expire
     httpd.send_threads_timeout_mins = send_threads_timeout_mins
 
-    print('Creating posts queue')
+    print('THREAD: Creating posts queue')
     httpd.thrPostsQueue = \
         thread_with_trace(target=run_posts_queue,
                           args=(base_dir, httpd.send_threads, debug,
                                 httpd.send_threads_timeout_mins), daemon=True)
     if not unit_test:
+        print('THREAD: run_posts_watchdog')
         httpd.thrPostsWatchdog = \
             thread_with_trace(target=run_posts_watchdog,
                               args=(project_version, httpd), daemon=True)
@@ -20719,11 +20723,12 @@ def run_daemon(crawlers_allowed: [],
     else:
         httpd.thrPostsQueue.start()
 
-    print('Creating expire thread for shared items')
+    print('THREAD: Creating expire thread for shared items')
     httpd.thrSharesExpire = \
         thread_with_trace(target=run_shares_expire,
                           args=(project_version, base_dir), daemon=True)
     if not unit_test:
+        print('THREAD: run_shares_expire_watchdog')
         httpd.thrSharesExpireWatchdog = \
             thread_with_trace(target=run_shares_expire_watchdog,
                               args=(project_version, httpd), daemon=True)
@@ -20752,7 +20757,7 @@ def run_daemon(crawlers_allowed: [],
 
     create_initial_last_seen(base_dir, http_prefix)
 
-    print('Creating inbox queue')
+    print('THREAD: Creating inbox queue')
     httpd.thrInboxQueue = \
         thread_with_trace(target=run_inbox_queue,
                           args=(httpd.recent_posts_cache,
@@ -20784,19 +20789,19 @@ def run_daemon(crawlers_allowed: [],
                                 httpd.default_reply_interval_hrs,
                                 httpd.cw_lists), daemon=True)
 
-    print('Creating scheduled post thread')
+    print('THREAD: Creating scheduled post thread')
     httpd.thrPostSchedule = \
         thread_with_trace(target=run_post_schedule,
                           args=(base_dir, httpd, 20), daemon=True)
 
-    print('Creating newswire thread')
+    print('THREAD: Creating newswire thread')
     httpd.thrNewswireDaemon = \
         thread_with_trace(target=run_newswire_daemon,
                           args=(base_dir, httpd,
                                 http_prefix, domain, port,
                                 httpd.translate), daemon=True)
 
-    print('Creating federated shares thread')
+    print('THREAD: Creating federated shares thread')
     httpd.thrFederatedSharesDaemon = \
         thread_with_trace(target=run_federated_shares_daemon,
                           args=(base_dir, httpd,
@@ -20818,25 +20823,25 @@ def run_daemon(crawlers_allowed: [],
     httpd.signing_priv_key_pem = get_instance_actor_key(base_dir, domain)
 
     if not unit_test:
-        print('Creating inbox queue watchdog')
+        print('THREAD: Creating inbox queue watchdog')
         httpd.thrWatchdog = \
             thread_with_trace(target=run_inbox_queue_watchdog,
                               args=(project_version, httpd), daemon=True)
         httpd.thrWatchdog.start()
 
-        print('Creating scheduled post watchdog')
+        print('THREAD: Creating scheduled post watchdog')
         httpd.thrWatchdogSchedule = \
             thread_with_trace(target=run_post_schedule_watchdog,
                               args=(project_version, httpd), daemon=True)
         httpd.thrWatchdogSchedule.start()
 
-        print('Creating newswire watchdog')
+        print('THREAD: Creating newswire watchdog')
         httpd.thrNewswireWatchdog = \
             thread_with_trace(target=run_newswire_watchdog,
                               args=(project_version, httpd), daemon=True)
        httpd.thrNewswireWatchdog.start()
 
-        print('Creating federated shares watchdog')
+        print('THREAD: Creating federated shares watchdog')
         httpd.thrFederatedSharesWatchdog = \
            thread_with_trace(target=run_federated_shares_watchdog,
                              args=(project_version, httpd), daemon=True)

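The watchdog hunks in this commit (run_posts_watchdog and run_shares_expire_watchdog above, plus the inbox, newswire, scheduled post and federated shares watchdogs in the later files) all follow the same clone-and-restart pattern: keep a never-started clone of the worker thread, poll is_alive(), and when the worker has died kill it, clone a fresh copy and start it, now logging each step with the 'THREAD:' prefix. The self-contained sketch below illustrates that pattern; it is an assumption-laden stand-in rather than the project's code. KillableThread only approximates the kill()/clone() interface of thread_with_trace using the common sys.settrace recipe, and run_example_queue / run_example_watchdog are hypothetical names.

# Minimal sketch of the clone-and-restart watchdog pattern traced above.
# KillableThread is an approximation of thread_with_trace, not the real class;
# run_example_queue and run_example_watchdog are hypothetical names.
import sys
import threading
import time


class KillableThread(threading.Thread):
    """A daemon thread that can be killed, and cloned before it is started."""

    def __init__(self, target, args=(), daemon=True):
        super().__init__(target=target, args=args, daemon=daemon)
        self._clone_args = args
        self._killed = False

    def _trace(self, frame, event, arg):
        # once kill() has been called, stop the target at its next line event
        if self._killed and event == 'line':
            raise SystemExit()
        return self._trace

    def run(self):
        sys.settrace(self._trace)
        super().run()

    def kill(self):
        self._killed = True

    def clone(self, func):
        print('THREAD: clone')
        return KillableThread(target=func, args=self._clone_args, daemon=True)


def run_example_queue() -> None:
    """Stand-in worker; the real queue threads poll and process items."""
    while True:
        time.sleep(1)


def run_example_watchdog(thr: KillableThread) -> None:
    """Keeps the example queue thread running even if it dies."""
    print('THREAD: Starting example queue watchdog')
    queue_original = thr.clone(run_example_queue)
    thr.start()
    while True:
        time.sleep(20)
        if thr.is_alive():
            continue
        thr.kill()
        print('THREAD: restarting example queue')
        thr = queue_original.clone(run_example_queue)
        thr.start()
        print('Restarting example queue...')

In the daemon the real watchdogs are themselves started as daemon threads from run_daemon (skipped under unit_test), so a crashed queue thread is replaced within one polling interval.
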
@@ -3747,7 +3747,7 @@ def _restore_queue_items(base_dir: str, queue: []) -> None:
 def run_inbox_queue_watchdog(project_version: str, httpd) -> None:
     """This tries to keep the inbox thread running even if it dies
     """
-    print('Starting inbox queue watchdog')
+    print('THREAD: Starting inbox queue watchdog')
     inbox_queue_original = httpd.thrInboxQueue.clone(run_inbox_queue)
     httpd.thrInboxQueue.start()
     while True:
@@ -3755,6 +3755,7 @@ def run_inbox_queue_watchdog(project_version: str, httpd) -> None:
         if not httpd.thrInboxQueue.is_alive() or httpd.restart_inbox_queue:
             httpd.restart_inbox_queue_in_progress = True
             httpd.thrInboxQueue.kill()
+            print('THREAD: restarting inbox queue watchdog')
             httpd.thrInboxQueue = inbox_queue_original.clone(run_inbox_queue)
             httpd.inbox_queue.clear()
             httpd.thrInboxQueue.start()

@@ -88,6 +88,7 @@ def manual_deny_follow_request_thread(session, session_onion, session_i2p,
     """Manually deny a follow request, within a thread so that the
     user interface doesn't lag
     """
+    print('THREAD: manual_deny_follow_request')
     thr = \
         thread_with_trace(target=manual_deny_follow_request,
                           args=(session, session_onion, session_i2p,
@@ -321,6 +322,7 @@ def manual_approve_follow_request_thread(session, session_onion, session_i2p,
     """Manually approve a follow request, in a thread so as not to cause
     the UI to lag
     """
+    print('THREAD: manual_approve_follow_request')
     thr = \
         thread_with_trace(target=manual_approve_follow_request,
                           args=(session, session_onion, session_i2p,

@@ -882,7 +882,7 @@ def run_newswire_daemon(base_dir: str, httpd,
 def run_newswire_watchdog(project_version: str, httpd) -> None:
     """This tries to keep the newswire update thread running even if it dies
     """
-    print('Starting newswire watchdog')
+    print('THREAD: Starting newswire watchdog')
     newswire_original = \
         httpd.thrPostSchedule.clone(run_newswire_daemon)
     httpd.thrNewswireDaemon.start()
@@ -891,6 +891,7 @@ def run_newswire_watchdog(project_version: str, httpd) -> None:
         if httpd.thrNewswireDaemon.is_alive():
             continue
         httpd.thrNewswireDaemon.kill()
+        print('THREAD: restarting newswire watchdog')
         httpd.thrNewswireDaemon = \
             newswire_original.clone(run_newswire_daemon)
         httpd.thrNewswireDaemon.start()

@@ -2494,6 +2494,7 @@ def send_post(signing_priv_key_pem: str, project_version: str,
             send_threads[0].kill()
         send_threads.pop(0)
         print('WARN: thread killed')
+    print('THREAD: thread_send_post')
     thr = \
         thread_with_trace(target=thread_send_post,
                           args=(session,
@@ -2881,6 +2882,7 @@ def send_signed_json(post_json_object: {}, session, base_dir: str,
         print('DEBUG: starting thread to send post')
         pprint(post_json_object)
     domain_full = get_full_domain(domain, port)
+    print('THREAD: thread_send_post 2')
     thr = \
         thread_with_trace(target=thread_send_post,
                           args=(session,
@@ -3154,6 +3156,7 @@ def send_to_named_addresses_thread(session, session_onion, session_i2p,
                                    signing_priv_key_pem: str):
     """Returns a thread used to send a post to named addresses
     """
+    print('THREAD: _send_to_named_addresses')
     send_thread = \
         thread_with_trace(target=_send_to_named_addresses,
                           args=(session, session_onion, session_i2p,
@@ -3412,6 +3415,7 @@ def send_to_followers_thread(session, session_onion, session_i2p,
                              signing_priv_key_pem: str):
     """Returns a thread used to send a post to followers
     """
+    print('THREAD: send_to_followers')
     send_thread = \
         thread_with_trace(target=send_to_followers,
                           args=(session, session_onion, session_i2p,

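For context, the first hunk in this file sits inside send_post's bounded pool of send threads: once the pool is full, the oldest thread is killed and removed before the new thread_send_post thread is created, which is what the added 'THREAD: thread_send_post' line marks. A hedged sketch of that pruning step follows; the function name, the max_send_threads limit and the duck-typed thread objects (anything exposing is_alive() and kill(), such as thread_with_trace) are illustrative, not the project's exact values.

# Illustrative pruning of a bounded send-thread pool, mirroring the logic
# around the first hunk above. Names and the limit are assumptions for the
# sketch, not values taken from the project.

def prune_send_threads(send_threads: list, max_send_threads: int) -> None:
    """Make room for a new send thread, killing the oldest if the pool is full."""
    while len(send_threads) >= max_send_threads:
        oldest = send_threads.pop(0)
        if oldest.is_alive():
            # kill() is assumed to behave like thread_with_trace.kill()
            oldest.kill()
        print('WARN: thread killed')

After pruning, send_post logs 'THREAD: thread_send_post', builds the new thread with thread_with_trace and appends it to send_threads so the pool stays bounded.
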
@@ -193,7 +193,7 @@ def run_post_schedule(base_dir: str, httpd, max_scheduled_posts: int):
 def run_post_schedule_watchdog(project_version: str, httpd) -> None:
     """This tries to keep the scheduled post thread running even if it dies
     """
-    print('Starting scheduled post watchdog')
+    print('THREAD: Starting scheduled post watchdog')
     post_schedule_original = \
         httpd.thrPostSchedule.clone(run_post_schedule)
     httpd.thrPostSchedule.start()
@@ -202,6 +202,7 @@ def run_post_schedule_watchdog(project_version: str, httpd) -> None:
         if httpd.thrPostSchedule.is_alive():
             continue
         httpd.thrPostSchedule.kill()
+        print('THREAD: restarting scheduled post watchdog')
         httpd.thrPostSchedule = \
             post_schedule_original.clone(run_post_schedule)
         httpd.thrPostSchedule.start()

@@ -1610,7 +1610,7 @@ def run_federated_shares_watchdog(project_version: str, httpd) -> None:
     """This tries to keep the federated shares update thread
     running even if it dies
     """
-    print('Starting federated shares watchdog')
+    print('THREAD: Starting federated shares watchdog')
     federated_shares_original = \
         httpd.thrPostSchedule.clone(run_federated_shares_daemon)
     httpd.thrFederatedSharesDaemon.start()
@@ -1619,6 +1619,7 @@ def run_federated_shares_watchdog(project_version: str, httpd) -> None:
         if httpd.thrFederatedSharesDaemon.is_alive():
             continue
         httpd.thrFederatedSharesDaemon.kill()
+        print('THREAD: restarting federated shares watchdog')
         httpd.thrFederatedSharesDaemon = \
             federated_shares_original.clone(run_federated_shares_daemon)
         httpd.thrFederatedSharesDaemon.start()

@@ -75,6 +75,7 @@ class thread_with_trace(threading.Thread):
     def clone(self, func):
         """Create a clone
         """
+        print('THREAD: clone')
        return thread_with_trace(target=func,
                                 args=self._args,
                                 daemon=True)
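
Because every message added by this commit shares the 'THREAD:' prefix, thread lifecycle activity can be pulled out of a captured server log with a simple filter. The helper below is hypothetical and not part of this commit; the log path and the assumption that each 'THREAD:' message starts its line are mine.

# Hypothetical helper for reading the new debug output (not part of this
# commit). It assumes the server's stdout was captured to a plain log file
# and that each 'THREAD:' message begins its line.
from collections import Counter


def summarise_thread_events(log_path: str) -> Counter:
    """Count each distinct 'THREAD:' event found in the log."""
    events = Counter()
    with open(log_path, encoding='utf-8') as log_file:
        for line in log_file:
            if line.startswith('THREAD:'):
                events[line.strip()] += 1
    return events


if __name__ == '__main__':
    # 'epicyon.log' is only an example path
    for event, count in summarise_thread_events('epicyon.log').most_common():
        print(count, event)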