__filename__ = "daemon.py"
__author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.5.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Core"

from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer, HTTPServer
import sys
import time
import os
from socket import error as SocketError
import errno
from functools import partial
# for saving images
from metadata import metadata_custom_emoji
from person import update_memorial_flags
from person import clear_person_qrcodes
from person import create_shared_inbox
from person import create_news_inbox
from keys import get_instance_actor_key
from posts import expire_cache
from inbox import run_inbox_queue
from inbox import run_inbox_queue_watchdog
from follow import create_initial_last_seen
from threads import begin_thread
from threads import thread_with_trace
from threads import remove_dormant_threads
from cwlists import load_cw_lists
from blocking import run_federated_blocks_daemon
from blocking import load_federated_blocks_endpoints
from blocking import load_blocked_military
from blocking import update_blocked_cache
from blocking import set_broch_mode
from blocking import get_domain_blocklist
from webapp_utils import load_buy_sites
from webapp_accesskeys import load_access_keys_for_accounts
from webapp_media import load_peertube_instances
from shares import run_federated_shares_daemon
from shares import run_federated_shares_watchdog
from shares import create_shared_item_federation_token
from shares import generate_shared_item_federation_tokens
from shares import expire_shares
from categories import load_city_hashtags
from categories import update_hashtag_categories
from languages import load_default_post_languages
from utils import set_accounts_data_dir
from utils import data_dir
from utils import check_bad_path
from utils import acct_handle_dir
from utils import load_reverse_timeline
from utils import load_min_images_for_accounts
from utils import load_account_timezones
from utils import load_translations_from_file
from utils import load_bold_reading
from utils import load_hide_follows
from utils import get_full_domain
from utils import set_config_param
from utils import get_config_param
from utils import load_json
from content import load_auto_cw_cache
from content import load_dogwhistles
from theme import scan_themes_for_scripts
from theme import is_news_theme_name
from theme import get_text_mode_banner
from theme import set_news_avatar
from schedule import run_post_schedule
from schedule import run_post_schedule_watchdog
from happening import dav_propfind_response
from happening import dav_put_response
from happening import dav_report_response
from happening import dav_delete_response
from newswire import load_hashtag_categories
from newsdaemon import run_newswire_watchdog
from newsdaemon import run_newswire_daemon
from fitnessFunctions import fitness_thread
from siteactive import load_unavailable_sites
from crawlers import load_known_web_bots
from qrcode import save_domain_qrcode
from importFollowing import run_import_following_watchdog
from relationships import update_moved_actors
from daemon_get import daemon_http_get
from daemon_post import daemon_http_post
from daemon_head import daemon_http_head
from httpcodes import http_200
from httpcodes import http_201
from httpcodes import http_207
from httpcodes import http_403
from httpcodes import http_404
from httpcodes import http_304
from httpcodes import http_400
from httpcodes import write2
from httpheaders import set_headers
from daemon_utils import has_accept
from daemon_utils import is_authorized
from poison import load_dictionary
from poison import load_2grams


class PubServer(BaseHTTPRequestHandler):
protocol_version = 'HTTP/1.1'
def handle_error(self, request, client_address):
"""HTTP server error handling
"""
print('ERROR: http server error: ' + str(request) + ', ' +
str(client_address))
def do_GET(self):
daemon_http_get(self)
def _dav_handler(self, endpoint_type: str, debug: bool):
calling_domain = self.server.domain_full
if not has_accept(self, calling_domain):
http_400(self)
return
accept_str = self.headers['Accept']
if 'application/xml' not in accept_str:
if debug:
print(endpoint_type.upper() + ' is not of xml type')
http_400(self)
return
if not self.headers.get('Content-length'):
print(endpoint_type.upper() + ' has no content-length')
http_400(self)
return
# check that the content length string is not too long
if isinstance(self.headers['Content-length'], str):
max_content_size = len(str(self.server.maxMessageLength))
if len(self.headers['Content-length']) > max_content_size:
http_400(self)
return
length = int(self.headers['Content-length'])
if length > self.server.max_post_length:
print(endpoint_type.upper() +
' request size too large ' + self.path)
http_400(self)
return
if not self.path.startswith('/calendars/'):
print(endpoint_type.upper() + ' without /calendars ' + self.path)
http_404(self, 145)
return
if debug:
print(endpoint_type.upper() + ' checking authorization')
if not is_authorized(self):
print(endpoint_type.upper() + ' not authorized')
http_403(self)
return
nickname = self.path.split('/calendars/')[1]
if '/' in nickname:
nickname = nickname.split('/')[0]
if not nickname:
print(endpoint_type.upper() + ' no nickname ' + self.path)
http_400(self)
return
dir_str = data_dir(self.server.base_dir)
if not os.path.isdir(dir_str + '/' +
nickname + '@' + self.server.domain):
print(endpoint_type.upper() +
' for non-existent account ' + self.path)
http_404(self, 146)
return
propfind_bytes = None
try:
propfind_bytes = self.rfile.read(length)
except SocketError as ex:
if ex.errno == errno.ECONNRESET:
print('EX: ' + endpoint_type.upper() +
' connection reset by peer')
else:
print('EX: ' + endpoint_type.upper() + ' socket error')
http_400(self)
return
except ValueError as ex:
print('EX: ' + endpoint_type.upper() +
' rfile.read failed, ' + str(ex))
http_400(self)
return
if not propfind_bytes:
http_404(self, 147)
return
propfind_xml = propfind_bytes.decode('utf-8')
response_str = None
if endpoint_type == 'propfind':
response_str = \
dav_propfind_response(nickname, propfind_xml)
elif endpoint_type == 'put':
response_str = \
dav_put_response(self.server.base_dir,
nickname, self.server.domain,
propfind_xml,
self.server.http_prefix,
self.server.system_language,
self.server.recent_dav_etags)
elif endpoint_type == 'report':
curr_etag = None
if self.headers.get('ETag'):
curr_etag = self.headers['ETag']
elif self.headers.get('Etag'):
curr_etag = self.headers['Etag']
response_str = \
dav_report_response(self.server.base_dir,
nickname, self.server.domain,
propfind_xml,
self.server.person_cache,
self.server.http_prefix,
curr_etag,
self.server.recent_dav_etags,
self.server.domain_full,
self.server.system_language)
elif endpoint_type == 'delete':
response_str = \
dav_delete_response(self.server.base_dir,
nickname, self.server.domain,
self.path,
self.server.http_prefix,
debug,
self.server.recent_posts_cache)
if not response_str:
http_404(self, 148)
return
if response_str == 'Not modified':
if endpoint_type == 'put':
http_200(self)
return
http_304(self)
return
if response_str.startswith('ETag:') and endpoint_type == 'put':
response_etag = response_str.split('ETag:', 1)[1]
http_201(self, response_etag)
elif response_str != 'Ok':
message_xml = response_str.encode('utf-8')
message_xml_len = len(message_xml)
set_headers(self, 'application/xml; charset=utf-8',
message_xml_len,
None, calling_domain, False)
write2(self, message_xml)
if 'multistatus' in response_str:
return http_207(self)
http_200(self)
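
    # Illustrative note: a CalDAV client reaching the endpoints below sends
    # XML bodies to /calendars/<nickname>, for example a hypothetical
    # request
    #   PROPFIND /calendars/alice HTTP/1.1
    #   Accept: application/xml
    #   Content-length: 123
    # followed by an XML body. Each of the DAV verbs routes through
    # _dav_handler above, which enforces the Accept type, size limits,
    # authorization and account existence checks.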
def do_PROPFIND(self):
if self.server.starting_daemon:
return
if check_bad_path(self.path):
http_400(self)
return
self._dav_handler('propfind', self.server.debug)
def do_PUT(self):
if self.server.starting_daemon:
return
if check_bad_path(self.path):
http_400(self)
return
self._dav_handler('put', self.server.debug)
def do_REPORT(self):
if self.server.starting_daemon:
return
if check_bad_path(self.path):
http_400(self)
return
self._dav_handler('report', self.server.debug)
def do_DELETE(self):
if self.server.starting_daemon:
return
if check_bad_path(self.path):
http_400(self)
return
self._dav_handler('delete', self.server.debug)
def do_HEAD(self):
daemon_http_head(self)
def do_POST(self):
daemon_http_post(self)
class PubServerUnitTest(PubServer):
protocol_version = 'HTTP/1.0'
class EpicyonServer(ThreadingHTTPServer):
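    """Threaded HTTP server which holds the shared state for the instance:
    configuration settings, in-memory caches and handles to the background
    threads, accessed from request handlers via self.server
    """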
starting_daemon = True
hide_announces = {}
no_of_books = 0
max_api_blocks = 32000
block_federated_endpoints = None
block_federated = []
books_cache = {}
max_recent_books = 1000
max_cached_readers = 24
auto_cw_cache = {}
sites_unavailable = None
max_shares_on_profile = 0
block_military = []
followers_synchronization = False
followers_sync_cache = {}
buy_sites = None
min_images_for_accounts = 0
default_post_language = None
css_cache = {}
reverse_sequence = None
clacks = None
public_replies_unlisted = False
dogwhistles = {}
preferred_podcast_formats = []
bold_reading = {}
hide_follows = {}
account_timezone = None
post_to_nickname = None
nodeinfo_is_active = False
security_txt_is_active = False
vcard_is_active = False
masto_api_is_active = False
map_format = None
dyslexic_font = False
content_license_url = ''
dm_license_url = ''
fitness = {}
signing_priv_key_pem = None
show_node_info_accounts = False
show_node_info_version = False
text_mode_banner = ''
access_keys = {}
rss_timeout_sec = 20
check_actor_timeout = 2
default_reply_interval_hrs = 9999999
recent_dav_etags = {}
key_shortcuts = {}
low_bandwidth = False
user_agents_blocked = None
crawlers_allowed = None
known_bots = None
unit_test = False
allow_local_network_access = False
yt_replace_domain = ''
twitter_replacement_domain = ''
newswire = {}
max_newswire_posts = 0
verify_all_signatures = False
blocklistUpdateCtr = 0
blocklistUpdateInterval = 100
domainBlocklist = None
manual_follower_approval = True
onion_domain = None
i2p_domain = None
media_instance = False
blogs_instance = False
translate = {}
system_language = 'en'
city = ''
voting_time_mins = 30
positive_voting = False
newswire_votes_threshold = 1
max_newswire_feed_size_kb = 1
max_newswire_posts_per_source = 1
show_published_date_only = False
max_mirrored_articles = 0
max_news_posts = 0
maxTags = 32
max_followers = 2000
show_publish_as_icon = False
full_width_tl_button_header = False
icons_as_buttons = False
rss_icon_at_top = True
publish_button_at_top = False
max_feed_item_size_kb = 100
maxCategoriesFeedItemSizeKb = 1024
dormant_months = 6
max_like_count = 10
followingItemsPerPage = 12
registration = False
enable_shared_inbox = True
outboxThread = {}
outbox_thread_index = {}
new_post_thread = {}
project_version = __version__
secure_mode = True
max_post_length = 0
maxMediaSize = 0
maxMessageLength = 64000
maxPostsInBox = 32000
maxCacheAgeDays = 30
domain = ''
port = 43
domain_full = ''
http_prefix = 'https'
debug = False
federation_list = []
shared_items_federated_domains = []
base_dir = ''
instance_id = ''
person_cache = {}
cached_webfingers = {}
favicons_cache = {}
proxy_type = None
session = None
session_onion = None
session_i2p = None
last_getreq = 0
last_postreq = 0
getreq_busy = False
postreq_busy = False
received_message = False
inbox_queue = []
send_threads = None
postLog = []
max_queue_length = 64
allow_deletion = True
last_login_time = 0
last_login_failure = 0
login_failure_count = {}
log_login_failures = True
max_replies = 10
tokens = {}
tokens_lookup = {}
instance_only_skills_search = True
followers_threads = []
blocked_cache = []
blocked_cache_last_updated = 0
blocked_cache_update_secs = 120
custom_emoji = {}
known_crawlers = {}
last_known_crawler = 0
lists_enabled = None
cw_lists = {}
theme_name = ''
news_instance = False
default_timeline = 'inbox'
thrFitness = None
recent_posts_cache = {}
thrCache = None
send_threads_timeout_mins = 3
thrPostsQueue = None
thrPostsWatchdog = None
thrSharesExpire = None
thrSharesExpireWatchdog = None
max_recent_posts = 1
iconsCache = {}
fontsCache = {}
shared_item_federation_tokens = None
peertube_instances = []
max_mentions = 10
max_emoji = 10
max_hashtags = 10
thrInboxQueue = None
thrPostSchedule = None
thrNewswireDaemon = None
thrFederatedSharesDaemon = None
restart_inbox_queue_in_progress = False
restart_inbox_queue = False
signing_priv_key_pem = ''
thrCheckActor = {}
thrImportFollowing = None
thrWatchdog = None
thrWatchdogSchedule = None
thrNewswireWatchdog = None
thrFederatedSharesWatchdog = None
thrFederatedBlocksDaemon = None
qrcode_scale = 6
instance_description = ''
instance_description_short = 'Epicyon'
def handle_error(self, request, client_address):
        # suppress connection reset errors
cls, e_ret = sys.exc_info()[:2]
if cls is ConnectionResetError:
if e_ret.errno != errno.ECONNRESET:
print('ERROR: (EpicyonServer) ' + str(cls) + ", " + str(e_ret))
elif cls is BrokenPipeError:
pass
else:
print('ERROR: (EpicyonServer) ' + str(cls) + ", " + str(e_ret))
return HTTPServer.handle_error(self, request, client_address)
def run_posts_queue(base_dir: str, send_threads: [], debug: bool,
timeout_mins: int) -> None:
"""Manages the threads used to send posts
"""
while True:
time.sleep(1)
remove_dormant_threads(base_dir, send_threads, debug, timeout_mins)
def run_shares_expire(version_number: str, base_dir: str, httpd) -> None:
"""Expires shares as needed
"""
while True:
time.sleep(120)
expire_shares(base_dir, httpd.max_shares_on_profile,
httpd.person_cache)
def run_posts_watchdog(project_version: str, httpd) -> None:
"""This tries to keep the posts thread running even if it dies
"""
print('THREAD: Starting posts queue watchdog')
posts_queue_original = httpd.thrPostsQueue.clone(run_posts_queue)
begin_thread(httpd.thrPostsQueue, 'run_posts_watchdog')
while True:
time.sleep(20)
if httpd.thrPostsQueue.is_alive():
continue
httpd.thrPostsQueue.kill()
print('THREAD: restarting posts queue')
httpd.thrPostsQueue = posts_queue_original.clone(run_posts_queue)
begin_thread(httpd.thrPostsQueue, 'run_posts_watchdog 2')
print('Restarting posts queue...')
def run_shares_expire_watchdog(project_version: str, httpd) -> None:
"""This tries to keep the shares expiry thread running even if it dies
"""
print('THREAD: Starting shares expiry watchdog')
shares_expire_original = httpd.thrSharesExpire.clone(run_shares_expire)
begin_thread(httpd.thrSharesExpire, 'run_shares_expire_watchdog')
while True:
time.sleep(20)
if httpd.thrSharesExpire.is_alive():
continue
httpd.thrSharesExpire.kill()
print('THREAD: restarting shares watchdog')
httpd.thrSharesExpire = shares_expire_original.clone(run_shares_expire)
begin_thread(httpd.thrSharesExpire, 'run_shares_expire_watchdog 2')
print('Restarting shares expiry...')
def load_tokens(base_dir: str, tokens_dict: {}, tokens_lookup: {}) -> None:
"""Loads shared items access tokens for each account
"""
dir_str = data_dir(base_dir)
for _, dirs, _ in os.walk(dir_str):
for handle in dirs:
if '@' in handle:
token_filename = acct_handle_dir(base_dir, handle) + '/.token'
if not os.path.isfile(token_filename):
continue
nickname = handle.split('@')[0]
token = None
try:
with open(token_filename, 'r',
encoding='utf-8') as fp_tok:
token = fp_tok.read()
except OSError as ex:
print('WARN: Unable to read token for ' +
nickname + ' ' + str(ex))
if not token:
continue
tokens_dict[nickname] = token
tokens_lookup[token] = nickname
break
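
# Illustrative note: load_tokens() above expects one '.token' file per
# account handle inside the accounts data directory, for example a
# hypothetical account 'alice' on 'example.net' would be read from
#   <data_dir>/alice@example.net/.token
# and the token text is then indexed both by nickname (tokens_dict) and
# by token value (tokens_lookup).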
def run_daemon(accounts_data_dir: str,
no_of_books: int,
public_replies_unlisted: int,
max_shares_on_profile: int,
max_hashtags: int,
map_format: str,
clacks: str,
preferred_podcast_formats: [],
check_actor_timeout: int,
crawlers_allowed: [],
dyslexic_font: bool,
content_license_url: str,
lists_enabled: str,
default_reply_interval_hrs: int,
low_bandwidth: bool,
max_like_count: int,
shared_items_federated_domains: [],
user_agents_blocked: [],
log_login_failures: bool,
city: str,
show_node_info_accounts: bool,
show_node_info_version: bool,
broch_mode: bool,
verify_all_signatures: bool,
send_threads_timeout_mins: int,
dormant_months: int,
max_newswire_posts: int,
allow_local_network_access: bool,
max_feed_item_size_kb: int,
publish_button_at_top: bool,
rss_icon_at_top: bool,
icons_as_buttons: bool,
full_width_tl_button_header: bool,
show_publish_as_icon: bool,
max_followers: int,
max_news_posts: int,
max_mirrored_articles: int,
max_newswire_feed_size_kb: int,
max_newswire_posts_per_source: int,
show_published_date_only: bool,
voting_time_mins: int,
positive_voting: bool,
newswire_votes_threshold: int,
news_instance: bool,
blogs_instance: bool,
media_instance: bool,
max_recent_posts: int,
enable_shared_inbox: bool,
registration: bool,
language: str,
project_version: str,
instance_id: str,
client_to_server: bool,
base_dir: str,
domain: str,
onion_domain: str,
i2p_domain: str,
yt_replace_domain: str,
twitter_replacement_domain: str,
port: int,
proxy_port: int,
http_prefix: str,
fed_list: [],
max_mentions: int,
max_emoji: int,
secure_mode: bool,
proxy_type: str,
max_replies: int,
domain_max_posts_per_day: int,
account_max_posts_per_day: int,
allow_deletion: bool,
debug: bool,
unit_test: bool,
instance_only_skills_search: bool,
send_threads: [],
manual_follower_approval: bool,
watermark_width_percent: int,
watermark_position: str,
watermark_opacity: int,
bind_to_ip_address: str) -> None:
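    """Runs the Epicyon ActivityPub daemon: loads the instance configuration
    into an EpicyonServer object, creates the shared and news inboxes and
    cache directories if needed, starts the background threads (inbox queue,
    scheduled posts, newswire, shares expiry, federated blocks and their
    watchdogs) and then blocks in serve_forever()
    """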
if len(domain) == 0:
domain = 'localhost'
if '.' not in domain:
if domain != 'localhost':
print('Invalid domain: ' + domain)
return
update_moved_actors(base_dir, debug)
if unit_test:
server_address = (domain, proxy_port)
pub_handler = partial(PubServerUnitTest)
else:
if not bind_to_ip_address:
server_address = ('', proxy_port)
else:
server_address = (bind_to_ip_address, proxy_port)
pub_handler = partial(PubServer)
if accounts_data_dir:
set_accounts_data_dir(base_dir, accounts_data_dir)
dir_str = data_dir(base_dir)
if not os.path.isdir(dir_str):
print('Creating accounts directory')
os.mkdir(dir_str)
httpd = None
try:
httpd = EpicyonServer(server_address, pub_handler)
except SocketError as ex:
if ex.errno == errno.ECONNREFUSED:
print('EX: HTTP server address is already in use. ' +
str(server_address))
return False
print('EX: HTTP server failed to start. ' + str(ex))
print('server_address: ' + str(server_address))
return False
if not httpd:
print('Unable to start daemon')
return False
httpd.starting_daemon = True
# the last time when an LLM scraper was replied to
httpd.last_llm_time = None
    # width (as a percentage of the attached image width), position and
    # opacity of the watermark applied to attached images
httpd.watermark_width_percent = watermark_width_percent
httpd.watermark_position = watermark_position
httpd.watermark_opacity = watermark_opacity
# for each account whether to hide announces
httpd.hide_announces = {}
hide_announces_filename = data_dir(base_dir) + '/hide_announces.json'
if os.path.isfile(hide_announces_filename):
httpd.hide_announces = load_json(hide_announces_filename)
# short description of the instance
httpd.instance_description_short = \
get_config_param(base_dir, 'instanceDescriptionShort')
if httpd.instance_description_short is None:
httpd.instance_description_short = 'Epicyon'
# description of the instance
httpd.instance_description = \
get_config_param(base_dir, 'instanceDescription')
if httpd.instance_description is None:
httpd.instance_description = ''
# number of book events which show on profile screens
httpd.no_of_books = no_of_books
# initialise federated blocklists
httpd.max_api_blocks = 32000
httpd.block_federated_endpoints = \
load_federated_blocks_endpoints(base_dir)
httpd.block_federated = []
# cache storing recent book events
httpd.books_cache = {}
httpd.max_recent_books = 1000
httpd.max_cached_readers = 24
# cache for automatic content warnings
httpd.auto_cw_cache = load_auto_cw_cache(base_dir)
# loads a catalog of http header fields
headers_catalog_fieldname = data_dir(base_dir) + '/headers_catalog.json'
httpd.headers_catalog = {}
if os.path.isfile(headers_catalog_fieldname):
httpd.headers_catalog = load_json(headers_catalog_fieldname)
# list of websites which are currently down
httpd.sites_unavailable = load_unavailable_sites(base_dir)
# maximum number of shared items attached to actors, as in
# https://codeberg.org/fediverse/fep/src/branch/main/fep/0837/fep-0837.md
httpd.max_shares_on_profile = max_shares_on_profile
# load a list of nicknames for accounts blocking military instances
httpd.block_military = load_blocked_military(base_dir)
# scan the theme directory for any svg files containing scripts
assert not scan_themes_for_scripts(base_dir)
# lock for followers synchronization
httpd.followers_synchronization = False
# cache containing followers synchronization hashes and json
httpd.followers_sync_cache = {}
# permitted sites from which the buy button may be displayed
httpd.buy_sites = load_buy_sites(base_dir)
# which accounts should minimize all attached images by default
httpd.min_images_for_accounts = load_min_images_for_accounts(base_dir)
# default language for each account when creating a new post
httpd.default_post_language = load_default_post_languages(base_dir)
# caches css files
httpd.css_cache = {}
httpd.reverse_sequence = load_reverse_timeline(base_dir)
httpd.clacks = get_config_param(base_dir, 'clacks')
if not httpd.clacks:
if clacks:
httpd.clacks = clacks
else:
httpd.clacks = 'GNU Natalie Nguyen'
httpd.public_replies_unlisted = public_replies_unlisted
# load a list of dogwhistle words
dogwhistles_filename = data_dir(base_dir) + '/dogwhistles.txt'
if not os.path.isfile(dogwhistles_filename):
dogwhistles_filename = base_dir + '/default_dogwhistles.txt'
httpd.dogwhistles = load_dogwhistles(dogwhistles_filename)
# list of preferred podcast formats
# eg ['audio/opus', 'audio/mp3', 'audio/speex']
httpd.preferred_podcast_formats = preferred_podcast_formats
# for each account, whether bold reading is enabled
httpd.bold_reading = load_bold_reading(base_dir)
# whether to hide follows on profile screen for each account
httpd.hide_follows = load_hide_follows(base_dir)
httpd.account_timezone = load_account_timezones(base_dir)
httpd.post_to_nickname = None
httpd.nodeinfo_is_active = False
httpd.security_txt_is_active = False
httpd.vcard_is_active = False
httpd.masto_api_is_active = False
# use kml or gpx format for hashtag maps
httpd.map_format = map_format.lower()
httpd.dyslexic_font = dyslexic_font
# license for content of the instance
if not content_license_url:
content_license_url = 'https://creativecommons.org/licenses/by-nc/4.0'
httpd.content_license_url = content_license_url
httpd.dm_license_url = ''
# fitness metrics
fitness_filename = data_dir(base_dir) + '/fitness.json'
httpd.fitness = {}
if os.path.isfile(fitness_filename):
fitness = load_json(fitness_filename)
if fitness is not None:
httpd.fitness = fitness
# initialize authorized fetch key
httpd.signing_priv_key_pem = None
httpd.show_node_info_accounts = show_node_info_accounts
httpd.show_node_info_version = show_node_info_version
# ASCII/ANSI text banner used in shell browsers, such as Lynx
httpd.text_mode_banner = get_text_mode_banner(base_dir)
# key shortcuts SHIFT + ALT + [key]
httpd.access_keys = {
'Page up': ',',
'Page down': '.',
'submitButton': 'y',
'followButton': 'f',
'moveButton': 'm',
'blockButton': 'b',
'infoButton': 'i',
'snoozeButton': 's',
'reportButton': '[',
'viewButton': 'v',
'unblockButton': 'u',
'enterPetname': 'p',
'enterNotes': 'n',
'menuTimeline': 't',
'menuEdit': 'e',
'menuThemeDesigner': 'z',
'menuProfile': 'p',
'menuInbox': 'i',
'menuSearch': '/',
'menuNewPost': 'n',
'menuNewBlog': '0',
'menuCalendar': 'c',
'menuDM': 'd',
'menuReplies': 'r',
'menuOutbox': 's',
'menuBookmarks': 'q',
'menuShares': 'h',
'menuWanted': 'w',
'menuReadingStatus': '=',
'menuBlogs': 'b',
'menuNewswire': '#',
'menuLinks': 'l',
'menuMedia': 'm',
'menuModeration': 'o',
'menuFollowing': 'f',
'menuFollowers': 'g',
'menuRoles': 'o',
'menuSkills': 'a',
'menuLogout': 'x',
'menuKeys': 'k',
'Public': 'p',
'Reminder': 'r'
}
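
    # For example, with the defaults above SHIFT + ALT + 't' opens the
    # timeline and SHIFT + ALT + '/' opens search; per-account overrides
    # are loaded into key_shortcuts further below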
# timeout used when getting rss feeds
httpd.rss_timeout_sec = 20
# load dictionary used for LLM poisoning
httpd.dictionary = load_dictionary(base_dir)
httpd.twograms = load_2grams(base_dir)
# timeout used when checking for actor changes when clicking an avatar
# and entering person options screen
if check_actor_timeout < 2:
check_actor_timeout = 2
httpd.check_actor_timeout = check_actor_timeout
# how many hours after a post was published can a reply be made
default_reply_interval_hrs = 9999999
httpd.default_reply_interval_hrs = default_reply_interval_hrs
# recent caldav etags for each account
httpd.recent_dav_etags = {}
httpd.key_shortcuts = {}
load_access_keys_for_accounts(base_dir, httpd.key_shortcuts,
httpd.access_keys)
    # whether to use low bandwidth images
httpd.low_bandwidth = low_bandwidth
# list of blocked user agent types within the User-Agent header
httpd.user_agents_blocked = user_agents_blocked
# list of crawler bots permitted within the User-Agent header
httpd.crawlers_allowed = crawlers_allowed
# list of web crawlers known to the system
httpd.known_bots = load_known_web_bots(base_dir)
httpd.unit_test = unit_test
httpd.allow_local_network_access = allow_local_network_access
if unit_test:
# unit tests are run on the local network with LAN addresses
httpd.allow_local_network_access = True
httpd.yt_replace_domain = yt_replace_domain
httpd.twitter_replacement_domain = twitter_replacement_domain
# newswire storing rss feeds
httpd.newswire = {}
# maximum number of posts to appear in the newswire on the right column
httpd.max_newswire_posts = max_newswire_posts
# whether to require that all incoming posts have valid jsonld signatures
httpd.verify_all_signatures = verify_all_signatures
# This counter is used to update the list of blocked domains in memory.
# It helps to avoid touching the disk and so improves flooding resistance
httpd.blocklistUpdateCtr = 0
httpd.blocklistUpdateInterval = 100
httpd.domainBlocklist = get_domain_blocklist(base_dir)
httpd.manual_follower_approval = manual_follower_approval
if domain.endswith('.onion'):
onion_domain = domain
elif domain.endswith('.i2p'):
i2p_domain = domain
httpd.onion_domain = onion_domain
httpd.i2p_domain = i2p_domain
httpd.media_instance = media_instance
httpd.blogs_instance = blogs_instance
# load translations dictionary
httpd.translate = {}
httpd.system_language = 'en'
if not unit_test:
httpd.translate, httpd.system_language = \
load_translations_from_file(base_dir, language)
if not httpd.system_language:
print('ERROR: no system language loaded')
sys.exit()
print('System language: ' + httpd.system_language)
if not httpd.translate:
print('ERROR: no translations were loaded')
sys.exit()
# create hashtag categories for cities
load_city_hashtags(base_dir, httpd.translate)
# spoofed city for gps location misdirection
httpd.city = city
# For moderated newswire feeds this is the amount of time allowed
# for voting after the post arrives
httpd.voting_time_mins = voting_time_mins
# on the newswire, whether moderators vote positively for items
# or against them (veto)
httpd.positive_voting = positive_voting
# number of votes needed to remove a newswire item from the news timeline
    # or if positive voting is enabled to add the item to the news timeline
httpd.newswire_votes_threshold = newswire_votes_threshold
# maximum overall size of an rss/atom feed read by the newswire daemon
# If the feed is too large then this is probably a DoS attempt
httpd.max_newswire_feed_size_kb = max_newswire_feed_size_kb
# For each newswire source (account or rss feed)
# this is the maximum number of posts to show for each.
# This avoids one or two sources from dominating the news,
# and also prevents big feeds from slowing down page load times
httpd.max_newswire_posts_per_source = max_newswire_posts_per_source
# Show only the date at the bottom of posts, and not the time
httpd.show_published_date_only = show_published_date_only
# maximum number of news articles to mirror
httpd.max_mirrored_articles = max_mirrored_articles
# maximum number of posts in the news timeline/outbox
httpd.max_news_posts = max_news_posts
# The maximum number of tags per post which can be
# attached to RSS feeds pulled in via the newswire
httpd.maxTags = 32
# maximum number of followers per account
httpd.max_followers = max_followers
# whether to show an icon for publish on the
# newswire, or a 'Publish' button
httpd.show_publish_as_icon = show_publish_as_icon
# Whether to show the timeline header containing inbox, outbox
# calendar, etc as the full width of the screen or not
httpd.full_width_tl_button_header = full_width_tl_button_header
# whether to show icons in the header (eg calendar) as buttons
httpd.icons_as_buttons = icons_as_buttons
# whether to show the RSS icon at the top or the bottom of the timeline
httpd.rss_icon_at_top = rss_icon_at_top
# Whether to show the newswire publish button at the top,
# above the header image
httpd.publish_button_at_top = publish_button_at_top
# maximum size of individual RSS feed items, in K
httpd.max_feed_item_size_kb = max_feed_item_size_kb
# maximum size of a hashtag category, in K
httpd.maxCategoriesFeedItemSizeKb = 1024
# how many months does a followed account need to be unseen
# for it to be considered dormant?
httpd.dormant_months = dormant_months
# maximum number of likes to display on a post
httpd.max_like_count = max_like_count
if httpd.max_like_count < 0:
httpd.max_like_count = 0
elif httpd.max_like_count > 16:
httpd.max_like_count = 16
httpd.followingItemsPerPage = 12
if registration == 'open':
httpd.registration = True
else:
httpd.registration = False
httpd.enable_shared_inbox = enable_shared_inbox
httpd.outboxThread = {}
httpd.outbox_thread_index = {}
httpd.new_post_thread = {}
httpd.project_version = project_version
httpd.secure_mode = secure_mode
# max POST size of 30M
httpd.max_post_length = 1024 * 1024 * 30
httpd.maxMediaSize = httpd.max_post_length
# Maximum text length is 64K - enough for a blog post
httpd.maxMessageLength = 64000
# Maximum overall number of posts per box
httpd.maxPostsInBox = 32000
httpd.maxCacheAgeDays = 30
httpd.domain = domain
httpd.port = port
httpd.domain_full = get_full_domain(domain, port)
httpd.qrcode_scale = 6
if onion_domain:
save_domain_qrcode(base_dir, 'http', onion_domain, httpd.qrcode_scale)
elif i2p_domain:
save_domain_qrcode(base_dir, 'http', i2p_domain, httpd.qrcode_scale)
else:
save_domain_qrcode(base_dir, http_prefix, httpd.domain_full,
httpd.qrcode_scale)
clear_person_qrcodes(base_dir)
httpd.http_prefix = http_prefix
httpd.debug = debug
httpd.federation_list = fed_list.copy()
httpd.shared_items_federated_domains = \
shared_items_federated_domains.copy()
httpd.base_dir = base_dir
httpd.instance_id = instance_id
httpd.person_cache = {}
httpd.cached_webfingers = {}
httpd.favicons_cache = {}
httpd.proxy_type = proxy_type
httpd.session = None
httpd.session_onion = None
httpd.session_i2p = None
httpd.last_getreq = 0
httpd.last_postreq = 0
httpd.getreq_busy = False
httpd.postreq_busy = False
httpd.received_message = False
httpd.inbox_queue = []
httpd.send_threads = send_threads
httpd.postLog = []
httpd.max_queue_length = 64
httpd.allow_deletion = allow_deletion
httpd.last_login_time = 0
httpd.last_login_failure = 0
httpd.login_failure_count = {}
httpd.log_login_failures = log_login_failures
httpd.max_replies = max_replies
httpd.tokens = {}
httpd.tokens_lookup = {}
load_tokens(base_dir, httpd.tokens, httpd.tokens_lookup)
httpd.instance_only_skills_search = instance_only_skills_search
# contains threads used to send posts to followers
httpd.followers_threads = []
# create a cache of blocked domains in memory.
# This limits the amount of slow disk reads which need to be done
httpd.blocked_cache = []
httpd.blocked_cache_last_updated = 0
httpd.blocked_cache_update_secs = 120
httpd.blocked_cache_last_updated = \
update_blocked_cache(base_dir, httpd.blocked_cache,
httpd.blocked_cache_last_updated, 0)
# get the list of custom emoji, for use by the mastodon api
httpd.custom_emoji = \
metadata_custom_emoji(base_dir, http_prefix, httpd.domain_full)
# whether to enable broch mode, which locks down the instance
set_broch_mode(base_dir, httpd.domain_full, broch_mode)
dir_str = data_dir(base_dir)
if not os.path.isdir(dir_str + '/inbox@' + domain):
print('Creating shared inbox: inbox@' + domain)
create_shared_inbox(base_dir, 'inbox', domain, port, http_prefix)
if not os.path.isdir(dir_str + '/news@' + domain):
print('Creating news inbox: news@' + domain)
create_news_inbox(base_dir, domain, port, http_prefix)
set_config_param(base_dir, "listsEnabled", "Murdoch press")
# dict of known web crawlers accessing nodeinfo or the masto API
# and how many times they have been seen
httpd.known_crawlers = {}
known_crawlers_filename = dir_str + '/knownCrawlers.json'
if os.path.isfile(known_crawlers_filename):
httpd.known_crawlers = load_json(known_crawlers_filename)
# when was the last crawler seen?
httpd.last_known_crawler = 0
if lists_enabled:
httpd.lists_enabled = lists_enabled
else:
httpd.lists_enabled = get_config_param(base_dir, "listsEnabled")
httpd.cw_lists = load_cw_lists(base_dir, True)
# set the avatar for the news account
httpd.theme_name = get_config_param(base_dir, 'theme')
if not httpd.theme_name:
httpd.theme_name = 'default'
if is_news_theme_name(base_dir, httpd.theme_name):
news_instance = True
httpd.news_instance = news_instance
httpd.default_timeline = 'inbox'
if media_instance:
httpd.default_timeline = 'tlmedia'
if blogs_instance:
httpd.default_timeline = 'tlblogs'
if news_instance:
httpd.default_timeline = 'tlfeatures'
set_news_avatar(base_dir,
httpd.theme_name,
http_prefix,
domain,
httpd.domain_full)
if not os.path.isdir(base_dir + '/cache'):
os.mkdir(base_dir + '/cache')
if not os.path.isdir(base_dir + '/cache/actors'):
print('Creating actors cache')
os.mkdir(base_dir + '/cache/actors')
if not os.path.isdir(base_dir + '/cache/announce'):
print('Creating announce cache')
os.mkdir(base_dir + '/cache/announce')
if not os.path.isdir(base_dir + '/cache/avatars'):
print('Creating avatars cache')
os.mkdir(base_dir + '/cache/avatars')
archive_dir = base_dir + '/archive'
if not os.path.isdir(archive_dir):
print('Creating archive')
os.mkdir(archive_dir)
if not os.path.isdir(base_dir + '/sharefiles'):
print('Creating shared item files directory')
os.mkdir(base_dir + '/sharefiles')
print('THREAD: Creating fitness thread')
httpd.thrFitness = \
thread_with_trace(target=fitness_thread,
args=(base_dir, httpd.fitness), daemon=True)
begin_thread(httpd.thrFitness, 'run_daemon thrFitness')
httpd.recent_posts_cache = {}
print('THREAD: Creating cache expiry thread')
httpd.thrCache = \
thread_with_trace(target=expire_cache,
args=(base_dir, httpd.person_cache,
httpd.http_prefix,
archive_dir,
httpd.recent_posts_cache,
httpd.maxPostsInBox,
httpd.maxCacheAgeDays), daemon=True)
begin_thread(httpd.thrCache, 'run_daemon thrCache')
# number of mins after which sending posts or updates will expire
httpd.send_threads_timeout_mins = send_threads_timeout_mins
print('THREAD: Creating posts queue')
httpd.thrPostsQueue = \
thread_with_trace(target=run_posts_queue,
args=(base_dir, httpd.send_threads, debug,
httpd.send_threads_timeout_mins), daemon=True)
if not unit_test:
print('THREAD: run_posts_watchdog')
httpd.thrPostsWatchdog = \
thread_with_trace(target=run_posts_watchdog,
args=(project_version, httpd), daemon=True)
begin_thread(httpd.thrPostsWatchdog, 'run_daemon thrPostWatchdog')
else:
begin_thread(httpd.thrPostsQueue, 'run_daemon thrPostWatchdog 2')
print('THREAD: Creating expire thread for shared items')
httpd.thrSharesExpire = \
thread_with_trace(target=run_shares_expire,
args=(project_version, base_dir,
httpd),
daemon=True)
if not unit_test:
print('THREAD: run_shares_expire_watchdog')
httpd.thrSharesExpireWatchdog = \
thread_with_trace(target=run_shares_expire_watchdog,
args=(project_version, httpd), daemon=True)
begin_thread(httpd.thrSharesExpireWatchdog,
'run_daemon thrSharesExpireWatchdog')
else:
begin_thread(httpd.thrSharesExpire,
'run_daemon thrSharesExpireWatchdog 2')
httpd.max_recent_posts = max_recent_posts
httpd.iconsCache = {}
httpd.fontsCache = {}
# create tokens used for shared item federation
fed_domains = httpd.shared_items_federated_domains
httpd.shared_item_federation_tokens = \
generate_shared_item_federation_tokens(fed_domains,
base_dir)
si_federation_tokens = httpd.shared_item_federation_tokens
httpd.shared_item_federation_tokens = \
create_shared_item_federation_token(base_dir, httpd.domain_full, False,
si_federation_tokens)
# load peertube instances from file into a list
httpd.peertube_instances = []
load_peertube_instances(base_dir, httpd.peertube_instances)
create_initial_last_seen(base_dir, http_prefix)
httpd.max_mentions = max_mentions
httpd.max_emoji = max_emoji
httpd.max_hashtags = max_hashtags
print('THREAD: Creating inbox queue')
httpd.thrInboxQueue = \
thread_with_trace(target=run_inbox_queue,
args=(httpd, httpd.recent_posts_cache,
httpd.max_recent_posts,
project_version,
base_dir, http_prefix, httpd.send_threads,
httpd.postLog, httpd.cached_webfingers,
httpd.person_cache, httpd.inbox_queue,
domain, onion_domain, i2p_domain,
port, proxy_type,
httpd.federation_list,
max_replies,
domain_max_posts_per_day,
account_max_posts_per_day,
allow_deletion, debug,
max_mentions, max_emoji,
httpd.translate, unit_test,
httpd.yt_replace_domain,
httpd.twitter_replacement_domain,
httpd.show_published_date_only,
httpd.max_followers,
httpd.allow_local_network_access,
httpd.peertube_instances,
verify_all_signatures,
httpd.theme_name,
httpd.system_language,
httpd.max_like_count,
httpd.signing_priv_key_pem,
httpd.default_reply_interval_hrs,
httpd.cw_lists,
httpd.max_hashtags), daemon=True)
print('THREAD: Creating scheduled post thread')
httpd.thrPostSchedule = \
thread_with_trace(target=run_post_schedule,
args=(base_dir, httpd, 20), daemon=True)
print('THREAD: Creating newswire thread')
httpd.thrNewswireDaemon = \
thread_with_trace(target=run_newswire_daemon,
args=(base_dir, httpd,
http_prefix, domain, port,
httpd.translate), daemon=True)
print('THREAD: Creating federated shares thread')
httpd.thrFederatedSharesDaemon = \
thread_with_trace(target=run_federated_shares_daemon,
args=(base_dir, httpd,
http_prefix, httpd.domain_full,
proxy_type, debug,
httpd.system_language), daemon=True)
# flags used when restarting the inbox queue
httpd.restart_inbox_queue_in_progress = False
httpd.restart_inbox_queue = False
update_hashtag_categories(base_dir)
print('Adding hashtag categories for language ' + httpd.system_language)
load_hashtag_categories(base_dir, httpd.system_language)
# signing key used for authorized fetch
# this is the instance actor private key
httpd.signing_priv_key_pem = get_instance_actor_key(base_dir, domain)
# threads used for checking for actor changes when clicking on
# avatar icon / person options
httpd.thrCheckActor = {}
if not unit_test:
print('THREAD: Creating import following watchdog')
httpd.thrImportFollowing = \
thread_with_trace(target=run_import_following_watchdog,
args=(project_version, httpd), daemon=True)
begin_thread(httpd.thrImportFollowing,
'run_daemon thrImportFollowing')
print('THREAD: Creating inbox queue watchdog')
httpd.thrWatchdog = \
thread_with_trace(target=run_inbox_queue_watchdog,
args=(project_version, httpd), daemon=True)
begin_thread(httpd.thrWatchdog, 'run_daemon thrWatchdog')
print('THREAD: Creating scheduled post watchdog')
httpd.thrWatchdogSchedule = \
thread_with_trace(target=run_post_schedule_watchdog,
args=(project_version, httpd), daemon=True)
begin_thread(httpd.thrWatchdogSchedule,
'run_daemon thrWatchdogSchedule')
print('THREAD: Creating newswire watchdog')
httpd.thrNewswireWatchdog = \
thread_with_trace(target=run_newswire_watchdog,
args=(project_version, httpd), daemon=True)
begin_thread(httpd.thrNewswireWatchdog,
'run_daemon thrNewswireWatchdog')
print('THREAD: Creating federated shares watchdog')
httpd.thrFederatedSharesWatchdog = \
thread_with_trace(target=run_federated_shares_watchdog,
args=(project_version, httpd), daemon=True)
begin_thread(httpd.thrFederatedSharesWatchdog,
'run_daemon thrFederatedSharesWatchdog')
print('THREAD: Creating federated blocks thread')
httpd.thrFederatedBlocksDaemon = \
thread_with_trace(target=run_federated_blocks_daemon,
args=(base_dir, httpd, debug), daemon=True)
begin_thread(httpd.thrFederatedBlocksDaemon,
'run_daemon thrFederatedBlocksDaemon')
else:
print('Starting inbox queue')
begin_thread(httpd.thrInboxQueue, 'run_daemon start inbox')
print('Starting scheduled posts daemon')
begin_thread(httpd.thrPostSchedule,
'run_daemon start scheduled posts')
print('Starting federated shares daemon')
begin_thread(httpd.thrFederatedSharesDaemon,
'run_daemon start federated shares')
update_memorial_flags(base_dir, httpd.person_cache)
if client_to_server:
print('Running ActivityPub client on ' +
domain + ' port ' + str(proxy_port))
else:
print('Running ActivityPub server on ' +
domain + ' port ' + str(proxy_port))
httpd.starting_daemon = False
httpd.serve_forever()
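
# Note: run_daemon() is normally invoked from the epicyon.py startup script,
# which parses the command line options and passes them through as the long
# argument list above; httpd.serve_forever() then blocks until the process
# is stopped.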