mirror of https://gitlab.com/bashrc2/epicyon
Function for accounts data directory
parent c8cf6d4a70
commit f20eaefa94
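This commit replaces hard-coded path construction of the form base_dir + '/accounts' with a call to a data_dir() helper imported from utils, so the accounts data directory is defined in one place. The helper itself is added in utils.py and is not shown in the hunks below; the following is only a sketch of what it presumably looks like, inferred from how the call sites use it:

    # Sketch only: assumed definition of the helper referenced throughout this
    # commit (utils.data_dir). Call sites treat data_dir(base_dir) as equivalent
    # to the old base_dir + '/accounts' path; the real implementation may differ.
    def data_dir(base_dir: str) -> str:
        """Returns the directory where instance account data is stored"""
        return base_dir + '/accounts'

The per-file changes follow. Indentation of quoted code lines is approximate, since the original side-by-side rendering drops it.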

auth.py (14)

@@ -12,6 +12,7 @@ import hashlib
 import binascii
 import os
 import secrets
+from utils import data_dir
 from utils import is_system_account
 from utils import is_memorial_account
 from utils import has_users_path
@@ -143,7 +144,7 @@ def authorize_basic(base_dir: str, path: str, auth_header: str,
 print('basic auth - attempted login using memorial account ' +
 nickname + ' in Auth header')
 return False
-password_file = base_dir + '/accounts/passwords'
+password_file = data_dir(base_dir) + '/passwords'
 if not os.path.isfile(password_file):
 if debug:
 print('DEBUG: passwords file missing')
@@ -178,10 +179,11 @@ def store_basic_credentials(base_dir: str,
 nickname = remove_eol(nickname).strip()
 password = remove_eol(password).strip()

-if not os.path.isdir(base_dir + '/accounts'):
-os.mkdir(base_dir + '/accounts')
+dir_str = data_dir(base_dir)
+if not os.path.isdir(dir_str):
+os.mkdir(dir_str)

-password_file = base_dir + '/accounts/passwords'
+password_file = dir_str + '/passwords'
 store_str = nickname + ':' + _hash_password(password)
 if os.path.isfile(password_file):
 if text_in_file(nickname + ':', password_file):
@@ -226,7 +228,7 @@ def remove_password(base_dir: str, nickname: str) -> None:
 """Removes the password entry for the given nickname
 This is called during account removal
 """
-password_file = base_dir + '/accounts/passwords'
+password_file = data_dir(base_dir) + '/passwords'
 if os.path.isfile(password_file):
 try:
 with open(password_file, 'r', encoding='utf-8') as fin:
@@ -291,7 +293,7 @@ def record_login_failure(base_dir: str, ip_address: str,
 if not log_to_file:
 return

-failure_log = base_dir + '/accounts/loginfailures.log'
+failure_log = data_dir(base_dir) + '/loginfailures.log'
 write_type = 'a+'
 if not os.path.isfile(failure_log):
 write_type = 'w+'

blocking.py (52)

@@ -12,6 +12,7 @@ import json
 import time
 from session import get_json_valid
 from session import create_session
+from utils import data_dir
 from utils import string_contains
 from utils import date_from_string_format
 from utils import date_utcnow
@@ -240,7 +241,7 @@ def _add_global_block_reason(base_dir: str,
 return False

 blocking_reasons_filename = \
-base_dir + '/accounts/blocking_reasons.txt'
+data_dir(base_dir) + '/blocking_reasons.txt'

 if not block_nickname.startswith('#'):
 # is the handle already blocked?
@@ -302,7 +303,7 @@ def add_global_block(base_dir: str,
 block_nickname, block_domain,
 reason)

-blocking_filename = base_dir + '/accounts/blocking.txt'
+blocking_filename = data_dir(base_dir) + '/blocking.txt'
 if not block_nickname.startswith('#'):
 # is the handle already blocked?
 block_handle = block_nickname + '@' + block_domain
@@ -481,7 +482,7 @@ def _remove_global_block_reason(base_dir: str,
 unblock_domain: str) -> bool:
 """Remove a globla block reason
 """
-unblocking_filename = base_dir + '/accounts/blocking_reasons.txt'
+unblocking_filename = data_dir(base_dir) + '/blocking_reasons.txt'
 if not os.path.isfile(unblocking_filename):
 return False

@@ -524,7 +525,7 @@ def remove_global_block(base_dir: str,
 unblock_nickname,
 unblock_domain)

-unblocking_filename = base_dir + '/accounts/blocking.txt'
+unblocking_filename = data_dir(base_dir) + '/blocking.txt'
 if not unblock_nickname.startswith('#'):
 unblock_handle = unblock_nickname + '@' + unblock_domain
 if os.path.isfile(unblocking_filename):
@@ -621,7 +622,7 @@ def is_blocked_hashtag(base_dir: str, hashtag: str) -> bool:
 # avoid very long hashtags
 if len(hashtag) > 32:
 return True
-global_blocking_filename = base_dir + '/accounts/blocking.txt'
+global_blocking_filename = data_dir(base_dir) + '/blocking.txt'
 if os.path.isfile(global_blocking_filename):
 hashtag = hashtag.strip('\n').strip('\r')
 if not hashtag.startswith('#'):
@@ -641,7 +642,7 @@ def get_domain_blocklist(base_dir: str) -> str:
 for evil in evil_domains:
 blocked_str += evil + '\n'

-global_blocking_filename = base_dir + '/accounts/blocking.txt'
+global_blocking_filename = data_dir(base_dir) + '/blocking.txt'
 if not os.path.isfile(global_blocking_filename):
 return blocked_str
 try:
@@ -666,7 +667,7 @@ def update_blocked_cache(base_dir: str,
 seconds_since_last_update = curr_time - blocked_cache_last_updated
 if seconds_since_last_update < blocked_cache_update_secs:
 return blocked_cache_last_updated
-global_blocking_filename = base_dir + '/accounts/blocking.txt'
+global_blocking_filename = data_dir(base_dir) + '/blocking.txt'
 if not os.path.isfile(global_blocking_filename):
 return blocked_cache_last_updated
 try:
@@ -724,7 +725,7 @@ def is_blocked_domain(base_dir: str, domain: str,
 return True
 else:
 # instance block list
-global_blocking_filename = base_dir + '/accounts/blocking.txt'
+global_blocking_filename = data_dir(base_dir) + '/blocking.txt'
 if os.path.isfile(global_blocking_filename):
 search_str += '\n'
 search_str_short = None
@@ -743,7 +744,7 @@ def is_blocked_domain(base_dir: str, domain: str,
 print('EX: unable to read ' + global_blocking_filename +
 ' ' + str(ex))
 else:
-allow_filename = base_dir + '/accounts/allowedinstances.txt'
+allow_filename = data_dir(base_dir) + '/allowedinstances.txt'
 # instance allow list
 if not short_domain:
 if not text_in_file(domain, allow_filename):
@@ -766,7 +767,7 @@ def is_blocked_nickname(base_dir: str, nickname: str,
 return True
 else:
 # instance-wide block list
-global_blocking_filename = base_dir + '/accounts/blocking.txt'
+global_blocking_filename = data_dir(base_dir) + '/blocking.txt'
 if os.path.isfile(global_blocking_filename):
 search_str += '\n'
 try:
@@ -818,7 +819,7 @@ def is_blocked(base_dir: str, nickname: str, domain: str,
 if blocked_str == block_handle:
 return True
 else:
-global_blocks_filename = base_dir + '/accounts/blocking.txt'
+global_blocks_filename = data_dir(base_dir) + '/blocking.txt'
 if os.path.isfile(global_blocks_filename):
 if block_nickname:
 if text_in_file(block_nickname + '@*\n',
@@ -832,7 +833,7 @@ def is_blocked(base_dir: str, nickname: str, domain: str,
 return True
 if not block_federated:
 federated_blocks_filename = \
-base_dir + '/accounts/block_api.txt'
+data_dir(base_dir) + '/block_api.txt'
 if os.path.isfile(federated_blocks_filename):
 block_federated = []
 try:
@@ -849,7 +850,7 @@ def is_blocked(base_dir: str, nickname: str, domain: str,
 return True
 else:
 # instance allow list
-allow_filename = base_dir + '/accounts/allowedinstances.txt'
+allow_filename = data_dir(base_dir) + '/allowedinstances.txt'
 short_domain = _get_short_domain(block_domain)
 if not short_domain and block_domain:
 if not text_in_file(block_domain + '\n', allow_filename):
@@ -904,7 +905,7 @@ def allowed_announce(base_dir: str, nickname: str, domain: str,

 # non-cached instance level announce blocks
 global_announce_blocks_filename = \
-base_dir + '/accounts/noannounce.txt'
+data_dir(base_dir) + '/noannounce.txt'
 if os.path.isfile(global_announce_blocks_filename):
 if block_nickname:
 if text_in_file(block_nickname + '@*',
@@ -1563,7 +1564,7 @@ def outbox_undo_mute(base_dir: str, http_prefix: str,
 def broch_mode_is_active(base_dir: str) -> bool:
 """Returns true if broch mode is active
 """
-allow_filename = base_dir + '/accounts/allowedinstances.txt'
+allow_filename = data_dir(base_dir) + '/allowedinstances.txt'
 return os.path.isfile(allow_filename)


@@ -1576,7 +1577,7 @@ def set_broch_mode(base_dir: str, domain_full: str, enabled: bool) -> None:
 to construct an instance level allow list. Anything arriving
 which is then not from one of the allowed domains will be dropped
 """
-allow_filename = base_dir + '/accounts/allowedinstances.txt'
+allow_filename = data_dir(base_dir) + '/allowedinstances.txt'

 if not enabled:
 # remove instance allow list
@@ -1595,11 +1596,12 @@ def set_broch_mode(base_dir: str, domain_full: str, enabled: bool) -> None:
 # generate instance allow list
 allowed_domains = [domain_full]
 follow_files = ('following.txt', 'followers.txt')
-for _, dirs, _ in os.walk(base_dir + '/accounts'):
+dir_str = data_dir(base_dir)
+for _, dirs, _ in os.walk(dir_str):
 for acct in dirs:
 if not is_account_dir(acct):
 continue
-account_dir = os.path.join(base_dir + '/accounts', acct)
+account_dir = os.path.join(dir_str, acct)
 for follow_file_type in follow_files:
 following_filename = account_dir + '/' + follow_file_type
 if not os.path.isfile(following_filename):
@@ -1639,7 +1641,7 @@ def broch_modeLapses(base_dir: str, lapse_days: int) -> bool:
 """After broch mode is enabled it automatically
 elapses after a period of time
 """
-allow_filename = base_dir + '/accounts/allowedinstances.txt'
+allow_filename = data_dir(base_dir) + '/allowedinstances.txt'
 if not os.path.isfile(allow_filename):
 return False
 last_modified = file_last_modified(allow_filename)
@@ -1858,7 +1860,7 @@ def get_blocks_via_server(session, nickname: str, password: str,
 def load_blocked_military(base_dir: str) -> {}:
 """Loads a list of nicknames for accounts which block military instances
 """
-block_military_filename = base_dir + '/accounts/block_military.txt'
+block_military_filename = data_dir(base_dir) + '/block_military.txt'
 nicknames_list = []
 if os.path.isfile(block_military_filename):
 try:
@@ -1883,7 +1885,7 @@ def save_blocked_military(base_dir: str, block_military: {}) -> None:
 for nickname, _ in block_military.items():
 nicknames_str += nickname + '\n'

-block_military_filename = base_dir + '/accounts/block_military.txt'
+block_military_filename = data_dir(base_dir) + '/block_military.txt'
 try:
 with open(block_military_filename, 'w+',
 encoding='utf-8') as fp_mil:
@@ -1921,7 +1923,7 @@ def load_federated_blocks_endpoints(base_dir: str) -> []:
 """
 block_federated_endpoints = []
 block_api_endpoints_filename = \
-base_dir + '/accounts/block_api_endpoints.txt'
+data_dir(base_dir) + '/block_api_endpoints.txt'
 if os.path.isfile(block_api_endpoints_filename):
 new_block_federated_endpoints = []
 try:
@@ -2033,7 +2035,7 @@ def _update_federated_blocks(session, base_dir: str,
 block_federated.append(handle)

 block_api_filename = \
-base_dir + '/accounts/block_api.txt'
+data_dir(base_dir) + '/block_api.txt'
 if not new_block_api_str:
 print('DEBUG: federated blocklist not loaded: ' + block_api_filename)
 if os.path.isfile(block_api_filename):
@@ -2057,7 +2059,7 @@ def save_block_federated_endpoints(base_dir: str,
 """Saves a list of blocking API endpoints
 """
 block_api_endpoints_filename = \
-base_dir + '/accounts/block_api_endpoints.txt'
+data_dir(base_dir) + '/block_api_endpoints.txt'
 result = []
 block_federated_endpoints_str = ''
 for endpoint in block_federated_endpoints:
@@ -2079,7 +2081,7 @@ def save_block_federated_endpoints(base_dir: str,
 except OSError:
 print('EX: unable to delete block_api_endpoints.txt')
 block_api_filename = \
-base_dir + '/accounts/block_api.txt'
+data_dir(base_dir) + '/block_api.txt'
 if os.path.isfile(block_api_filename):
 try:
 os.remove(block_api_filename)

blog.py (24)

@@ -16,6 +16,7 @@ from webapp_utils import html_footer
 from webapp_utils import get_post_attachments_as_html
 from webapp_utils import edit_text_area
 from webapp_media import add_embedded_elements
+from utils import data_dir
 from utils import remove_link_tracking
 from utils import get_url_from_post
 from utils import date_from_string_format
@@ -704,11 +705,12 @@ def _no_of_blog_accounts(base_dir: str) -> int:
 """Returns the number of blog accounts
 """
 ctr = 0
-for _, dirs, _ in os.walk(base_dir + '/accounts'):
+dir_str = data_dir(base_dir)
+for _, dirs, _ in os.walk(dir_str):
 for acct in dirs:
 if not is_account_dir(acct):
 continue
-account_dir = os.path.join(base_dir + '/accounts', acct)
+account_dir = os.path.join(dir_str, acct)
 blogs_index = account_dir + '/tlblogs.index'
 if os.path.isfile(blogs_index):
 ctr += 1
@@ -719,11 +721,12 @@ def _no_of_blog_accounts(base_dir: str) -> int:
 def _single_blog_account_nickname(base_dir: str) -> str:
 """Returns the nickname of a single blog account
 """
-for _, dirs, _ in os.walk(base_dir + '/accounts'):
+dir_str = data_dir(base_dir)
+for _, dirs, _ in os.walk(dir_str):
 for acct in dirs:
 if not is_account_dir(acct):
 continue
-account_dir = os.path.join(base_dir + '/accounts', acct)
+account_dir = os.path.join(dir_str, acct)
 blogs_index = account_dir + '/tlblogs.index'
 if os.path.isfile(blogs_index):
 return acct.split('@')[0]
@@ -760,11 +763,12 @@ def html_blog_view(authorized: bool,

 domain_full = get_full_domain(domain, port)

-for _, dirs, _ in os.walk(base_dir + '/accounts'):
+dir_str = data_dir(base_dir)
+for _, dirs, _ in os.walk(dir_str):
 for acct in dirs:
 if not is_account_dir(acct):
 continue
-account_dir = os.path.join(base_dir + '/accounts', acct)
+account_dir = os.path.join(dir_str, acct)
 blogs_index = account_dir + '/tlblogs.index'
 if os.path.isfile(blogs_index):
 blog_str += '<p class="blogaccount">'
@@ -796,13 +800,13 @@ def html_edit_blog(media_instance: bool, translate: {},
 edit_blog_text = \
 '<h1">' + translate['Write your post text below.'] + '</h1>'

-if os.path.isfile(base_dir + '/accounts/newpost.txt'):
+dir_str = data_dir(base_dir)
+if os.path.isfile(dir_str + '/newpost.txt'):
 try:
-with open(base_dir + '/accounts/newpost.txt', 'r',
-encoding='utf-8') as file:
+with open(dir_str + '/newpost.txt', 'r', encoding='utf-8') as file:
 edit_blog_text = '<p>' + file.read() + '</p>'
 except OSError:
-print('EX: unable to read ' + base_dir + '/accounts/newpost.txt')
+print('EX: unable to read ' + dir_str + '/newpost.txt')

 css_filename = base_dir + '/epicyon-profile.css'
 if os.path.isfile(base_dir + '/epicyon.css'):

@@ -9,6 +9,7 @@ __module_group__ = "RSS Feeds"

 import os
 import datetime
+from utils import data_dir
 from utils import date_utcnow
 from utils import date_epoch

@@ -181,7 +182,7 @@ def get_hashtag_categories(base_dir: str,
 def update_hashtag_categories(base_dir: str) -> None:
 """Regenerates the list of hashtag categories
 """
-category_list_filename = base_dir + '/accounts/categoryList.txt'
+category_list_filename = data_dir(base_dir) + '/categoryList.txt'
 hashtag_categories = get_hashtag_categories(base_dir, False, None)
 if not hashtag_categories:
 if os.path.isfile(category_list_filename):

@@ -15,6 +15,7 @@ import email.parser
 import urllib.parse
 from shutil import copyfile
 from dateutil.parser import parse
+from utils import data_dir
 from utils import remove_link_tracking
 from utils import string_contains
 from utils import string_ends_with
@@ -383,7 +384,7 @@ def _update_common_emoji(base_dir: str, emoji_content: str) -> None:
 emoji_content = _get_emoji_name_from_code(base_dir, emoji_code)
 if not emoji_content:
 return
-common_emoji_filename = base_dir + '/accounts/common_emoji.txt'
+common_emoji_filename = data_dir(base_dir) + '/common_emoji.txt'
 common_emoji = None
 if os.path.isfile(common_emoji_filename):
 try:
@@ -2297,7 +2298,8 @@ def load_auto_cw_cache(base_dir: str) -> {}:
 for each account
 """
 auto_cw_cache = {}
-for _, dirs, _ in os.walk(base_dir + '/accounts'):
+dir_str = data_dir(base_dir)
+for _, dirs, _ in os.walk(dir_str):
 for handle in dirs:
 if not is_account_dir(handle):
 continue

@@ -9,6 +9,7 @@ __module_group__ = "Core"

 import os
 import time
+from utils import data_dir
 from utils import save_json
 from utils import user_agent_domain
 from utils import remove_eol
@@ -49,15 +50,15 @@ def update_known_crawlers(ua_str: str,
 for uagent in remove_crawlers:
 del known_crawlers[uagent]
 # save the list of crawlers
-save_json(known_crawlers,
-base_dir + '/accounts/knownCrawlers.json')
+dir_str = data_dir(base_dir)
+save_json(known_crawlers, dir_str + '/knownCrawlers.json')
 return curr_time


 def load_known_web_bots(base_dir: str) -> []:
 """Returns a list of known web bots
 """
-known_bots_filename = base_dir + '/accounts/knownBots.txt'
+known_bots_filename = data_dir(base_dir) + '/knownBots.txt'
 if not os.path.isfile(known_bots_filename):
 return []
 crawlers_str = None
@@ -85,7 +86,7 @@ def load_known_web_bots(base_dir: str) -> []:
 def _save_known_web_bots(base_dir: str, known_bots: []) -> bool:
 """Saves a list of known web bots
 """
-known_bots_filename = base_dir + '/accounts/knownBots.txt'
+known_bots_filename = data_dir(base_dir) + '/knownBots.txt'
 known_bots_str = ''
 for crawler in known_bots:
 known_bots_str += crawler.strip() + '\n'

daemon.py (27)

@@ -50,6 +50,7 @@ from shares import expire_shares
 from categories import load_city_hashtags
 from categories import update_hashtag_categories
 from languages import load_default_post_languages
+from utils import data_dir
 from utils import string_contains
 from utils import check_bad_path
 from utils import acct_handle_dir
@@ -161,7 +162,8 @@ class PubServer(BaseHTTPRequestHandler):
 print(endpoint_type.upper() + ' no nickname ' + self.path)
 http_400(self)
 return
-if not os.path.isdir(self.server.base_dir + '/accounts/' +
+dir_str = data_dir(self.server.base_dir)
+if not os.path.isdir(dir_str + '/' +
 nickname + '@' + self.server.domain):
 print(endpoint_type.upper() +
 ' for non-existent account ' + self.path)
@@ -333,7 +335,7 @@ class PubServer(BaseHTTPRequestHandler):
 if avatar_file.startswith('avatar'):
 avatar_file = 'avatar.' + avatar_file_ext
 media_filename = \
-self.server.base_dir + '/accounts/' + \
+data_dir(self.server.base_dir) + '/' + \
 nickname + '@' + self.server.domain + '/' + \
 avatar_file
 else:
@@ -348,7 +350,7 @@ class PubServer(BaseHTTPRequestHandler):
 if banner_file.startswith('banner'):
 banner_file = 'banner.' + banner_file_ext
 media_filename = \
-self.server.base_dir + '/accounts/' + \
+data_dir(self.server.base_dir) + '/' + \
 nickname + '@' + self.server.domain + '/' + \
 banner_file

@@ -654,7 +656,8 @@ def run_shares_expire_watchdog(project_version: str, httpd) -> None:
 def load_tokens(base_dir: str, tokens_dict: {}, tokens_lookup: {}) -> None:
 """Loads shared items access tokens for each account
 """
-for _, dirs, _ in os.walk(base_dir + '/accounts'):
+dir_str = data_dir(base_dir)
+for _, dirs, _ in os.walk(dir_str):
 for handle in dirs:
 if '@' in handle:
 token_filename = acct_handle_dir(base_dir, handle) + '/.token'
@@ -767,9 +770,10 @@ def run_daemon(no_of_books: int,
 server_address = ('', proxy_port)
 pub_handler = partial(PubServer)

-if not os.path.isdir(base_dir + '/accounts'):
+dir_str = data_dir(base_dir)
+if not os.path.isdir(dir_str):
 print('Creating accounts directory')
-os.mkdir(base_dir + '/accounts')
+os.mkdir(dir_str)

 httpd = None
 try:
@@ -850,7 +854,7 @@ def run_daemon(no_of_books: int,
 httpd.public_replies_unlisted = public_replies_unlisted

 # load a list of dogwhistle words
-dogwhistles_filename = base_dir + '/accounts/dogwhistles.txt'
+dogwhistles_filename = data_dir(base_dir) + '/dogwhistles.txt'
 if not os.path.isfile(dogwhistles_filename):
 dogwhistles_filename = base_dir + '/default_dogwhistles.txt'
 httpd.dogwhistles = load_dogwhistles(dogwhistles_filename)
@@ -886,7 +890,7 @@ def run_daemon(no_of_books: int,
 httpd.dm_license_url = ''

 # fitness metrics
-fitness_filename = base_dir + '/accounts/fitness.json'
+fitness_filename = data_dir(base_dir) + '/fitness.json'
 httpd.fitness = {}
 if os.path.isfile(fitness_filename):
 fitness = load_json(fitness_filename)
@@ -1186,11 +1190,12 @@ def run_daemon(no_of_books: int,
 # whether to enable broch mode, which locks down the instance
 set_broch_mode(base_dir, httpd.domain_full, broch_mode)

-if not os.path.isdir(base_dir + '/accounts/inbox@' + domain):
+dir_str = data_dir(base_dir)
+if not os.path.isdir(dir_str + '/inbox@' + domain):
 print('Creating shared inbox: inbox@' + domain)
 create_shared_inbox(base_dir, 'inbox', domain, port, http_prefix)

-if not os.path.isdir(base_dir + '/accounts/news@' + domain):
+if not os.path.isdir(dir_str + '/news@' + domain):
 print('Creating news inbox: news@' + domain)
 create_news_inbox(base_dir, domain, port, http_prefix)
 set_config_param(base_dir, "listsEnabled", "Murdoch press")
@@ -1198,7 +1203,7 @@ def run_daemon(no_of_books: int,
 # dict of known web crawlers accessing nodeinfo or the masto API
 # and how many times they have been seen
 httpd.known_crawlers = {}
-known_crawlers_filename = base_dir + '/accounts/knownCrawlers.json'
+known_crawlers_filename = dir_str + '/knownCrawlers.json'
 if os.path.isfile(known_crawlers_filename):
 httpd.known_crawlers = load_json(known_crawlers_filename)
 # when was the last crawler seen?

@@ -85,6 +85,7 @@ from httpcodes import http_304
 from httpcodes import http_400
 from httpcodes import http_503
 from httpcodes import write2
+from utils import data_dir
 from utils import user_agent_domain
 from utils import local_network_host
 from utils import permitted_dir
@@ -2409,8 +2410,7 @@ def daemon_http_get(self) -> None:
 if (is_image_file(self.path) and
 (self.path.startswith('/login.') or
 self.path.startswith('/qrcode.png'))):
-icon_filename = \
-self.server.base_dir + '/accounts' + self.path
+icon_filename = data_dir(self.server.base_dir) + self.path
 if os.path.isfile(icon_filename):
 if etag_exists(self, icon_filename):
 # The file has not changed

@@ -18,6 +18,7 @@ from httpcodes import write2
 from httpcodes import http_304
 from httpcodes import http_404
 from httpheaders import set_headers_etag
+from utils import data_dir
 from utils import get_nickname_from_actor
 from utils import media_file_mime_type
 from utils import get_image_mime_type
@@ -689,7 +690,7 @@ def show_background_image(self, path: str,
 # follow screen background image
 if path.endswith('/' + bg_im + '-background.' + ext):
 bg_filename = \
-base_dir + '/accounts/' + \
+data_dir(base_dir) + '/' + \
 bg_im + '-background.' + ext
 if os.path.isfile(bg_filename):
 if etag_exists(self, bg_filename):

@@ -16,6 +16,7 @@ from httpheaders import set_headers
 from newswire import get_rss_from_dict
 from fitnessFunctions import fitness_performance
 from posts import is_moderator
+from utils import data_dir
 from utils import local_actor_url
 from utils import save_json
 from webapp_column_right import html_edit_news_post
@@ -89,7 +90,7 @@ def newswire_vote(self, calling_domain: str, path: str,
 newswire_item[votes_index].append('vote:' + nickname)
 filename = newswire_item[filename_index]
 newswire_state_filename = \
-base_dir + '/accounts/.newswirestate.json'
+data_dir(base_dir) + '/.newswirestate.json'
 try:
 save_json(newswire, newswire_state_filename)
 except BaseException as ex:
@@ -144,7 +145,7 @@ def newswire_unvote(self, calling_domain: str, path: str,
 newswire_item[votes_index].remove('vote:' + nickname)
 filename = newswire_item[filename_index]
 newswire_state_filename = \
-base_dir + '/accounts/.newswirestate.json'
+data_dir(base_dir) + '/.newswirestate.json'
 try:
 save_json(newswire, newswire_state_filename)
 except BaseException as ex:

@@ -8,6 +8,7 @@ __status__ = "Production"
 __module_group__ = "Core GET"

 import os
+from utils import data_dir
 from utils import is_account_dir
 from utils import acct_dir
 from session import establish_session
@@ -97,7 +98,8 @@ def get_rss2site(self, calling_domain: str, path: str,
 return

 msg = ''
-for _, dirs, _ in os.walk(base_dir + '/accounts'):
+dir_str = data_dir(base_dir)
+for _, dirs, _ in os.walk(dir_str):
 for acct in dirs:
 if not is_account_dir(acct):
 continue

@@ -10,6 +10,7 @@ __module_group__ = "Core POST"
 import os
 import errno
 from socket import error as SocketError
+from utils import data_dir
 from utils import dangerous_markup
 from utils import get_instance_url
 from utils import get_nickname_from_actor
@@ -194,10 +195,10 @@ def links_update(self, calling_domain: str, cookie: str,
 self.server.postreq_busy = False
 return

-links_filename = base_dir + '/accounts/links.txt'
-about_filename = base_dir + '/accounts/about.md'
-tos_filename = base_dir + '/accounts/tos.md'
-specification_filename = base_dir + '/accounts/activitypub.md'
+links_filename = data_dir(base_dir) + '/links.txt'
+about_filename = data_dir(base_dir) + '/about.md'
+tos_filename = data_dir(base_dir) + '/tos.md'
+specification_filename = data_dir(base_dir) + '/activitypub.md'

 if not boundary:
 if b'--LYNX' in post_bytes:

@@ -25,6 +25,7 @@ from httpheaders import redirect_headers
 from httpheaders import clear_login_details
 from webapp_login import html_get_login_credentials
 from webapp_suspended import html_suspended
+from utils import data_dir
 from utils import acct_dir
 from utils import is_suspended
 from utils import is_local_network_address
@@ -191,8 +192,7 @@ def post_login_screen(self, calling_domain: str, cookie: str,
 self.server.tokens[login_nickname] = token
 login_handle = login_nickname + '@' + domain
 token_filename = \
-base_dir + '/accounts/' + \
-login_handle + '/.token'
+data_dir(base_dir) + '/' + login_handle + '/.token'
 try:
 with open(token_filename, 'w+',
 encoding='utf-8') as fp_tok:
@@ -201,9 +201,9 @@ def post_login_screen(self, calling_domain: str, cookie: str,
 print('EX: Unable to save token for ' +
 login_nickname + ' ' + str(ex))

+dir_str = data_dir(base_dir)
 person_upgrade_actor(base_dir, None,
-base_dir + '/accounts/' +
-login_handle + '.json')
+dir_str + '/' + login_handle + '.json')

 index = self.server.tokens[login_nickname]
 self.server.tokens_lookup[index] = login_nickname

@@ -11,6 +11,7 @@ import os
 import errno
 import urllib.parse
 from socket import error as SocketError
+from utils import data_dir
 from utils import delete_post
 from utils import locate_post
 from utils import get_full_domain
@@ -146,8 +147,8 @@ def moderator_actions(self, path: str, calling_domain: str, cookie: str,
 # is this a local nickname on this instance?
 local_handle = \
 search_handle + '@' + domain
-if os.path.isdir(base_dir +
-'/accounts/' + local_handle):
+dir_str = data_dir(base_dir)
+if os.path.isdir(dir_str + '/' + local_handle):
 search_handle = local_handle
 else:
 search_handle = ''

@@ -10,6 +10,7 @@ __module_group__ = "Core POST"
 import os
 import errno
 from socket import error as SocketError
+from utils import data_dir
 from utils import clear_from_post_caches
 from utils import remove_id_ending
 from utils import save_json
@@ -95,7 +96,7 @@ def newswire_update(self, calling_domain: str, cookie: str,
 self.server.postreq_busy = False
 return

-newswire_filename = base_dir + '/accounts/newswire.txt'
+newswire_filename = data_dir(base_dir) + '/newswire.txt'

 if not boundary:
 if b'--LYNX' in post_bytes:
@@ -140,8 +141,7 @@ def newswire_update(self, calling_domain: str, cookie: str,

 # save filtered words list for the newswire
 filter_newswire_filename = \
-base_dir + '/accounts/' + \
-'news@' + domain + '/filters.txt'
+data_dir(base_dir) + '/' + 'news@' + domain + '/filters.txt'
 if fields.get('filteredWordsNewswire'):
 try:
 with open(filter_newswire_filename, 'w+',
@@ -158,7 +158,7 @@ def newswire_update(self, calling_domain: str, cookie: str,
 filter_newswire_filename)

 # save dogwhistle words list
-dogwhistles_filename = base_dir + '/accounts/dogwhistles.txt'
+dogwhistles_filename = data_dir(base_dir) + '/dogwhistles.txt'
 if fields.get('dogwhistleWords'):
 try:
 with open(dogwhistles_filename, 'w+',
@@ -179,8 +179,7 @@ def newswire_update(self, calling_domain: str, cookie: str,
 self.server.dogwhistles = {}

 # save news tagging rules
-hashtag_rules_filename = \
-base_dir + '/accounts/hashtagrules.txt'
+hashtag_rules_filename = data_dir(base_dir) + '/hashtagrules.txt'
 if fields.get('hashtagRulesList'):
 try:
 with open(hashtag_rules_filename, 'w+',
@@ -196,8 +195,7 @@ def newswire_update(self, calling_domain: str, cookie: str,
 print('EX: _newswire_update unable to delete ' +
 hashtag_rules_filename)

-newswire_tusted_filename = \
-base_dir + '/accounts/newswiretrusted.txt'
+newswire_tusted_filename = data_dir(base_dir) + '/newswiretrusted.txt'
 if fields.get('trustedNewswire'):
 newswire_trusted = fields['trustedNewswire']
 if not newswire_trusted.endswith('\n'):
@@ -448,7 +446,7 @@ def news_post_edit(self, calling_domain: str, cookie: str,
 first_paragraph_from_string(news_post_content)
 # save newswire
 newswire_state_filename = \
-base_dir + '/accounts/.newswirestate.json'
+data_dir(base_dir) + '/.newswirestate.json'
 try:
 save_json(newswire, newswire_state_filename)
 except BaseException as ex:

@@ -14,6 +14,7 @@ from socket import error as SocketError
 from blocking import save_blocked_military
 from httpheaders import redirect_headers
 from httpheaders import clear_login_details
+from utils import data_dir
 from utils import set_premium_account
 from utils import is_premium_account
 from utils import remove_avatar_from_cache
@@ -241,7 +242,7 @@ def _profile_post_peertube_instances(base_dir: str, fields: {}, self,
 peertube_instances: []) -> None:
 """ HTTP POST save peertube instances list
 """
-peertube_instances_file = base_dir + '/accounts/peertube.txt'
+peertube_instances_file = data_dir(base_dir) + '/peertube.txt'
 if fields.get('ptInstances'):
 peertube_instances.clear()
 try:
@@ -309,7 +310,7 @@ def _profile_post_buy_domains(base_dir: str, fields: {}, self) -> None:
 buy_sites[buy_icon_text] = site_url.strip()
 if str(self.server.buy_sites) != str(buy_sites):
 self.server.buy_sites = buy_sites
-buy_sites_filename = base_dir + '/accounts/buy_sites.json'
+buy_sites_filename = data_dir(base_dir) + '/buy_sites.json'
 if buy_sites:
 save_json(buy_sites, buy_sites_filename)
 else:
@@ -2538,7 +2539,7 @@ def profile_edit(self, calling_domain: str, cookie: str,
 # time is an image with metadata publicly exposed,
 # even for a few mS
 if m_type == 'instanceLogo':
-filename_base = base_dir + '/accounts/login.temp'
+filename_base = data_dir(base_dir) + '/login.temp'
 elif m_type == 'importTheme':
 if not os.path.isdir(base_dir + '/imports'):
 os.mkdir(base_dir + '/imports')

@@ -72,6 +72,7 @@ from tests import test_update_actor
 from tests import run_all_tests
 from auth import store_basic_credentials
 from auth import create_password
+from utils import data_dir
 from utils import string_ends_with
 from utils import remove_html
 from utils import remove_eol
@@ -3090,7 +3091,7 @@ def _command_options() -> None:
 if not os.path.isdir(account_dir):
 print('Account ' + nickname + '@' + domain + ' not found')
 sys.exit()
-password_file = base_dir + '/accounts/passwords'
+password_file = data_dir(base_dir) + '/passwords'
 if os.path.isfile(password_file):
 if text_in_file(nickname + ':', password_file):
 store_basic_credentials(base_dir, nickname, new_password)
@@ -3496,8 +3497,9 @@ def _command_options() -> None:
 if os.path.isdir(base_dir + '/tags'):
 shutil.rmtree(base_dir + '/tags',
 ignore_errors=False, onerror=None)
-if os.path.isdir(base_dir + '/accounts'):
-shutil.rmtree(base_dir + '/accounts',
+dir_str = data_dir(base_dir)
+if os.path.isdir(dir_str):
+shutil.rmtree(dir_str,
 ignore_errors=False, onerror=None)
 if os.path.isdir(base_dir + '/keys'):
 shutil.rmtree(base_dir + '/keys',

@@ -8,6 +8,7 @@ __status__ = "Production"
 __module_group__ = "Moderation"

 import os
+from utils import data_dir
 from utils import acct_dir
 from utils import text_in_file
 from utils import remove_eol
@@ -42,7 +43,7 @@ def add_global_filter(base_dir: str, words: str) -> bool:
 return False
 if len(words) < 2:
 return False
-filters_filename = base_dir + '/accounts/filters.txt'
+filters_filename = data_dir(base_dir) + '/filters.txt'
 if os.path.isfile(filters_filename):
 if text_in_file(words, filters_filename):
 return False
@@ -85,7 +86,7 @@ def remove_filter(base_dir: str, nickname: str, domain: str,
 def remove_global_filter(base_dir: str, words: str) -> bool:
 """Removes a global word filter
 """
-filters_filename = base_dir + '/accounts/filters.txt'
+filters_filename = data_dir(base_dir) + '/filters.txt'
 if not os.path.isfile(filters_filename):
 return False
 if not text_in_file(words, filters_filename):
@@ -161,7 +162,7 @@ def is_filtered_globally(base_dir: str, content: str,
 system_language: str) -> bool:
 """Is the given content globally filtered?
 """
-global_filters_filename = base_dir + '/accounts/filters.txt'
+global_filters_filename = data_dir(base_dir) + '/filters.txt'
 if _is_filtered_base(global_filters_filename, content,
 system_language):
 return True

@@ -11,6 +11,7 @@ import os
 import time
 from webapp_utils import html_header_with_external_style
 from webapp_utils import html_footer
+from utils import data_dir
 from utils import get_config_param
 from utils import save_json

@@ -125,7 +126,7 @@ def html_watch_points_graph(base_dir: str, fitness: {}, fitness_id: str,
 def fitness_thread(base_dir: str, fitness: {}) -> None:
 """Thread used to save fitness function scores
 """
-fitness_filename = base_dir + '/accounts/fitness.json'
+fitness_filename = data_dir(base_dir) + '/fitness.json'
 while True:
 # every 10 mins
 time.sleep(60 * 10)
21
follow.py
21
follow.py
|
@@ -34,6 +34,7 @@ from utils import local_actor_url
 from utils import text_in_file
 from utils import remove_eol
 from utils import get_actor_from_post
+from utils import data_dir
 from acceptreject import create_accept
 from acceptreject import create_reject
 from webfinger import webfinger_handle

@@ -49,11 +50,12 @@ def create_initial_last_seen(base_dir: str, http_prefix: str) -> None:
     The lastseen files are used to generate the Zzz icons on
     follows/following lists on the profile screen.
     """
-    for _, dirs, _ in os.walk(base_dir + '/accounts'):
+    dir_str = data_dir(base_dir)
+    for _, dirs, _ in os.walk(dir_str):
         for acct in dirs:
             if not is_account_dir(acct):
                 continue
-            account_dir = os.path.join(base_dir + '/accounts', acct)
+            account_dir = os.path.join(dir_str, acct)
             following_filename = account_dir + '/following.txt'
             if not os.path.isfile(following_filename):
                 continue

@@ -318,8 +320,9 @@ def unfollow_account(base_dir: str, nickname: str, domain: str,
     handle_to_unfollow = follow_nickname + '@' + follow_domain
     if group_account:
         handle_to_unfollow = '!' + handle_to_unfollow
-    if not os.path.isdir(base_dir + '/accounts'):
-        os.mkdir(base_dir + '/accounts')
+    dir_str = data_dir(base_dir)
+    if not os.path.isdir(dir_str):
+        os.mkdir(dir_str)
     handle_dir = acct_handle_dir(base_dir, handle)
     if not os.path.isdir(handle_dir):
         os.mkdir(handle_dir)

@@ -390,8 +393,9 @@ def clear_follows(base_dir: str, nickname: str, domain: str,
                   follow_file: str) -> None:
     """Removes all follows
     """
-    if not os.path.isdir(base_dir + '/accounts'):
-        os.mkdir(base_dir + '/accounts')
+    dir_str = data_dir(base_dir)
+    if not os.path.isdir(dir_str):
+        os.mkdir(dir_str)
     accounts_dir = acct_dir(base_dir, nickname, domain)
     if not os.path.isdir(accounts_dir):
         os.mkdir(accounts_dir)

@@ -602,7 +606,7 @@ def follow_approval_required(base_dir: str, nickname_to_follow: str,

     manually_approve_follows = False
     domain_to_follow = remove_domain_port(domain_to_follow)
-    actor_filename = base_dir + '/accounts/' + \
+    actor_filename = data_dir(base_dir) + '/' + \
         nickname_to_follow + '@' + domain_to_follow + '.json'
     if os.path.isfile(actor_filename):
         actor = load_json(actor_filename)

@@ -1428,7 +1432,8 @@ def get_followers_of_actor(base_dir: str, actor: str, debug: bool) -> {}:
     if debug:
         print('DEBUG: searching for handle ' + actor_handle)
     # for each of the accounts
-    for subdir, dirs, _ in os.walk(base_dir + '/accounts'):
+    dir_str = data_dir(base_dir)
+    for subdir, dirs, _ in os.walk(dir_str):
         for account in dirs:
             if '@' not in account:
                 continue

@@ -10,6 +10,12 @@ __module_group__ = "Calendar"
 import os


+def _data_dir2(base_dir) -> str:
+    """Returns the directory where account data is stored
+    """
+    return base_dir + '/accounts'
+
+
 def _text_in_file2(text: str, filename: str,
                    case_sensitive: bool) -> bool:
     """is the given text in the given file?

@@ -32,7 +38,7 @@ def _text_in_file2(text: str, filename: str,

 def _dir_acct(base_dir: str, nickname: str, domain: str) -> str:
     """Returns the directory of an account
     """
-    return base_dir + '/accounts/' + nickname + '@' + domain
+    return _data_dir2(base_dir) + '/' + nickname + '@' + domain


 def _port_domain_remove(domain: str) -> str:

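Note: the new utils.data_dir helper imported throughout this commit is not itself shown in the diff. Judging from the private _data_dir2 copy added above, it presumably reduces to a one-line wrapper along the lines of the sketch below (an assumption, not the actual utils.py definition):

def data_dir(base_dir: str) -> str:
    # Assumed to mirror the _data_dir2 helper shown above; the real
    # definition lives in utils.py, which is not part of this diff.
    return base_dir + '/accounts'

Callers then build paths such as data_dir(base_dir) + '/filters.txt' rather than hard-coding base_dir + '/accounts/...', so the location of the accounts data directory can later be changed in one place.
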
@@ -10,6 +10,7 @@ __module_group__ = "Core"
 import os
 import time
 import random
+from utils import data_dir
 from utils import get_full_domain
 from utils import is_account_dir
 from utils import get_nickname_from_actor

@@ -184,12 +185,13 @@ def _update_import_following(base_dir: str,
 def run_import_following(base_dir: str, httpd):
     """Sends out follow requests for imported following csv files
     """
+    dir_str = data_dir(base_dir)
     while True:
         time.sleep(20)

         # get a list of accounts on the instance, in random sequence
         accounts_list = []
-        for _, dirs, _ in os.walk(base_dir + '/accounts'):
+        for _, dirs, _ in os.walk(dir_str):
             for account in dirs:
                 if '@' not in account:
                     continue

@@ -203,7 +205,7 @@ def run_import_following(base_dir: str, httpd):
         # check if each accounts has an import csv
         random.shuffle(accounts_list)
         for account in accounts_list:
-            account_dir = base_dir + '/accounts/' + account
+            account_dir = dir_str + '/' + account
             import_filename = account_dir + '/import_following.csv'

             if not os.path.isfile(import_filename):

30 inbox.py
@@ -87,6 +87,7 @@ from utils import valid_hash_tag
 from utils import get_attributed_to
 from utils import get_reply_to
 from utils import get_actor_from_post
+from utils import data_dir
 from categories import get_hashtag_categories
 from categories import set_hashtag_category
 from httpsig import get_digest_algorithm_from_headers

@@ -193,7 +194,7 @@ def cache_svg_images(session, base_dir: str, http_prefix: str,
     actor = 'unknown'
     if post_attachments:
         actor = get_attributed_to(obj['attributedTo'])
-    log_filename = base_dir + '/accounts/svg_scripts_log.txt'
+    log_filename = data_dir(base_dir) + '/svg_scripts_log.txt'
     for index in range(len(post_attachments)):
         attach = post_attachments[index]
         if not attach.get('mediaType'):

@@ -855,7 +856,7 @@ def save_post_to_inbox_queue(base_dir: str, http_prefix: str,
     inbox_queue_dir = create_inbox_queue_dir(nickname, domain, base_dir)

     handle = nickname + '@' + domain
-    destination = base_dir + '/accounts/' + \
+    destination = data_dir(base_dir) + '/' + \
         handle + '/inbox/' + post_id.replace('/', '#') + '.json'
     filename = inbox_queue_dir + '/' + post_id.replace('/', '#') + '.json'

@@ -930,7 +931,7 @@ def _inbox_post_recipients_add(base_dir: str, to_list: [],
                 recipients_dict[handle] = None
             else:
                 if debug:
-                    print('DEBUG: ' + base_dir + '/accounts/' +
+                    print('DEBUG: ' + data_dir(base_dir) + '/' +
                           handle + ' does not exist')
         else:
             if debug:

@@ -1190,11 +1191,12 @@ def _notify_moved(base_dir: str, domain_full: str,
                   http_prefix: str) -> None:
     """Notify that an actor has moved
     """
-    for _, dirs, _ in os.walk(base_dir + '/accounts'):
+    dir_str = data_dir(base_dir)
+    for _, dirs, _ in os.walk(dir_str):
         for account in dirs:
             if not is_account_dir(account):
                 continue
-            account_dir = base_dir + '/accounts/' + account
+            account_dir = dir_str + '/' + account
             following_filename = account_dir + '/following.txt'
             if not os.path.isfile(following_filename):
                 continue

@@ -1321,7 +1323,7 @@ def _person_receive_update(base_dir: str,
     new_actor = prev_nickname + '@' + prev_domain_full + ' ' + \
         new_nickname + '@' + new_domain_full
     refollow_str = ''
-    refollow_filename = base_dir + '/accounts/actors_moved.txt'
+    refollow_filename = data_dir(base_dir) + '/actors_moved.txt'
     refollow_file_exists = False
     if os.path.isfile(refollow_filename):
         try:

@@ -5534,9 +5536,10 @@ def clear_queue_items(base_dir: str, queue: []) -> None:
     """
     ctr = 0
     queue.clear()
-    for _, dirs, _ in os.walk(base_dir + '/accounts'):
+    dir_str = data_dir(base_dir)
+    for _, dirs, _ in os.walk(dir_str):
         for account in dirs:
-            queue_dir = base_dir + '/accounts/' + account + '/queue'
+            queue_dir = dir_str + '/' + account + '/queue'
             if not os.path.isdir(queue_dir):
                 continue
             for _, _, queuefiles in os.walk(queue_dir):

@@ -5557,9 +5560,10 @@ def _restore_queue_items(base_dir: str, queue: []) -> None:
     """Checks the queue for each account and appends filenames
     """
     queue.clear()
-    for _, dirs, _ in os.walk(base_dir + '/accounts'):
+    dir_str = data_dir(base_dir)
+    for _, dirs, _ in os.walk(dir_str):
         for account in dirs:
-            queue_dir = base_dir + '/accounts/' + account + '/queue'
+            queue_dir = dir_str + '/' + account + '/queue'
             if not os.path.isdir(queue_dir):
                 continue
             for _, _, queuefiles in os.walk(queue_dir):

@@ -5715,7 +5719,7 @@ def _check_json_signature(base_dir: str, queue_json: {}) -> (bool, bool):
             has_json_signature = True
         else:
             unknown_contexts_file = \
-                base_dir + '/accounts/unknownContexts.txt'
+                data_dir(base_dir) + '/unknownContexts.txt'
             unknown_context = str(original_json['@context'])

             print('unrecognized @context: ' + unknown_context)

@@ -5736,7 +5740,7 @@ def _check_json_signature(base_dir: str, queue_json: {}) -> (bool, bool):
         print('Unrecognized jsonld signature type: ' + jwebsig_type)

         unknown_signatures_file = \
-            base_dir + '/accounts/unknownJsonSignatures.txt'
+            data_dir(base_dir) + '/unknownJsonSignatures.txt'

         already_unknown = False
         if os.path.isfile(unknown_signatures_file):

@@ -6446,7 +6450,7 @@ def run_inbox_queue(server,
                               debug)
     inbox_start_time = time.time()

-    dogwhistles_filename = base_dir + '/accounts/dogwhistles.txt'
+    dogwhistles_filename = data_dir(base_dir) + '/dogwhistles.txt'
     if not os.path.isfile(dogwhistles_filename):
         dogwhistles_filename = base_dir + '/default_dogwhistles.txt'
     dogwhistles = load_dogwhistles(dogwhistles_filename)

@@ -10,6 +10,7 @@ __module_group__ = "Core"
 import json
 import os
 from urllib import request, parse
+from utils import data_dir
 from utils import is_account_dir
 from utils import acct_dir
 from utils import get_actor_languages_list

@@ -364,7 +365,8 @@ def load_default_post_languages(base_dir: str) -> {}:
     for new posts for each account
     """
     result = {}
-    for _, dirs, _ in os.walk(base_dir + '/accounts'):
+    dir_str = data_dir(base_dir)
+    for _, dirs, _ in os.walk(dir_str):
         for handle in dirs:
             if not is_account_dir(handle):
                 continue

@@ -18,6 +18,7 @@ from utils import get_attachment_property_value
 from utils import no_of_accounts
 from utils import get_status_count
 from utils import lines_in_file
+from utils import data_dir


 def _meta_data_instance_v1(show_accounts: bool,

@@ -31,7 +32,7 @@ def _meta_data_instance_v1(show_accounts: bool,
     """ /api/v1/instance endpoint
     """
     admin_actor_filename = \
-        base_dir + '/accounts/' + admin_nickname + '@' + domain + '.json'
+        data_dir(base_dir) + '/' + admin_nickname + '@' + domain + '.json'
     if not os.path.isfile(admin_actor_filename):
         return {}

@@ -41,8 +42,7 @@ def _meta_data_instance_v1(show_accounts: bool,
         return {}

     rules_list = []
-    rules_filename = \
-        base_dir + '/accounts/tos.md'
+    rules_filename = data_dir(base_dir) + '/tos.md'
     if os.path.isfile(rules_filename):
         with open(rules_filename, 'r', encoding='utf-8') as fp_rules:
             rules_lines = fp_rules.readlines()

@@ -20,6 +20,7 @@ from utils import get_video_extensions
 from utils import get_audio_extensions
 from utils import get_image_mime_type
 from utils import lines_in_file
+from utils import data_dir


 def _get_masto_api_v2id_from_nickname(nickname: str) -> int:

@@ -37,7 +38,7 @@ def _meta_data_instance_v2(show_accounts: bool,
                            version: str, translate: {}) -> {}:
     """ /api/v2/instance endpoint
     """
-    account_dir = base_dir + '/accounts/' + admin_nickname + '@' + domain
+    account_dir = data_dir(base_dir) + '/' + admin_nickname + '@' + domain
     admin_actor_filename = account_dir + '.json'
     if not os.path.isfile(admin_actor_filename):
         return {}

@@ -48,8 +49,7 @@ def _meta_data_instance_v2(show_accounts: bool,
         return {}

     rules_list = []
-    rules_filename = \
-        base_dir + '/accounts/tos.md'
+    rules_filename = data_dir(base_dir) + '/tos.md'
     if os.path.isfile(rules_filename):
         with open(rules_filename, 'r', encoding='utf-8') as fp_rules:
             rules_lines = fp_rules.readlines()

@@ -8,6 +8,7 @@ __status__ = "Production"
 __module_group__ = "Core"

 import os
+from utils import data_dir
 from utils import is_account_dir
 from utils import get_nickname_from_actor
 from utils import get_domain_from_actor

@@ -219,7 +220,8 @@ def migrate_accounts(base_dir: str, session,
     """
     # update followers and following lists for each account
     ctr = 0
-    for _, dirs, _ in os.walk(base_dir + '/accounts'):
+    dir_str = data_dir(base_dir)
+    for _, dirs, _ in os.walk(dir_str):
         for handle in dirs:
             if not is_account_dir(handle):
                 continue

@@ -36,6 +36,7 @@ from utils import clear_from_post_caches
 from utils import dangerous_markup
 from utils import local_actor_url
 from utils import text_in_file
+from utils import data_dir
 from inbox import store_hash_tags
 from session import create_session
 from threads import begin_thread

@@ -45,7 +46,7 @@ def _update_feeds_outbox_index(base_dir: str, domain: str,
                                post_id: str) -> None:
     """Updates the index used for imported RSS feeds
     """
-    base_path = base_dir + '/accounts/news@' + domain
+    base_path = data_dir(base_dir) + '/news@' + domain
     index_filename = base_path + '/outbox.index'

     if os.path.isfile(index_filename):

@@ -387,7 +388,7 @@ def _newswire_hashtag_processing(base_dir: str, post_json_object: {},
     Returns true if the post should be saved to the news timeline
     of this instance
     """
-    rules_filename = base_dir + '/accounts/hashtagrules.txt'
+    rules_filename = data_dir(base_dir) + '/hashtagrules.txt'
     if not os.path.isfile(rules_filename):
         return True
     rules = []

@@ -447,7 +448,7 @@ def _create_news_mirror(base_dir: str, domain: str,
     if '|' in url or '>' in url:
         return True

-    mirror_dir = base_dir + '/accounts/newsmirror'
+    mirror_dir = data_dir(base_dir) + '/newsmirror'
     if not os.path.isdir(mirror_dir):
         os.mkdir(mirror_dir)

@@ -457,7 +458,7 @@ def _create_news_mirror(base_dir: str, domain: str,
             no_of_dirs = len(dirs)
             break

-    mirror_index_filename = base_dir + '/accounts/newsmirror.txt'
+    mirror_index_filename = data_dir(base_dir) + '/newsmirror.txt'

     if max_mirrored_articles > 0 and no_of_dirs > max_mirrored_articles:
         if not os.path.isfile(mirror_index_filename):

@@ -558,7 +559,7 @@ def _convert_rss_to_activitypub(base_dir: str, http_prefix: str,
         print('No newswire to convert')
         return

-    base_path = base_dir + '/accounts/news@' + domain + '/outbox'
+    base_path = data_dir(base_dir) + '/news@' + domain + '/outbox'
     if not os.path.isdir(base_path):
         os.mkdir(base_path)

@@ -787,8 +788,8 @@ def run_newswire_daemon(base_dir: str, httpd,
                         translate: {}) -> None:
     """Periodically updates RSS feeds
     """
-    newswire_state_filename = base_dir + '/accounts/.newswirestate.json'
-    refresh_filename = base_dir + '/accounts/.refresh_newswire'
+    newswire_state_filename = data_dir(base_dir) + '/.newswirestate.json'
+    refresh_filename = data_dir(base_dir) + '/.refresh_newswire'

     print('Starting newswire daemon')
     # initial sleep to allow the system to start up

10 newswire.py
@@ -19,6 +19,7 @@ from datetime import timezone
 from collections import OrderedDict
 from utils import valid_post_date
 from categories import set_hashtag_category
+from utils import data_dir
 from utils import string_contains
 from utils import image_mime_types_dict
 from utils import resembles_url

@@ -1623,7 +1624,8 @@ def _add_blogs_to_newswire(base_dir: str, domain: str, newswire: {},
     moderation_dict = {}

     # go through each account
-    for _, dirs, _ in os.walk(base_dir + '/accounts'):
+    dir_str = data_dir(base_dir)
+    for _, dirs, _ in os.walk(dir_str):
         for handle in dirs:
             if not is_account_dir(handle):
                 continue

@@ -1639,7 +1641,7 @@ def _add_blogs_to_newswire(base_dir: str, domain: str, newswire: {},
                 continue

             # is there a blogs timeline for this account?
-            account_dir = os.path.join(base_dir + '/accounts', handle)
+            account_dir = os.path.join(dir_str, handle)
             blogs_index = account_dir + '/tlblogs.index'
             if os.path.isfile(blogs_index):
                 domain = handle.split('@')[1]

@@ -1655,7 +1657,7 @@ def _add_blogs_to_newswire(base_dir: str, domain: str, newswire: {},
         OrderedDict(sorted(moderation_dict.items(), reverse=True))
     # save the moderation queue details for later display
     newswire_moderation_filename = \
-        base_dir + '/accounts/newswiremoderation.txt'
+        data_dir(base_dir) + '/newswiremoderation.txt'
     if sorted_moderation_dict:
         save_json(sorted_moderation_dict, newswire_moderation_filename)
     else:

@@ -1678,7 +1680,7 @@ def get_dict_from_newswire(session, base_dir: str, domain: str,
                            timeout_sec: int) -> {}:
     """Gets rss feeds as a dictionary from newswire file
     """
-    subscriptions_filename = base_dir + '/accounts/newswire.txt'
+    subscriptions_filename = data_dir(base_dir) + '/newswire.txt'
     if not os.path.isfile(subscriptions_filename):
         return {}

@@ -15,6 +15,7 @@ from posts import outbox_message_create_wrap
 from posts import save_post_to_box
 from posts import send_to_followers_thread
 from posts import send_to_named_addresses_thread
+from utils import data_dir
 from utils import quote_toots_allowed
 from utils import get_post_attachments
 from utils import get_attributed_to

@@ -435,7 +436,7 @@ def post_message_to_outbox(session, translate: {},
                 break

         media_dir = \
-            base_dir + '/accounts/' + \
+            data_dir(base_dir) + '/' + \
             post_to_nickname + '@' + domain
         upload_media_filename = media_dir + '/upload.' + file_extension
         if not os.path.isfile(upload_media_filename):

@@ -537,7 +538,7 @@ def post_message_to_outbox(session, translate: {},
         if is_featured_writer(base_dir, post_to_nickname, domain):
             saved_post_id = saved_filename.split('/')[-1]
             blogs_dir = \
-                base_dir + '/accounts/news@' + domain + '/tlblogs'
+                data_dir(base_dir) + '/news@' + domain + '/tlblogs'
             if not os.path.isdir(blogs_dir):
                 os.mkdir(blogs_dir)
             copyfile(saved_filename, blogs_dir + '/' + saved_post_id)

@@ -547,7 +548,7 @@ def post_message_to_outbox(session, translate: {},

     # clear the citations file if it exists
     citations_filename = \
-        base_dir + '/accounts/' + \
+        data_dir(base_dir) + '/' + \
         post_to_nickname + '@' + domain + '/.citations.txt'
     if os.path.isfile(citations_filename):
         try:

55 person.py
@@ -74,6 +74,7 @@ from utils import dangerous_svg
 from utils import text_in_file
 from utils import contains_statuses
 from utils import get_actor_from_post
+from utils import data_dir
 from session import get_json_valid
 from session import create_session
 from session import get_json

@@ -564,21 +565,21 @@ def _create_person_base(base_dir: str, nickname: str, domain: str, port: int,

     if save_to_file:
         # save person to file
-        people_subdir = '/accounts'
-        if not os.path.isdir(base_dir + people_subdir):
-            os.mkdir(base_dir + people_subdir)
-        if not os.path.isdir(base_dir + people_subdir + '/' + handle):
-            os.mkdir(base_dir + people_subdir + '/' + handle)
-        if not os.path.isdir(base_dir + people_subdir + '/' +
+        people_subdir = data_dir(base_dir)
+        if not os.path.isdir(people_subdir):
+            os.mkdir(people_subdir)
+        if not os.path.isdir(people_subdir + '/' + handle):
+            os.mkdir(people_subdir + '/' + handle)
+        if not os.path.isdir(people_subdir + '/' +
                              handle + '/inbox'):
-            os.mkdir(base_dir + people_subdir + '/' + handle + '/inbox')
-        if not os.path.isdir(base_dir + people_subdir + '/' +
+            os.mkdir(people_subdir + '/' + handle + '/inbox')
+        if not os.path.isdir(people_subdir + '/' +
                              handle + '/outbox'):
-            os.mkdir(base_dir + people_subdir + '/' + handle + '/outbox')
-        if not os.path.isdir(base_dir + people_subdir + '/' +
+            os.mkdir(people_subdir + '/' + handle + '/outbox')
+        if not os.path.isdir(people_subdir + '/' +
                              handle + '/queue'):
-            os.mkdir(base_dir + people_subdir + '/' + handle + '/queue')
-        filename = base_dir + people_subdir + '/' + handle + '.json'
+            os.mkdir(people_subdir + '/' + handle + '/queue')
+        filename = people_subdir + '/' + handle + '.json'
         save_json(new_person, filename)

         # save to cache

@@ -662,7 +663,8 @@ def create_group(base_dir: str, nickname: str, domain: str, port: int,
 def clear_person_qrcodes(base_dir: str) -> None:
     """Clears qrcodes for all accounts
     """
-    for _, dirs, _ in os.walk(base_dir + '/accounts'):
+    dir_str = data_dir(base_dir)
+    for _, dirs, _ in os.walk(dir_str):
         for handle in dirs:
             if '@' not in handle:
                 continue

@@ -720,7 +722,8 @@ def create_person(base_dir: str, nickname: str, domain: str, port: int,
         if registrations_remaining <= 0:
             return None, None, None, None
     else:
-        if os.path.isdir(base_dir + '/accounts/news@' + domain):
+        dir_str = data_dir(base_dir)
+        if os.path.isdir(dir_str + '/news@' + domain):
             # news account already exists
             return None, None, None, None

@@ -742,8 +745,9 @@ def create_person(base_dir: str, nickname: str, domain: str, port: int,
         set_role(base_dir, nickname, domain, 'moderator')
         set_role(base_dir, nickname, domain, 'editor')

-    if not os.path.isdir(base_dir + '/accounts'):
-        os.mkdir(base_dir + '/accounts')
+    dir_str = data_dir(base_dir)
+    if not os.path.isdir(dir_str):
+        os.mkdir(dir_str)
     account_dir = acct_dir(base_dir, nickname, domain)
     if not os.path.isdir(account_dir):
         os.mkdir(account_dir)

@@ -1002,14 +1006,14 @@ def person_upgrade_actor(base_dir: str, person_json: {},

     # also update the actor within the cache
     actor_cache_filename = \
-        base_dir + '/accounts/cache/actors/' + \
+        data_dir(base_dir) + '/cache/actors/' + \
         person_json['id'].replace('/', '#') + '.json'
     if os.path.isfile(actor_cache_filename):
         save_json(person_json, actor_cache_filename)

     # update domain/@nickname in actors cache
     actor_cache_filename = \
-        base_dir + '/accounts/cache/actors/' + \
+        data_dir(base_dir) + '/cache/actors/' + \
         replace_users_with_at(person_json['id']).replace('/', '#') + \
         '.json'
     if os.path.isfile(actor_cache_filename):

@@ -1244,7 +1248,7 @@ def set_bio(base_dir: str, nickname: str, domain: str, bio: str) -> bool:
 def reenable_account(base_dir: str, nickname: str) -> None:
     """Removes an account suspension
     """
-    suspended_filename = base_dir + '/accounts/suspended.txt'
+    suspended_filename = data_dir(base_dir) + '/suspended.txt'
     if os.path.isfile(suspended_filename):
         lines = []
         with open(suspended_filename, 'r', encoding='utf-8') as fp_sus:

@@ -1270,7 +1274,7 @@ def suspend_account(base_dir: str, nickname: str, domain: str) -> None:
         return

     # Don't suspend moderators
-    moderators_file = base_dir + '/accounts/moderators.txt'
+    moderators_file = data_dir(base_dir) + '/moderators.txt'
     if os.path.isfile(moderators_file):
         with open(moderators_file, 'r', encoding='utf-8') as fp_mod:
             lines = fp_mod.readlines()

@@ -1291,7 +1295,7 @@ def suspend_account(base_dir: str, nickname: str, domain: str) -> None:
     except OSError:
         print('EX: suspend_account unable to delete ' + token_filename)

-    suspended_filename = base_dir + '/accounts/suspended.txt'
+    suspended_filename = data_dir(base_dir) + '/suspended.txt'
     if os.path.isfile(suspended_filename):
         with open(suspended_filename, 'r', encoding='utf-8') as fp_sus:
             lines = fp_sus.readlines()

@@ -1328,7 +1332,7 @@ def can_remove_post(base_dir: str,
         return False

     # is the post by a moderator?
-    moderators_file = base_dir + '/accounts/moderators.txt'
+    moderators_file = data_dir(base_dir) + '/moderators.txt'
     if os.path.isfile(moderators_file):
         with open(moderators_file, 'r', encoding='utf-8') as fp_mod:
             lines = fp_mod.readlines()

@@ -1386,7 +1390,7 @@ def remove_account(base_dir: str, nickname: str,
         return False

     # Don't remove moderators
-    moderators_file = base_dir + '/accounts/moderators.txt'
+    moderators_file = data_dir(base_dir) + '/moderators.txt'
     if os.path.isfile(moderators_file):
         with open(moderators_file, 'r', encoding='utf-8') as fp_mod:
             lines = fp_mod.readlines()

@@ -2192,11 +2196,12 @@ def update_memorial_flags(base_dir: str, person_cache: {}) -> None:
     """
     memorials = get_memorials(base_dir).split('\n')

-    for _, dirs, _ in os.walk(base_dir + '/accounts'):
+    dir_str = data_dir(base_dir)
+    for _, dirs, _ in os.walk(dir_str):
         for account in dirs:
             if not is_account_dir(account):
                 continue
-            actor_filename = base_dir + '/accounts/' + account + '.json'
+            actor_filename = data_dir(base_dir) + '/' + account + '.json'
             if not os.path.isfile(actor_filename):
                 continue
             actor_json = load_json(actor_filename)

24 posts.py
@@ -92,6 +92,7 @@ from utils import acct_dir
 from utils import local_actor_url
 from utils import get_reply_to
 from utils import get_actor_from_post
+from utils import data_dir
 from media import get_music_metadata
 from media import attach_media
 from media import replace_you_tube

@@ -151,7 +152,7 @@ def convert_post_content_to_html(message_json: {}) -> None:
 def is_moderator(base_dir: str, nickname: str) -> bool:
     """Returns true if the given nickname is a moderator
     """
-    moderators_file = base_dir + '/accounts/moderators.txt'
+    moderators_file = data_dir(base_dir) + '/moderators.txt'

     if not os.path.isfile(moderators_file):
         admin_name = get_config_param(base_dir, 'admin')

@@ -1592,7 +1593,7 @@ def _create_post_mod_report(base_dir: str,
     else:
         new_post['moderationStatus'] = 'pending'
     # save to index file
-    moderation_index_file = base_dir + '/accounts/moderation.txt'
+    moderation_index_file = data_dir(base_dir) + '/moderation.txt'
     try:
         with open(moderation_index_file, 'a+', encoding='utf-8') as mod_file:
             mod_file.write(new_post_id + '\n')

@@ -2626,7 +2627,7 @@ def create_report_post(base_dir: str,

     # create the list of moderators from the moderators file
     moderators_list = []
-    moderators_file = base_dir + '/accounts/moderators.txt'
+    moderators_file = data_dir(base_dir) + '/moderators.txt'
     if os.path.isfile(moderators_file):
         with open(moderators_file, 'r', encoding='utf-8') as fp_mod:
             for line in fp_mod:

@@ -4348,7 +4349,7 @@ def create_moderation(base_dir: str, nickname: str, domain: str, port: int,
     }

     if is_moderator(base_dir, nickname):
-        moderation_index_file = base_dir + '/accounts/moderation.txt'
+        moderation_index_file = data_dir(base_dir) + '/moderation.txt'
         if os.path.isfile(moderation_index_file):
             with open(moderation_index_file, 'r',
                       encoding='utf-8') as index_file:

@@ -5001,7 +5002,8 @@ def archive_posts(base_dir: str, http_prefix: str, archive_dir: str,
         if not os.path.isdir(archive_dir + '/accounts'):
             os.mkdir(archive_dir + '/accounts')

-    for _, dirs, _ in os.walk(base_dir + '/accounts'):
+    dir_str = data_dir(base_dir)
+    for _, dirs, _ in os.walk(dir_str):
         for handle in dirs:
             if '@' in handle:
                 nickname = handle.split('@')[0]

@@ -5134,7 +5136,8 @@ def expire_posts(base_dir: str, http_prefix: str,
     """Expires posts for instance accounts
     """
     expired_post_count = 0
-    for _, dirs, _ in os.walk(base_dir + '/accounts'):
+    dir_str = data_dir(base_dir)
+    for _, dirs, _ in os.walk(dir_str):
         for handle in dirs:
             if '@' not in handle:
                 continue

@@ -5590,7 +5593,7 @@ def get_public_post_domains_blocked(session, base_dir: str,
     if not post_domains:
         return []

-    blocking_filename = base_dir + '/accounts/blocking.txt'
+    blocking_filename = data_dir(base_dir) + '/blocking.txt'
     if not os.path.isfile(blocking_filename):
         return []

@@ -5644,7 +5647,7 @@ def check_domains(session, base_dir: str,
     if not non_mutuals:
         print('No non-mutual followers were found')
         return
-    follower_warning_filename = base_dir + '/accounts/followerWarnings.txt'
+    follower_warning_filename = data_dir(base_dir) + '/followerWarnings.txt'
     update_follower_warnings = False
     follower_warning_str = ''
     if os.path.isfile(follower_warning_filename):

@@ -5749,8 +5752,7 @@ def populate_replies_json(base_dir: str, nickname: str, domain: str,
         if not reply_found:
             message_id2 = remove_eol(message_id)
             search_filename = \
-                base_dir + \
-                '/accounts/inbox@' + \
+                data_dir(base_dir) + '/inbox@' + \
                 domain + '/inbox/' + \
                 message_id2.replace('/', '#') + '.json'
             if os.path.isfile(search_filename):

@@ -6533,7 +6535,7 @@ def post_is_muted(base_dir: str, nickname: str, domain: str,
         is_muted = True
     else:
         mute_filename = \
-            base_dir + '/accounts/cache/announce/' + nickname + \
+            data_dir(base_dir) + '/cache/announce/' + nickname + \
             '/' + message_id.replace('/', '#') + '.json.muted'
         if os.path.isfile(mute_filename):
             is_muted = True

@@ -8,6 +8,7 @@ __status__ = "Production"
 __module_group__ = "Core"

 import pyqrcode
+from utils import data_dir


 def save_domain_qrcode(base_dir: str, http_prefix: str,

@@ -15,6 +16,6 @@ def save_domain_qrcode(base_dir: str, http_prefix: str,
     """Saves a qrcode image for the domain name
     This helps to transfer onion or i2p domains to a mobile device
     """
-    qrcode_filename = base_dir + '/accounts/qrcode.png'
+    qrcode_filename = data_dir(base_dir) + '/qrcode.png'
     url = pyqrcode.create(http_prefix + '://' + domain_full)
     url.png(qrcode_filename, scale)

@@ -11,6 +11,7 @@ import os
 import re
 import urllib.parse
 from pprint import pprint
+from utils import data_dir
 from utils import has_object_string
 from utils import has_object_string_object
 from utils import has_object_string_type

@@ -463,7 +464,7 @@ def outbox_undo_reaction(recent_posts_cache: {},
 def _update_common_reactions(base_dir: str, emoji_content: str) -> None:
     """Updates the list of commonly used reactions
     """
-    common_reactions_filename = base_dir + '/accounts/common_reactions.txt'
+    common_reactions_filename = data_dir(base_dir) + '/common_reactions.txt'
     common_reactions = None
     if os.path.isfile(common_reactions_filename):
         try:

16 reading.py
@@ -10,6 +10,7 @@ __module_group__ = "Core"

 import os
 from collections import OrderedDict
+from utils import data_dir
 from utils import get_post_attachments
 from utils import get_content_from_post
 from utils import has_object_dict

@@ -259,7 +260,7 @@ def remove_reading_event(base_dir: str,
     if not book_event_type:
         print('remove_reading_event no book event')
         return False
-    reading_path = base_dir + '/accounts/reading'
+    reading_path = data_dir(base_dir) + '/reading'
     readers_path = reading_path + '/readers'
     reader_books_filename = \
         readers_path + '/' + actor.replace('/', '#') + '.json'

@@ -391,7 +392,7 @@ def _update_recent_books_list(base_dir: str, book_id: str,
                               debug: bool) -> None:
     """prepend a book to the recent books list
     """
-    recent_books_filename = base_dir + '/accounts/recent_books.txt'
+    recent_books_filename = data_dir(base_dir) + '/recent_books.txt'
     if os.path.isfile(recent_books_filename):
         try:
             with open(recent_books_filename, 'r+',

@@ -419,7 +420,7 @@ def _deduplicate_recent_books_list(base_dir: str,
                                    max_recent_books: int) -> None:
     """ Deduplicate and limit the length of the recent books list
     """
-    recent_books_filename = base_dir + '/accounts/recent_books.txt'
+    recent_books_filename = data_dir(base_dir) + '/recent_books.txt'
     if not os.path.isfile(recent_books_filename):
         return

@@ -485,9 +486,10 @@ def store_book_events(base_dir: str,
         if debug:
             print('DEBUG: no book event')
         return False
-    reading_path = base_dir + '/accounts/reading'
-    if not os.path.isdir(base_dir + '/accounts'):
-        os.mkdir(base_dir + '/accounts')
+    dir_str = data_dir(base_dir)
+    reading_path = dir_str + '/reading'
+    if not os.path.isdir(dir_str):
+        os.mkdir(dir_str)
     if not os.path.isdir(reading_path):
         os.mkdir(reading_path)
     books_path = reading_path + '/books'

@@ -558,7 +560,7 @@ def html_profile_book_list(base_dir: str, actor: str, no_of_books: int,
                            authorized: bool) -> str:
     """Returns html for displaying a list of books on a profile screen
     """
-    reading_path = base_dir + '/accounts/reading'
+    reading_path = data_dir(base_dir) + '/reading'
     readers_path = reading_path + '/readers'
     reader_books_filename = \
         readers_path + '/' + actor.replace('/', '#') + '.json'

@@ -8,6 +8,7 @@ __status__ = "Production"
 __module_group__ = "Core"

 import os
+from utils import data_dir
 from utils import get_user_paths
 from utils import is_dormant
 from utils import acct_dir

@@ -26,7 +27,7 @@ def get_moved_accounts(base_dir: str, nickname: str, domain: str,
                        filename: str) -> {}:
     """returns a dict of moved accounts
     """
-    moved_accounts_filename = base_dir + '/accounts/actors_moved.txt'
+    moved_accounts_filename = data_dir(base_dir) + '/actors_moved.txt'
     if not os.path.isfile(moved_accounts_filename):
         return {}
     refollow_str = ''

@@ -230,12 +231,12 @@ def update_moved_actors(base_dir: str, debug: bool) -> None:

     # get the handles to be checked for movedTo attribute
     handles_to_check = []
-    for _, dirs, _ in os.walk(base_dir + '/accounts'):
+    dir_str = data_dir(base_dir)
+    for _, dirs, _ in os.walk(dir_str):
         for account in dirs:
             if not is_account_dir(account):
                 continue
-            following_filename = \
-                base_dir + '/accounts/' + account + '/following.txt'
+            following_filename = dir_str + '/' + account + '/following.txt'
             if not os.path.isfile(following_filename):
                 continue
             following_str = ''

@@ -288,7 +289,7 @@ def update_moved_actors(base_dir: str, debug: bool) -> None:
     else:
         print('No moved accounts detected')

-    moved_accounts_filename = base_dir + '/accounts/actors_moved.txt'
+    moved_accounts_filename = data_dir(base_dir) + '/actors_moved.txt'
     if not moved_str:
         if os.path.isfile(moved_accounts_filename):
             try:

17 roles.py
@@ -8,6 +8,7 @@ __status__ = "Production"
 __module_group__ = "Profile Metadata"

 import os
+from utils import data_dir
 from utils import load_json
 from utils import save_json
 from utils import get_status_number

@@ -22,14 +23,15 @@ def _clear_role_status(base_dir: str, role: str) -> None:
     This could be slow if there are many users, but only happens
     rarely when roles are appointed or removed
     """
-    directory = os.fsencode(base_dir + '/accounts/')
+    dir_str = data_dir(base_dir)
+    directory = os.fsencode(dir_str + '/')
     for fname in os.scandir(directory):
         filename = os.fsdecode(fname.name)
         if '@' not in filename:
             continue
         if not filename.endswith(".json"):
             continue
-        filename = os.path.join(base_dir + '/accounts/', filename)
+        filename = os.path.join(dir_str + '/', filename)
         if not text_in_file('"' + role + '"', filename):
             continue
         actor_json = load_json(filename)

@@ -48,7 +50,7 @@ def _add_role(base_dir: str, nickname: str, domain: str,
     This is a file containing the nicknames of accounts having this role
     """
     domain = remove_domain_port(domain)
-    role_file = base_dir + '/accounts/' + role_filename
+    role_file = data_dir(base_dir) + '/' + role_filename
     if os.path.isfile(role_file):
         # is this nickname already in the file?

@@ -71,7 +73,8 @@ def _add_role(base_dir: str, nickname: str, domain: str,
                 role_nickname = role_nickname.strip('\n').strip('\r')
                 if len(role_nickname) < 2:
                     continue
-                if os.path.isdir(base_dir + '/accounts/' +
+                dir_str = data_dir(base_dir)
+                if os.path.isdir(dir_str + '/' +
                                  role_nickname + '@' + domain):
                     fp_role.write(role_nickname + '\n')
     except OSError:

@@ -90,7 +93,7 @@ def _remove_role(base_dir: str, nickname: str, role_filename: str) -> None:
     """Removes a role nickname from the file.
     This is a file containing the nicknames of accounts having this role
     """
-    role_file = base_dir + '/accounts/' + role_filename
+    role_file = data_dir(base_dir) + '/' + role_filename
     if not os.path.isfile(role_file):
         return

@@ -270,7 +273,7 @@ def actor_has_role(actor_json: {}, role_name: str) -> bool:
 def is_devops(base_dir: str, nickname: str) -> bool:
     """Returns true if the given nickname has the devops role
     """
-    devops_file = base_dir + '/accounts/devops.txt'
+    devops_file = data_dir(base_dir) + '/devops.txt'

     if not os.path.isfile(devops_file):
         admin_name = get_config_param(base_dir, 'admin')

@@ -305,7 +308,7 @@ def set_roles_from_list(base_dir: str, domain: str, admin_nickname: str,
     # check for admin user
     if not path.startswith('/users/' + admin_nickname + '/'):
         return
-    roles_filename = base_dir + '/accounts/' + list_filename
+    roles_filename = data_dir(base_dir) + '/' + list_filename
     if not fields.get(list_name):
         if os.path.isfile(roles_filename):
             _clear_role_status(base_dir, role_name)

@@ -9,6 +9,7 @@ __module_group__ = "Calendar"

import os
import time
+from utils import data_dir
from utils import date_from_string_format
from utils import date_epoch
from utils import acct_handle_dir
@@ -196,7 +197,8 @@ def run_post_schedule(base_dir: str, httpd, max_scheduled_posts: int):
while True:
time.sleep(60)
# for each account
-for _, dirs, _ in os.walk(base_dir + '/accounts'):
+dir_str = data_dir(base_dir)
+for _, dirs, _ in os.walk(dir_str):
for account in dirs:
if '@' not in account:
continue
@@ -204,7 +206,7 @@ def run_post_schedule(base_dir: str, httpd, max_scheduled_posts: int):
continue
# scheduled posts index for this account
schedule_index_filename = \
-base_dir + '/accounts/' + account + '/schedule.index'
+dir_str + '/' + account + '/schedule.index'
if not os.path.isfile(schedule_index_filename):
continue
_update_post_schedule(base_dir, account,
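The run_post_schedule change above, and several hunks that follow, repeat the same account-iteration idiom: walk the data directory and skip entries that are not account folders. A minimal sketch of that pattern, assuming the data_dir() helper added by this commit; the collecting function is hypothetical and shown only for illustration, since the diff applies the pattern inline at each call site:

import os

def data_dir(base_dir) -> str:
    """Returns the directory where account data is stored"""
    return base_dir + '/accounts'

def account_dirs(base_dir: str) -> list:
    # hypothetical helper illustrating the walk-and-filter pattern
    dir_str = data_dir(base_dir)
    found = []
    for _, dirs, _ in os.walk(dir_str):
        for account in dirs:
            # skip anything which is not a nickname@domain account folder
            if '@' not in account:
                continue
            if account.startswith('inbox@') or account.startswith('Actor@'):
                continue
            found.append(os.path.join(dir_str, account))
        break  # only the top level of the data directory is needed
    return found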
26
shares.py
@@ -23,6 +23,7 @@ from session import post_json
from session import post_image
from session import create_session
from session import get_json_valid
+from utils import data_dir
from utils import resembles_url
from utils import date_utcnow
from utils import dangerous_markup
@@ -282,7 +283,8 @@ def _indicate_new_share_available(base_dir: str, http_prefix: str,
block_federated: []) -> None:
"""Indicate to each account that a new share is available
"""
-for _, dirs, _ in os.walk(base_dir + '/accounts'):
+dir_str = data_dir(base_dir)
+for _, dirs, _ in os.walk(dir_str):
for handle in dirs:
if not is_account_dir(handle):
continue
@@ -417,7 +419,8 @@ def expire_shares(base_dir: str, max_shares_on_profile: int,
person_cache: {}) -> None:
"""Removes expired items from shares
"""
-for _, dirs, _ in os.walk(base_dir + '/accounts'):
+dir_str = data_dir(base_dir)
+for _, dirs, _ in os.walk(dir_str):
for account in dirs:
if not is_account_dir(account):
continue
@@ -1401,7 +1404,8 @@ def shares_catalog_endpoint(base_dir: str, http_prefix: str,
curr_date = date_utcnow()
curr_date_str = curr_date.strftime("%Y-%m-%d")

-for _, dirs, _ in os.walk(base_dir + '/accounts'):
+dir_str = data_dir(base_dir)
+for _, dirs, _ in os.walk(dir_str):
for acct in dirs:
if not is_account_dir(acct):
continue
@@ -1505,7 +1509,7 @@ def generate_shared_item_federation_tokens(shared_items_federated_domains: [],
tokens_json = {}
if base_dir:
tokens_filename = \
-base_dir + '/accounts/sharedItemsFederationTokens.json'
+data_dir(base_dir) + '/sharedItemsFederationTokens.json'
if os.path.isfile(tokens_filename):
tokens_json = load_json(tokens_filename, 1, 2)
if tokens_json is None:
@@ -1536,7 +1540,7 @@ def update_shared_item_federation_token(base_dir: str,
tokens_json = {}
if base_dir:
tokens_filename = \
-base_dir + '/accounts/sharedItemsFederationTokens.json'
+data_dir(base_dir) + '/sharedItemsFederationTokens.json'
if os.path.isfile(tokens_filename):
if debug:
print('Update loading tokens for ' + token_domain_full)
@@ -1581,7 +1585,7 @@ def merge_shared_item_tokens(base_dir: str, domain_full: str,
changed = True
if base_dir and changed:
tokens_filename = \
-base_dir + '/accounts/sharedItemsFederationTokens.json'
+data_dir(base_dir) + '/sharedItemsFederationTokens.json'
save_json(tokens_json, tokens_filename)
return tokens_json

@@ -1596,7 +1600,7 @@ def create_shared_item_federation_token(base_dir: str,
tokens_json = {}
if base_dir:
tokens_filename = \
-base_dir + '/accounts/sharedItemsFederationTokens.json'
+data_dir(base_dir) + '/sharedItemsFederationTokens.json'
if os.path.isfile(tokens_filename):
tokens_json = load_json(tokens_filename, 1, 2)
if tokens_json is None:
@@ -1642,7 +1646,7 @@ def authorize_shared_items(shared_items_federated_domains: [],
return False
if not tokens_json:
tokens_filename = \
-base_dir + '/accounts/sharedItemsFederationTokens.json'
+data_dir(base_dir) + '/sharedItemsFederationTokens.json'
if not os.path.isfile(tokens_filename):
if debug:
print('DEBUG: shared item federation tokens file missing ' +
@@ -1758,7 +1762,7 @@ def _generate_next_shares_token_update(base_dir: str,
"""Creates a file containing the next date when the shared items token
for this instance will be updated
"""
-token_update_dir = base_dir + '/accounts'
+token_update_dir = data_dir(base_dir)
if not os.path.isdir(base_dir):
os.mkdir(base_dir)
if not os.path.isdir(token_update_dir):
@@ -1810,7 +1814,7 @@ def _regenerate_shares_token(base_dir: str, domain_full: str,
federated shares list of domains continue to follow and communicate
then they will receive the new token automatically
"""
-token_update_filename = base_dir + '/accounts/.tokenUpdate'
+token_update_filename = data_dir(base_dir) + '/.tokenUpdate'
if not os.path.isfile(token_update_filename):
return
next_update_sec = None
@@ -1870,7 +1874,7 @@ def run_federated_shares_daemon(base_dir: str, httpd, http_prefix: str,

# load the tokens
tokens_filename = \
-base_dir + '/accounts/sharedItemsFederationTokens.json'
+data_dir(base_dir) + '/sharedItemsFederationTokens.json'
if not os.path.isfile(tokens_filename):
time.sleep(file_check_interval_sec)
continue
@@ -9,8 +9,9 @@ __status__ = "Production"
__module_group__ = "Core"

import http.client
-from urllib.parse import urlparse
import ssl
+from urllib.parse import urlparse
+from utils import data_dir


class Result:
@@ -157,7 +158,7 @@ def referer_is_active(http_prefix: str,
def save_unavailable_sites(base_dir: str, sites_unavailable: []) -> None:
"""Save a list of unavailable sites
"""
-unavailable_sites_filename = base_dir + '/accounts/unavailable_sites.txt'
+unavailable_sites_filename = data_dir(base_dir) + '/unavailable_sites.txt'
sites_unavailable.sort()
try:
with open(unavailable_sites_filename, 'w+',
@@ -172,7 +173,7 @@ def save_unavailable_sites(base_dir: str, sites_unavailable: []) -> None:
def load_unavailable_sites(base_dir: str) -> []:
"""load a list of unavailable sites
"""
-unavailable_sites_filename = base_dir + '/accounts/unavailable_sites.txt'
+unavailable_sites_filename = data_dir(base_dir) + '/unavailable_sites.txt'
sites_unavailable = []
try:
with open(unavailable_sites_filename, 'r',
@@ -11,6 +11,7 @@ import os
import html
import random
import urllib.parse
+from utils import data_dir
from utils import get_post_attachments
from utils import get_cached_post_filename
from utils import remove_id_ending
@@ -77,7 +78,7 @@ def _speaker_pronounce(base_dir: str, say_text: str, translate: {}) -> str:
line items such as:
Epicyon -> Epi-cyon
"""
-pronounce_filename = base_dir + '/accounts/speaker_pronounce.txt'
+pronounce_filename = data_dir(base_dir) + '/speaker_pronounce.txt'
convert_dict = {}
if translate:
convert_dict = {
70
theme.py
@@ -8,6 +8,7 @@ __status__ = "Production"
__module_group__ = "Web Interface"

import os
+from utils import data_dir
from utils import string_ends_with
from utils import is_account_dir
from utils import load_json
@@ -153,6 +154,7 @@ def _copy_theme_help_files(base_dir: str, theme_name: str,
theme_dir = base_dir + '/theme/' + theme_name + '/welcome'
if not os.path.isdir(theme_dir):
theme_dir = base_dir + '/defaultwelcome'
+dir_str = data_dir(base_dir)
for _, _, files in os.walk(theme_dir):
for help_markdown_file in files:
if not help_markdown_file.endswith('_' + system_language + '.md'):
@@ -162,9 +164,9 @@ def _copy_theme_help_files(base_dir: str, theme_name: str,
'.md')
if dest_help_markdown_file in ('profile.md', 'final.md'):
dest_help_markdown_file = 'welcome_' + dest_help_markdown_file
-if os.path.isdir(base_dir + '/accounts'):
+if os.path.isdir(dir_str):
copyfile(theme_dir + '/' + help_markdown_file,
-base_dir + '/accounts/' + dest_help_markdown_file)
+dir_str + '/' + dest_help_markdown_file)
break


@@ -453,7 +455,7 @@ def enable_grayscale(base_dir: str) -> None:
except OSError as ex:
print('EX: enable_grayscale unable to read ' +
template_filename + ' ' + str(ex))
-grayscale_filename = base_dir + '/accounts/.grayscale'
+grayscale_filename = data_dir(base_dir) + '/.grayscale'
if not os.path.isfile(grayscale_filename):
try:
with open(grayscale_filename, 'w+', encoding='utf-8') as grayfile:
@@ -483,7 +485,7 @@ def disable_grayscale(base_dir: str) -> None:
except OSError as ex:
print('EX: disable_grayscale unable to read ' +
template_filename + ' ' + str(ex))
-grayscale_filename = base_dir + '/accounts/.grayscale'
+grayscale_filename = data_dir(base_dir) + '/.grayscale'
if os.path.isfile(grayscale_filename):
try:
os.remove(grayscale_filename)
@@ -563,7 +565,7 @@ def set_theme_from_designer(base_dir: str, theme_name: str, domain: str,
allow_local_network_access: bool,
system_language: str,
dyslexic_font: bool):
-custom_theme_filename = base_dir + '/accounts/theme.json'
+custom_theme_filename = data_dir(base_dir) + '/theme.json'
save_json(theme_params, custom_theme_filename)
set_theme(base_dir, theme_name, domain,
allow_local_network_access, system_language,
@@ -573,7 +575,7 @@ def set_theme_from_designer(base_dir: str, theme_name: str, domain: str,
def reset_theme_designer_settings(base_dir: str) -> None:
"""Resets the theme designer settings
"""
-custom_variables_file = base_dir + '/accounts/theme.json'
+custom_variables_file = data_dir(base_dir) + '/theme.json'
if os.path.isfile(custom_variables_file):
try:
os.remove(custom_variables_file)
@@ -593,7 +595,7 @@ def _read_variables_file(base_dir: str, theme_name: str,
return

# set custom theme parameters
-custom_variables_file = base_dir + '/accounts/theme.json'
+custom_variables_file = data_dir(base_dir) + '/theme.json'
if os.path.isfile(custom_variables_file):
custom_theme_params = load_json(custom_variables_file, 0)
if custom_theme_params:
@@ -667,7 +669,7 @@ def _set_theme_fonts(base_dir: str, theme_name: str) -> None:
def get_text_mode_banner(base_dir: str) -> str:
"""Returns the banner used for shell browsers, like Lynx
"""
-text_mode_banner_filename = base_dir + '/accounts/banner.txt'
+text_mode_banner_filename = data_dir(base_dir) + '/banner.txt'
if os.path.isfile(text_mode_banner_filename):
with open(text_mode_banner_filename, 'r',
encoding='utf-8') as fp_text:
@@ -680,7 +682,7 @@ def get_text_mode_banner(base_dir: str) -> str:
def get_text_mode_logo(base_dir: str) -> str:
"""Returns the login screen logo used for shell browsers, like Lynx
"""
-text_mode_logo_filename = base_dir + '/accounts/logo.txt'
+text_mode_logo_filename = data_dir(base_dir) + '/logo.txt'
if not os.path.isfile(text_mode_logo_filename):
text_mode_logo_filename = base_dir + '/img/logo.txt'

@@ -696,40 +698,38 @@ def _set_text_mode_theme(base_dir: str, name: str) -> None:
# in browsers such as Lynx
text_mode_logo_filename = \
base_dir + '/theme/' + name + '/logo.txt'
+dir_str = data_dir(base_dir)
if os.path.isfile(text_mode_logo_filename):
try:
-copyfile(text_mode_logo_filename,
-base_dir + '/accounts/logo.txt')
+copyfile(text_mode_logo_filename, dir_str + '/logo.txt')
except OSError:
print('EX: _set_text_mode_theme unable to copy ' +
text_mode_logo_filename + ' ' +
-base_dir + '/accounts/logo.txt')
+dir_str + '/logo.txt')
else:
+dir_str = data_dir(base_dir)
try:
-copyfile(base_dir + '/img/logo.txt',
-base_dir + '/accounts/logo.txt')
+copyfile(base_dir + '/img/logo.txt', dir_str + '/logo.txt')
except OSError:
print('EX: _set_text_mode_theme unable to copy ' +
-base_dir + '/img/logo.txt ' +
-base_dir + '/accounts/logo.txt')
+base_dir + '/img/logo.txt ' + dir_str + '/logo.txt')

# set the text mode banner which appears in browsers such as Lynx
text_mode_banner_filename = \
base_dir + '/theme/' + name + '/banner.txt'
-if os.path.isfile(base_dir + '/accounts/banner.txt'):
+if os.path.isfile(dir_str + '/banner.txt'):
try:
-os.remove(base_dir + '/accounts/banner.txt')
+os.remove(dir_str + '/banner.txt')
except OSError:
print('EX: _set_text_mode_theme unable to delete ' +
-base_dir + '/accounts/banner.txt')
+dir_str + '/banner.txt')
if os.path.isfile(text_mode_banner_filename):
try:
-copyfile(text_mode_banner_filename,
-base_dir + '/accounts/banner.txt')
+copyfile(text_mode_banner_filename, dir_str + '/banner.txt')
except OSError:
print('EX: _set_text_mode_theme unable to copy ' +
text_mode_banner_filename + ' ' +
-base_dir + '/accounts/banner.txt')
+dir_str + '/banner.txt')


def _set_theme_images(base_dir: str, name: str) -> None:
@@ -756,11 +756,12 @@ def _set_theme_images(base_dir: str, name: str) -> None:
'welcome')
extensions = get_image_extensions()

-for _, dirs, _ in os.walk(base_dir + '/accounts'):
+dir_str = data_dir(base_dir)
+for _, dirs, _ in os.walk(dir_str):
for acct in dirs:
if not is_account_dir(acct):
continue
-account_dir = os.path.join(base_dir + '/accounts', acct)
+account_dir = os.path.join(dir_str, acct)

for background_type in background_names:
for ext in extensions:
@@ -776,7 +777,7 @@ def _set_theme_images(base_dir: str, name: str) -> None:
if os.path.isfile(background_image_filename):
try:
copyfile(background_image_filename,
-base_dir + '/accounts/' +
+dir_str + '/' +
background_type + '-background.' + ext)
continue
except OSError:
@@ -784,14 +785,14 @@ def _set_theme_images(base_dir: str, name: str) -> None:
background_image_filename)
# background image was not found
# so remove any existing file
-if os.path.isfile(base_dir + '/accounts/' +
+if os.path.isfile(dir_str + '/' +
background_type + '-background.' + ext):
try:
-os.remove(base_dir + '/accounts/' +
+os.remove(dir_str + '/' +
background_type + '-background.' + ext)
except OSError:
print('EX: _set_theme_images unable to delete ' +
-base_dir + '/accounts/' +
+dir_str + '/' +
background_type + '-background.' + ext)

if os.path.isfile(profile_image_filename) and \
@@ -883,9 +884,10 @@ def _set_clear_cache_flag(base_dir: str) -> None:
"""Sets a flag which can be used by an external system
(eg. a script in a cron job) to clear the browser cache
"""
-if not os.path.isdir(base_dir + '/accounts'):
+dir_str = data_dir(base_dir)
+if not os.path.isdir(dir_str):
return
-flag_filename = base_dir + '/accounts/.clear_cache'
+flag_filename = dir_str + '/.clear_cache'
try:
with open(flag_filename, 'w+', encoding='utf-8') as fp_flag:
fp_flag.write('\n')
@@ -944,13 +946,13 @@ def set_theme(base_dir: str, name: str, domain: str,
# set the news avatar
news_avatar_theme_filename = \
base_dir + '/theme/' + name + '/icons/avatar_news.png'
-if os.path.isdir(base_dir + '/accounts/news@' + domain):
+dir_str = data_dir(base_dir)
+if os.path.isdir(dir_str + '/news@' + domain):
if os.path.isfile(news_avatar_theme_filename):
-news_avatar_filename = \
-base_dir + '/accounts/news@' + domain + '/avatar.png'
+news_avatar_filename = dir_str + '/news@' + domain + '/avatar.png'
copyfile(news_avatar_theme_filename, news_avatar_filename)

-grayscale_filename = base_dir + '/accounts/.grayscale'
+grayscale_filename = dir_str + '/.grayscale'
if os.path.isfile(grayscale_filename):
enable_grayscale(base_dir)
else:
85
utils.py
@@ -570,12 +570,22 @@ def get_base_content_from_post(post_json_object: {},
return this_post_json['content']


+def data_dir(base_dir) -> str:
+"""Returns the directory where account data is stored
+"""
+return base_dir + '/accounts'


def acct_dir(base_dir: str, nickname: str, domain: str) -> str:
-return base_dir + '/accounts/' + nickname + '@' + domain
+"""Returns the directory for an account on this instance
+"""
+return data_dir(base_dir) + '/' + nickname + '@' + domain


def acct_handle_dir(base_dir: str, handle: str) -> str:
-return base_dir + '/accounts/' + handle
+"""Returns the directory for an account on this instance
+"""
+return data_dir(base_dir) + '/' + handle


def is_featured_writer(base_dir: str, nickname: str, domain: str) -> bool:
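The hunk above adds the helper which every other change in this commit routes account storage paths through, and gives the two directory helpers docstrings. As a minimal sketch of the resulting call pattern (the wrapper functions below are illustrative only and do not appear in the diff, which edits call sites directly):

import os

def data_dir(base_dir) -> str:
    """Returns the directory where account data is stored"""
    return base_dir + '/accounts'

def moderation_index_file(base_dir: str) -> str:
    # hypothetical example: the '/accounts' suffix now lives in one place,
    # so callers only name the file they want under the data directory
    return data_dir(base_dir) + '/moderation.txt'

def moderation_index_exists(base_dir: str) -> bool:
    # files under the data directory are then checked in the usual way
    return os.path.isfile(moderation_index_file(base_dir))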
@@ -590,7 +600,7 @@ def is_featured_writer(base_dir: str, nickname: str, domain: str) -> bool:
def refresh_newswire(base_dir: str):
"""Causes the newswire to be updates after a change to user accounts
"""
-refresh_newswire_filename = base_dir + '/accounts/.refresh_newswire'
+refresh_newswire_filename = data_dir(base_dir) + '/.refresh_newswire'
if os.path.isfile(refresh_newswire_filename):
return
try:
@@ -750,7 +760,7 @@ def is_dormant(base_dir: str, nickname: str, domain: str, actor: str,
def is_editor(base_dir: str, nickname: str) -> bool:
"""Returns true if the given nickname is an editor
"""
-editors_file = base_dir + '/accounts/editors.txt'
+editors_file = data_dir(base_dir) + '/editors.txt'

if not os.path.isfile(editors_file):
admin_name = get_config_param(base_dir, 'admin')
@@ -776,7 +786,7 @@ def is_editor(base_dir: str, nickname: str) -> bool:
def is_artist(base_dir: str, nickname: str) -> bool:
"""Returns true if the given nickname is an artist
"""
-artists_file = base_dir + '/accounts/artists.txt'
+artists_file = data_dir(base_dir) + '/artists.txt'

if not os.path.isfile(artists_file):
admin_name = get_config_param(base_dir, 'admin')
@@ -986,7 +996,7 @@ def is_system_account(nickname: str) -> bool:
def get_memorials(base_dir: str) -> str:
"""Returns the nicknames for memorial accounts
"""
-memorial_file = base_dir + '/accounts/memorial'
+memorial_file = data_dir(base_dir) + '/memorial'
if not os.path.isfile(memorial_file):
return ''

@@ -1013,7 +1023,7 @@ def set_memorials(base_dir: str, domain: str, memorial_str) -> None:
memorial_str = new_memorial_str

# save the accounts
-memorial_file = base_dir + '/accounts/memorial'
+memorial_file = data_dir(base_dir) + '/memorial'
try:
with open(memorial_file, 'w+', encoding='utf-8') as fp_memorial:
fp_memorial.write(memorial_str)
@@ -1024,7 +1034,7 @@ def set_memorials(base_dir: str, domain: str, memorial_str) -> None:
def is_memorial_account(base_dir: str, nickname: str) -> bool:
"""Returns true if the given nickname is a memorial account
"""
-memorial_file = base_dir + '/accounts/memorial'
+memorial_file = data_dir(base_dir) + '/memorial'
if not os.path.isfile(memorial_file):
return False
memorial_list = []
@@ -1085,7 +1095,7 @@ def is_suspended(base_dir: str, nickname: str) -> bool:
if nickname == admin_nickname:
return False

-suspended_filename = base_dir + '/accounts/suspended.txt'
+suspended_filename = data_dir(base_dir) + '/suspended.txt'
if os.path.isfile(suspended_filename):
with open(suspended_filename, 'r', encoding='utf-8') as susp_file:
lines = susp_file.readlines()
@@ -1125,7 +1135,8 @@ def get_followers_of_person(base_dir: str,
handle_dir = acct_handle_dir(base_dir, handle)
if not os.path.isdir(handle_dir):
return followers
-for subdir, dirs, _ in os.walk(base_dir + '/accounts'):
+dir_str = data_dir(base_dir)
+for subdir, dirs, _ in os.walk(dir_str):
for account in dirs:
filename = os.path.join(subdir, account) + '/' + follow_file
if account == handle or \
@@ -1942,8 +1953,9 @@ def follow_person(base_dir: str, nickname: str, domain: str,
print('EX: follow_person unable to write ' +
unfollowed_filename)

-if not os.path.isdir(base_dir + '/accounts'):
-os.mkdir(base_dir + '/accounts')
+dir_str = data_dir(base_dir)
+if not os.path.isdir(dir_str):
+os.mkdir(dir_str)
handle_to_follow = follow_nickname + '@' + follow_domain
if group_account:
handle_to_follow = '!' + handle_to_follow
@@ -2019,7 +2031,7 @@ def locate_news_votes(base_dir: str, domain: str,
else:
post_url = post_url + '.json.votes'

-account_dir = base_dir + '/accounts/news@' + domain + '/'
+account_dir = data_dir(base_dir) + '/news@' + domain + '/'
post_filename = account_dir + 'outbox/' + post_url
if os.path.isfile(post_filename):
return post_filename
@@ -2043,7 +2055,7 @@ def locate_news_arrival(base_dir: str, domain: str,
else:
post_url = post_url + '.json.arrived'

-account_dir = base_dir + '/accounts/news@' + domain + '/'
+account_dir = data_dir(base_dir) + '/news@' + domain + '/'
post_filename = account_dir + 'outbox/' + post_url
if os.path.isfile(post_filename):
with open(post_filename, 'r', encoding='utf-8') as arrival_file:
@@ -2063,13 +2075,14 @@ def clear_from_post_caches(base_dir: str, recent_posts_cache: {},
to news will appear
"""
filename = '/postcache/' + post_id + '.html'
-for _, dirs, _ in os.walk(base_dir + '/accounts'):
+dir_str = data_dir(base_dir)
+for _, dirs, _ in os.walk(dir_str):
for acct in dirs:
if '@' not in acct:
continue
if acct.startswith('inbox@') or acct.startswith('Actor@'):
continue
-cache_dir = os.path.join(base_dir + '/accounts', acct)
+cache_dir = os.path.join(dir_str, acct)
post_filename = cache_dir + filename
if os.path.isfile(post_filename):
try:
@@ -2114,7 +2127,7 @@ def locate_post(base_dir: str, nickname: str, domain: str,
return post_filename

# check news posts
-account_dir = base_dir + '/accounts/news' + '@' + domain + '/'
+account_dir = data_dir(base_dir) + '/news' + '@' + domain + '/'
post_filename = account_dir + 'outbox/' + post_url
if os.path.isfile(post_filename):
return post_filename
@@ -2265,7 +2278,7 @@ def remove_moderation_post_from_index(base_dir: str, post_url: str,
debug: bool) -> None:
"""Removes a url from the moderation index
"""
-moderation_index_file = base_dir + '/accounts/moderation.txt'
+moderation_index_file = data_dir(base_dir) + '/moderation.txt'
if not os.path.isfile(moderation_index_file):
return
post_id = remove_id_ending(post_url)
@@ -2843,7 +2856,8 @@ def no_of_accounts(base_dir: str) -> bool:
"""Returns the number of accounts on the system
"""
account_ctr = 0
-for _, dirs, _ in os.walk(base_dir + '/accounts'):
+dir_str = data_dir(base_dir)
+for _, dirs, _ in os.walk(dir_str):
for account in dirs:
if is_account_dir(account):
account_ctr += 1
@@ -2857,12 +2871,13 @@ def no_of_active_accounts_monthly(base_dir: str, months: int) -> bool:
account_ctr = 0
curr_time = int(time.time())
month_seconds = int(60*60*24*30*months)
-for _, dirs, _ in os.walk(base_dir + '/accounts'):
+dir_str = data_dir(base_dir)
+for _, dirs, _ in os.walk(dir_str):
for account in dirs:
if not is_account_dir(account):
continue
last_used_filename = \
-base_dir + '/accounts/' + account + '/.lastUsed'
+dir_str + '/' + account + '/.lastUsed'
if not os.path.isfile(last_used_filename):
continue
with open(last_used_filename, 'r',
@@ -4358,13 +4373,14 @@ def load_account_timezones(base_dir: str) -> {}:
"""Returns a dictionary containing the preferred timezone for each account
"""
account_timezone = {}
-for _, dirs, _ in os.walk(base_dir + '/accounts'):
+dir_str = data_dir(base_dir)
+for _, dirs, _ in os.walk(dir_str):
for acct in dirs:
if '@' not in acct:
continue
if acct.startswith('inbox@') or acct.startswith('Actor@'):
continue
-acct_directory = os.path.join(base_dir + '/accounts', acct)
+acct_directory = os.path.join(dir_str, acct)
tz_filename = acct_directory + '/timezone.txt'
if not os.path.isfile(tz_filename):
continue
@@ -4382,14 +4398,14 @@ def load_bold_reading(base_dir: str) -> {}:
"""Returns a dictionary containing the bold reading status for each account
"""
bold_reading = {}
-for _, dirs, _ in os.walk(base_dir + '/accounts'):
+dir_str = data_dir(base_dir)
+for _, dirs, _ in os.walk(dir_str):
for acct in dirs:
if '@' not in acct:
continue
if acct.startswith('inbox@') or acct.startswith('Actor@'):
continue
-bold_reading_filename = \
-base_dir + '/accounts/' + acct + '/.boldReading'
+bold_reading_filename = dir_str + '/' + acct + '/.boldReading'
if os.path.isfile(bold_reading_filename):
nickname = acct.split('@')[0]
bold_reading[nickname] = True
@@ -4401,14 +4417,14 @@ def load_hide_follows(base_dir: str) -> {}:
"""Returns a dictionary containing the hide follows status for each account
"""
hide_follows = {}
-for _, dirs, _ in os.walk(base_dir + '/accounts'):
+dir_str = data_dir(base_dir)
+for _, dirs, _ in os.walk(dir_str):
for acct in dirs:
if '@' not in acct:
continue
if acct.startswith('inbox@') or acct.startswith('Actor@'):
continue
-hide_follows_filename = \
-base_dir + '/accounts/' + acct + '/.hideFollows'
+hide_follows_filename = dir_str + '/' + acct + '/.hideFollows'
if os.path.isfile(hide_follows_filename):
nickname = acct.split('@')[0]
hide_follows[nickname] = True
@@ -4682,7 +4698,8 @@ def load_min_images_for_accounts(base_dir: str) -> []:
be minimized by default
"""
min_images_for_accounts = []
-for subdir, dirs, _ in os.walk(base_dir + '/accounts'):
+dir_str = data_dir(base_dir)
+for subdir, dirs, _ in os.walk(dir_str):
for account in dirs:
if not is_account_dir(account):
continue
@@ -4726,7 +4743,8 @@ def load_reverse_timeline(base_dir: str) -> []:
see reversed timelines
"""
reverse_sequence = []
-for _, dirs, _ in os.walk(base_dir + '/accounts'):
+dir_str = data_dir(base_dir)
+for _, dirs, _ in os.walk(dir_str):
for acct in dirs:
if not is_account_dir(acct):
continue
@@ -4745,7 +4763,8 @@ def save_reverse_timeline(base_dir: str, reverse_sequence: []) -> []:
"""Saves flags for each user indicating whether they prefer to
see reversed timelines
"""
-for _, dirs, _ in os.walk(base_dir + '/accounts'):
+dir_str = data_dir(base_dir)
+for _, dirs, _ in os.walk(dir_str):
for acct in dirs:
if not is_account_dir(acct):
continue
@@ -5043,7 +5062,7 @@ def get_status_count(base_dir: str) -> int:
"""Get the total number of posts
"""
status_ctr = 0
-accounts_dir = base_dir + '/accounts'
+accounts_dir = data_dir(base_dir)
for _, dirs, _ in os.walk(accounts_dir):
for acct in dirs:
if not is_account_dir(acct):
@@ -9,6 +9,7 @@ __module_group__ = "Web Interface"

import os
from shutil import copyfile
+from utils import data_dir
from utils import get_config_param
from webapp_utils import html_header_with_website_markup
from webapp_utils import html_footer
@@ -21,19 +22,19 @@ def html_about(base_dir: str, http_prefix: str,
"""Show the about screen
"""
admin_nickname = get_config_param(base_dir, 'admin')
-if not os.path.isfile(base_dir + '/accounts/about.md'):
+dir_str = data_dir(base_dir)
+if not os.path.isfile(dir_str + '/about.md'):
copyfile(base_dir + '/default_about.md',
-base_dir + '/accounts/about.md')
+dir_str + '/about.md')

-if os.path.isfile(base_dir + '/accounts/login-background-custom.jpg'):
-if not os.path.isfile(base_dir + '/accounts/login-background.jpg'):
-copyfile(base_dir + '/accounts/login-background-custom.jpg',
-base_dir + '/accounts/login-background.jpg')
+if os.path.isfile(dir_str + '/login-background-custom.jpg'):
+if not os.path.isfile(dir_str + '/login-background.jpg'):
+copyfile(dir_str + '/login-background-custom.jpg',
+dir_str + '/login-background.jpg')

about_text = 'Information about this instance goes here.'
-if os.path.isfile(base_dir + '/accounts/about.md'):
-with open(base_dir + '/accounts/about.md', 'r',
-encoding='utf-8') as fp_about:
+if os.path.isfile(dir_str + '/about.md'):
+with open(dir_str + '/about.md', 'r', encoding='utf-8') as fp_about:
about_text = markdown_to_html(fp_about.read())

about_form = ''
|
||||||
__module_group__ = "Accessibility"
|
__module_group__ = "Accessibility"
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
from utils import data_dir
|
||||||
from utils import is_account_dir
|
from utils import is_account_dir
|
||||||
from utils import load_json
|
from utils import load_json
|
||||||
from utils import get_config_param
|
from utils import get_config_param
|
||||||
|
@ -21,11 +22,12 @@ def load_access_keys_for_accounts(base_dir: str, key_shortcuts: {},
|
||||||
access_keys_template: {}) -> None:
|
access_keys_template: {}) -> None:
|
||||||
"""Loads key shortcuts for each account
|
"""Loads key shortcuts for each account
|
||||||
"""
|
"""
|
||||||
for _, dirs, _ in os.walk(base_dir + '/accounts'):
|
dir_str = data_dir(base_dir)
|
||||||
|
for _, dirs, _ in os.walk(dir_str):
|
||||||
for acct in dirs:
|
for acct in dirs:
|
||||||
if not is_account_dir(acct):
|
if not is_account_dir(acct):
|
||||||
continue
|
continue
|
||||||
account_dir = os.path.join(base_dir + '/accounts', acct)
|
account_dir = os.path.join(dir_str, acct)
|
||||||
access_keys_filename = account_dir + '/access_keys.json'
|
access_keys_filename = account_dir + '/access_keys.json'
|
||||||
if not os.path.isfile(access_keys_filename):
|
if not os.path.isfile(access_keys_filename):
|
||||||
continue
|
continue
|
||||||
|
|
|
@@ -8,6 +8,7 @@ __status__ = "Production"
__module_group__ = "Web Interface Columns"

import os
+from utils import data_dir
from utils import get_config_param
from utils import get_nickname_from_actor
from utils import is_editor
@@ -28,7 +29,7 @@ from shares import share_category_icon
def _links_exist(base_dir: str) -> bool:
"""Returns true if links have been created
"""
-links_filename = base_dir + '/accounts/links.txt'
+links_filename = data_dir(base_dir) + '/links.txt'
return os.path.isfile(links_filename)


@@ -214,7 +215,7 @@ def get_left_column_content(base_dir: str, nickname: str, domain_full: str,
# flag used not to show the first separator
first_separator_added = False

-links_filename = base_dir + '/accounts/links.txt'
+links_filename = data_dir(base_dir) + '/links.txt'
links_file_contains_entries = False
links_list = None
if os.path.isfile(links_filename):
@@ -490,7 +491,7 @@ def html_edit_links(translate: {}, base_dir: str, path: str,
edit_links_form += \
' </div>\n'

-links_filename = base_dir + '/accounts/links.txt'
+links_filename = data_dir(base_dir) + '/links.txt'
links_str = ''
if os.path.isfile(links_filename):
with open(links_filename, 'r', encoding='utf-8') as fp_links:
@@ -515,7 +516,7 @@ def html_edit_links(translate: {}, base_dir: str, path: str,
admin_nickname = get_config_param(base_dir, 'admin')
if admin_nickname:
if nickname == admin_nickname:
-about_filename = base_dir + '/accounts/about.md'
+about_filename = data_dir(base_dir) + '/about.md'
about_str = ''
if os.path.isfile(about_filename):
with open(about_filename, 'r', encoding='utf-8') as fp_about:
@@ -534,7 +535,7 @@ def html_edit_links(translate: {}, base_dir: str, path: str,
edit_links_form += \
'</div>'

-tos_filename = base_dir + '/accounts/tos.md'
+tos_filename = data_dir(base_dir) + '/tos.md'
tos_str = ''
if os.path.isfile(tos_filename):
with open(tos_filename, 'r', encoding='utf-8') as fp_tos:
@@ -553,7 +554,7 @@ def html_edit_links(translate: {}, base_dir: str, path: str,
edit_links_form += \
'</div>'

-specification_filename = base_dir + '/accounts/activitypub.md'
+specification_filename = data_dir(base_dir) + '/activitypub.md'
specification_str = ''
if os.path.isfile(specification_filename):
with open(specification_filename, 'r',
@@ -10,6 +10,7 @@ __module_group__ = "Web Interface Columns"
import os
from content import remove_long_words
from content import limit_repeated_words
+from utils import data_dir
from utils import get_image_extensions
from utils import get_fav_filename_from_url
from utils import get_base_content_from_post
@@ -131,7 +132,8 @@ def get_right_column_content(base_dir: str, nickname: str, domain_full: str,

# show the edit icon
if editor:
-if os.path.isfile(base_dir + '/accounts/newswiremoderation.txt'):
+dir_str = data_dir(base_dir)
+if os.path.isfile(dir_str + '/newswiremoderation.txt'):
# show the edit icon highlighted
html_str += \
' <a href="' + \
@@ -606,7 +608,7 @@ def html_edit_newswire(translate: {}, base_dir: str, path: str,
edit_newswire_form += \
' </div>\n'

-newswire_filename = base_dir + '/accounts/newswire.txt'
+newswire_filename = data_dir(base_dir) + '/newswire.txt'
newswire_str = ''
if os.path.isfile(newswire_filename):
with open(newswire_filename, 'r', encoding='utf-8') as fp_news:
@@ -629,7 +631,7 @@ def html_edit_newswire(translate: {}, base_dir: str, path: str,

filter_str = ''
filter_filename = \
-base_dir + '/accounts/news@' + domain + '/filters.txt'
+data_dir(base_dir) + '/news@' + domain + '/filters.txt'
if os.path.isfile(filter_filename):
with open(filter_filename, 'r', encoding='utf-8') as filterfile:
filter_str = filterfile.read()
@@ -660,8 +662,7 @@ def html_edit_newswire(translate: {}, base_dir: str, path: str,
'spellcheck="true">' + dogwhistle_str + '</textarea>\n'

hashtag_rules_str = ''
-hashtag_rules_filename = \
-base_dir + '/accounts/hashtagrules.txt'
+hashtag_rules_filename = data_dir(base_dir) + '/hashtagrules.txt'
if os.path.isfile(hashtag_rules_filename):
with open(hashtag_rules_filename, 'r', encoding='utf-8') as rulesfile:
hashtag_rules_str = rulesfile.read()
@ -9,6 +9,7 @@ __module_group__ = "Web Interface"
|
||||||
|
|
||||||
import os
|
import os
|
||||||
from shutil import copyfile
|
from shutil import copyfile
|
||||||
|
from utils import data_dir
|
||||||
from utils import get_full_domain
|
from utils import get_full_domain
|
||||||
from utils import get_nickname_from_actor
|
from utils import get_nickname_from_actor
|
||||||
from utils import get_domain_from_actor
|
from utils import get_domain_from_actor
|
||||||
|
@ -212,10 +213,11 @@ def html_confirm_follow(translate: {}, base_dir: str,
|
||||||
"""
|
"""
|
||||||
follow_domain, _ = get_domain_from_actor(follow_actor)
|
follow_domain, _ = get_domain_from_actor(follow_actor)
|
||||||
|
|
||||||
if os.path.isfile(base_dir + '/accounts/follow-background-custom.jpg'):
|
dir_str = data_dir(base_dir)
|
||||||
if not os.path.isfile(base_dir + '/accounts/follow-background.jpg'):
|
if os.path.isfile(dir_str + '/follow-background-custom.jpg'):
|
||||||
copyfile(base_dir + '/accounts/follow-background-custom.jpg',
|
if not os.path.isfile(dir_str + '/follow-background.jpg'):
|
||||||
base_dir + '/accounts/follow-background.jpg')
|
copyfile(dir_str + '/follow-background-custom.jpg',
|
||||||
|
dir_str + '/follow-background.jpg')
|
||||||
|
|
||||||
css_filename = base_dir + '/epicyon-follow.css'
|
css_filename = base_dir + '/epicyon-follow.css'
|
||||||
if os.path.isfile(base_dir + '/follow.css'):
|
if os.path.isfile(base_dir + '/follow.css'):
|
||||||
|
@@ -270,10 +272,11 @@ def html_confirm_unfollow(translate: {}, base_dir: str,
     """
     follow_domain, _ = get_domain_from_actor(follow_actor)
 
-    if os.path.isfile(base_dir + '/accounts/follow-background-custom.jpg'):
-        if not os.path.isfile(base_dir + '/accounts/follow-background.jpg'):
-            copyfile(base_dir + '/accounts/follow-background-custom.jpg',
-                     base_dir + '/accounts/follow-background.jpg')
+    dir_str = data_dir(base_dir)
+    if os.path.isfile(dir_str + '/follow-background-custom.jpg'):
+        if not os.path.isfile(dir_str + '/follow-background.jpg'):
+            copyfile(dir_str + '/follow-background-custom.jpg',
+                     dir_str + '/follow-background.jpg')
 
     css_filename = base_dir + '/epicyon-follow.css'
     if os.path.isfile(base_dir + '/follow.css'):
|
|
|
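The two hunks above repeat the same copy-the-custom-background-if-missing
block for the follow and unfollow confirmation screens, and later hunks
repeat it for the options, podcast, login and welcome screens. A hedged
sketch of how the repeated block could be factored out; copy_custom_background
is a hypothetical name, the commit itself keeps the blocks inline:

import os
from shutil import copyfile


def copy_custom_background(dir_str: str, prefix: str) -> None:
    """If an uploaded <prefix>-background-custom.jpg exists and no
    <prefix>-background.jpg is installed yet, copy it into place"""
    custom = dir_str + '/' + prefix + '-background-custom.jpg'
    installed = dir_str + '/' + prefix + '-background.jpg'
    if os.path.isfile(custom) and not os.path.isfile(installed):
        copyfile(custom, installed)

# equivalent to the inline block above:
# copy_custom_background(data_dir(base_dir), 'follow')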
@ -8,6 +8,7 @@ __status__ = "Production"
|
||||||
__module_group__ = "Web Interface"
|
__module_group__ = "Web Interface"
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
from utils import data_dir
|
||||||
from utils import is_premium_account
|
from utils import is_premium_account
|
||||||
from utils import dangerous_markup
|
from utils import dangerous_markup
|
||||||
from utils import remove_html
|
from utils import remove_html
|
||||||
|
@@ -510,8 +511,9 @@ def html_new_post(edit_post_params: {},
         '<h1>' + translate['Write your report below.'] + '</h1>\n'
 
     # custom report header with any additional instructions
-    if os.path.isfile(base_dir + '/accounts/report.txt'):
-        with open(base_dir + '/accounts/report.txt', 'r',
+    dir_str = data_dir(base_dir)
+    if os.path.isfile(dir_str + '/report.txt'):
+        with open(dir_str + '/report.txt', 'r',
                   encoding='utf-8') as file:
             custom_report_text = file.read()
             if '</p>' not in custom_report_text:
|
@@ -548,11 +550,10 @@ def html_new_post(edit_post_params: {},
         translate['Enter the choices for your question below.'] + \
         '</h1>\n'
 
-    if os.path.isfile(base_dir + '/accounts/newpost.txt'):
-        with open(base_dir + '/accounts/newpost.txt', 'r',
-                  encoding='utf-8') as file:
-            new_post_text = \
-                '<p>' + file.read() + '</p>\n'
+    dir_str = data_dir(base_dir)
+    if os.path.isfile(dir_str + '/newpost.txt'):
+        with open(dir_str + '/newpost.txt', 'r', encoding='utf-8') as file:
+            new_post_text = '<p>' + file.read() + '</p>\n'
 
     css_filename = base_dir + '/epicyon-profile.css'
    if os.path.isfile(base_dir + '/epicyon.css'):
|
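report.txt and newpost.txt above are read in the same way: if the admin has
placed a file in the data directory, its contents replace the default heading
text. A small sketch of that pattern under the new helper;
custom_text_or_default, filename and fallback are illustrative names rather
than functions from the codebase:

import os
from utils import data_dir


def custom_text_or_default(base_dir: str, filename: str, fallback: str) -> str:
    """Returns admin-supplied text from the data directory, wrapped in a
    paragraph as in the hunks above, or the fallback when no file exists"""
    path = data_dir(base_dir) + '/' + filename
    if os.path.isfile(path):
        with open(path, 'r', encoding='utf-8') as fp_text:
            return '<p>' + fp_text.read() + '</p>\n'
    return fallback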
|
|
@ -9,6 +9,7 @@ __module_group__ = "Web Interface"
|
||||||
|
|
||||||
import os
|
import os
|
||||||
from datetime import datetime, timezone
|
from datetime import datetime, timezone
|
||||||
|
from utils import data_dir
|
||||||
from utils import get_nickname_from_actor
|
from utils import get_nickname_from_actor
|
||||||
from utils import get_config_param
|
from utils import get_config_param
|
||||||
from utils import escape_text
|
from utils import escape_text
|
||||||
|
@ -82,7 +83,7 @@ def html_hash_tag_swarm(base_dir: str, actor: str, translate: {}) -> str:
|
||||||
# Load the blocked hashtags into memory.
|
# Load the blocked hashtags into memory.
|
||||||
# This avoids needing to repeatedly load the blocked file for each hashtag
|
# This avoids needing to repeatedly load the blocked file for each hashtag
|
||||||
blocked_str = ''
|
blocked_str = ''
|
||||||
global_blocking_filename = base_dir + '/accounts/blocking.txt'
|
global_blocking_filename = data_dir(base_dir) + '/blocking.txt'
|
||||||
if os.path.isfile(global_blocking_filename):
|
if os.path.isfile(global_blocking_filename):
|
||||||
with open(global_blocking_filename, 'r',
|
with open(global_blocking_filename, 'r',
|
||||||
encoding='utf-8') as fp_block:
|
encoding='utf-8') as fp_block:
|
||||||
|
|
|
@ -11,6 +11,7 @@ import os
|
||||||
import time
|
import time
|
||||||
import filecmp
|
import filecmp
|
||||||
from shutil import copyfile
|
from shutil import copyfile
|
||||||
|
from utils import data_dir
|
||||||
from utils import get_image_extensions
|
from utils import get_image_extensions
|
||||||
from utils import get_config_param
|
from utils import get_config_param
|
||||||
from utils import no_of_accounts
|
from utils import no_of_accounts
|
||||||
|
@ -85,7 +86,7 @@ def html_login(translate: {},
|
||||||
'/' + login_image
|
'/' + login_image
|
||||||
if os.path.isfile(theme_image):
|
if os.path.isfile(theme_image):
|
||||||
login_image_filename = \
|
login_image_filename = \
|
||||||
base_dir + '/accounts/' + login_image
|
data_dir(base_dir) + '/' + login_image
|
||||||
if os.path.isfile(login_image_filename):
|
if os.path.isfile(login_image_filename):
|
||||||
if not filecmp.cmp(theme_image,
|
if not filecmp.cmp(theme_image,
|
||||||
login_image_filename):
|
login_image_filename):
|
||||||
|
@ -98,7 +99,7 @@ def html_login(translate: {},
|
||||||
if not login_image_filename:
|
if not login_image_filename:
|
||||||
for ext in extensions:
|
for ext in extensions:
|
||||||
login_image = 'login.' + ext
|
login_image = 'login.' + ext
|
||||||
image_filename = base_dir + '/accounts/' + login_image
|
image_filename = data_dir(base_dir) + '/' + login_image
|
||||||
if os.path.isfile(image_filename):
|
if os.path.isfile(image_filename):
|
||||||
login_image_filename = image_filename
|
login_image_filename = image_filename
|
||||||
break
|
break
|
||||||
|
@ -106,7 +107,7 @@ def html_login(translate: {},
|
||||||
# no login image found, so use the default
|
# no login image found, so use the default
|
||||||
if not login_image_filename:
|
if not login_image_filename:
|
||||||
login_image = 'login.png'
|
login_image = 'login.png'
|
||||||
login_image_filename = base_dir + '/accounts/' + login_image
|
login_image_filename = data_dir(base_dir) + '/' + login_image
|
||||||
source_image = base_dir + '/img/login.png'
|
source_image = base_dir + '/img/login.png'
|
||||||
copyfile(source_image, login_image_filename)
|
copyfile(source_image, login_image_filename)
|
||||||
|
|
||||||
|
@@ -129,10 +130,10 @@ def html_login(translate: {},
             '<p class="login-text">' + \
             translate['You will become the admin of this site.'] + \
             '</p>'
-    if os.path.isfile(base_dir + '/accounts/login.txt'):
+    dir_str = data_dir(base_dir)
+    if os.path.isfile(dir_str + '/login.txt'):
         # custom login message
-        with open(base_dir + '/accounts/login.txt', 'r',
-                  encoding='utf-8') as file:
+        with open(dir_str + '/login.txt', 'r', encoding='utf-8') as file:
             login_text = '<p class="login-text">' + file.read() + '</p>'
 
     css_filename = base_dir + '/epicyon-login.css'
|
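Taken together, the html_login hunks above resolve the login image in three
steps: an image supplied by the current theme is copied over the data
directory copy when the two differ, otherwise any existing login.<ext> in the
data directory is used, and finally the stock img/login.png is copied in as a
default. A condensed sketch of the last two steps, assuming the same
get_image_extensions() helper from utils and leaving out the theme comparison:

import os
from shutil import copyfile
from utils import data_dir, get_image_extensions


def resolve_login_image(base_dir: str) -> str:
    """Returns the filename of the login screen image, copying in the
    stock default if no custom image has been uploaded (sketch only)"""
    dir_str = data_dir(base_dir)
    for ext in get_image_extensions():
        image_filename = dir_str + '/login.' + ext
        if os.path.isfile(image_filename):
            return image_filename
    login_image_filename = dir_str + '/login.png'
    copyfile(base_dir + '/img/login.png', login_image_filename)
    return login_image_filename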
|
|
@ -9,6 +9,7 @@ __module_group__ = "Web Interface"
|
||||||
|
|
||||||
import os
|
import os
|
||||||
from shutil import copyfile
|
from shutil import copyfile
|
||||||
|
from utils import data_dir
|
||||||
from utils import get_config_param
|
from utils import get_config_param
|
||||||
from webapp_utils import html_header_with_website_markup
|
from webapp_utils import html_header_with_website_markup
|
||||||
from webapp_utils import html_footer
|
from webapp_utils import html_footer
|
||||||
|
@@ -23,13 +24,14 @@ def html_manual(base_dir: str, http_prefix: str,
     """
     manual_filename = base_dir + '/manual/manual.md'
     admin_nickname = get_config_param(base_dir, 'admin')
-    if os.path.isfile(base_dir + '/accounts/manual.md'):
-        manual_filename = base_dir + '/accounts/manual.md'
+    dir_str = data_dir(base_dir)
+    if os.path.isfile(dir_str + '/manual.md'):
+        manual_filename = dir_str + '/manual.md'
 
-    if os.path.isfile(base_dir + '/accounts/login-background-custom.jpg'):
-        if not os.path.isfile(base_dir + '/accounts/login-background.jpg'):
-            copyfile(base_dir + '/accounts/login-background-custom.jpg',
-                     base_dir + '/accounts/login-background.jpg')
+    if os.path.isfile(dir_str + '/login-background-custom.jpg'):
+        if not os.path.isfile(dir_str + '/login-background.jpg'):
+            copyfile(dir_str + '/login-background-custom.jpg',
+                     dir_str + '/login-background.jpg')
 
     manual_text = 'User Manual.'
     if os.path.isfile(manual_filename):
|
|
|
@ -8,6 +8,7 @@ __status__ = "Production"
|
||||||
__module_group__ = "Timeline"
|
__module_group__ = "Timeline"
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
from utils import data_dir
|
||||||
from utils import string_ends_with
|
from utils import string_ends_with
|
||||||
from utils import valid_url_prefix
|
from utils import valid_url_prefix
|
||||||
|
|
||||||
|
@ -16,7 +17,7 @@ def load_peertube_instances(base_dir: str, peertube_instances: []) -> None:
|
||||||
"""Loads peertube instances from file into the given list
|
"""Loads peertube instances from file into the given list
|
||||||
"""
|
"""
|
||||||
peertube_list = None
|
peertube_list = None
|
||||||
peertube_instances_filename = base_dir + '/accounts/peertube.txt'
|
peertube_instances_filename = data_dir(base_dir) + '/peertube.txt'
|
||||||
if os.path.isfile(peertube_instances_filename):
|
if os.path.isfile(peertube_instances_filename):
|
||||||
with open(peertube_instances_filename, 'r',
|
with open(peertube_instances_filename, 'r',
|
||||||
encoding='utf-8') as fp_inst:
|
encoding='utf-8') as fp_inst:
|
||||||
|
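load_peertube_instances() above now reads peertube.txt from the data
directory into the supplied list. A hedged sketch of the whole function,
assuming one instance domain per line, which is implied by the plain-text
filename but not shown in this hunk:

import os
from utils import data_dir


def load_peertube_instances(base_dir: str, peertube_instances: list) -> None:
    """Loads known peertube instance domains into the given list"""
    peertube_instances_filename = data_dir(base_dir) + '/peertube.txt'
    if not os.path.isfile(peertube_instances_filename):
        return
    with open(peertube_instances_filename, 'r',
              encoding='utf-8') as fp_inst:
        for line in fp_inst.read().split('\n'):
            domain = line.strip()
            if domain:
                peertube_instances.append(domain)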
|
|
@ -8,6 +8,7 @@ __status__ = "Production"
|
||||||
__module_group__ = "Moderation"
|
__module_group__ = "Moderation"
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
from utils import data_dir
|
||||||
from utils import get_url_from_post
|
from utils import get_url_from_post
|
||||||
from utils import remove_html
|
from utils import remove_html
|
||||||
from utils import is_artist
|
from utils import is_artist
|
||||||
|
@@ -361,7 +362,8 @@ def html_moderation_info(translate: {}, base_dir: str,
     info_shown = False
 
     accounts = []
-    for _, dirs, _ in os.walk(base_dir + '/accounts'):
+    dir_str = data_dir(base_dir)
+    for _, dirs, _ in os.walk(dir_str):
         for acct in dirs:
             if not is_account_dir(acct):
                 continue
||||||
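Enumerating accounts by walking the data directory is a pattern that recurs
in the moderation, shared items and skills search hunks: only the top level
of os.walk() is needed, since each account is a nickname@domain
subdirectory, and is_account_dir() (defined in utils elsewhere in the
codebase) filters out system entries. A short sketch:

import os
from utils import data_dir, is_account_dir


def list_account_handles(base_dir: str) -> list:
    """Returns the nickname@domain handles found in the data directory"""
    handles = []
    for _, dirs, _ in os.walk(data_dir(base_dir)):
        for acct in dirs:
            if is_account_dir(acct):
                handles.append(acct)
        break  # only the top level of the walk is needed
    return handles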
|
@ -382,9 +384,10 @@ def html_moderation_info(translate: {}, base_dir: str,
|
||||||
info_form += '<tr>\n'
|
info_form += '<tr>\n'
|
||||||
|
|
||||||
col = 0
|
col = 0
|
||||||
|
dir_str = data_dir(base_dir)
|
||||||
for acct in accounts:
|
for acct in accounts:
|
||||||
acct_nickname = acct.split('@')[0]
|
acct_nickname = acct.split('@')[0]
|
||||||
account_dir = os.path.join(base_dir + '/accounts', acct)
|
account_dir = os.path.join(dir_str, acct)
|
||||||
actor_json = load_json(account_dir + '.json')
|
actor_json = load_json(account_dir + '.json')
|
||||||
if not actor_json:
|
if not actor_json:
|
||||||
continue
|
continue
|
||||||
|
@ -420,7 +423,7 @@ def html_moderation_info(translate: {}, base_dir: str,
|
||||||
if len(accounts) > 10:
|
if len(accounts) > 10:
|
||||||
info_form += '</details>\n'
|
info_form += '</details>\n'
|
||||||
|
|
||||||
suspended_filename = base_dir + '/accounts/suspended.txt'
|
suspended_filename = dir_str + '/suspended.txt'
|
||||||
if os.path.isfile(suspended_filename):
|
if os.path.isfile(suspended_filename):
|
||||||
with open(suspended_filename, 'r', encoding='utf-8') as fp_sus:
|
with open(suspended_filename, 'r', encoding='utf-8') as fp_sus:
|
||||||
suspended_str = fp_sus.read()
|
suspended_str = fp_sus.read()
|
||||||
|
@@ -436,10 +439,9 @@ def html_moderation_info(translate: {}, base_dir: str,
         info_form += '</div>\n'
         info_shown = True
 
-    blocking_filename = base_dir + '/accounts/blocking.txt'
+    blocking_filename = dir_str + '/blocking.txt'
     if os.path.isfile(blocking_filename):
-        blocking_reasons_filename = \
-            base_dir + '/accounts/blocking_reasons.txt'
+        blocking_reasons_filename = dir_str + '/blocking_reasons.txt'
         blocking_reasons_exist = False
         if os.path.isfile(blocking_reasons_filename):
             blocking_reasons_exist = True
|
@ -475,7 +477,7 @@ def html_moderation_info(translate: {}, base_dir: str,
|
||||||
info_form += '</div>\n'
|
info_form += '</div>\n'
|
||||||
info_shown = True
|
info_shown = True
|
||||||
|
|
||||||
filters_filename = base_dir + '/accounts/filters.txt'
|
filters_filename = dir_str + '/filters.txt'
|
||||||
if os.path.isfile(filters_filename):
|
if os.path.isfile(filters_filename):
|
||||||
with open(filters_filename, 'r', encoding='utf-8') as fp_filt:
|
with open(filters_filename, 'r', encoding='utf-8') as fp_filt:
|
||||||
filtered_str = fp_filt.read()
|
filtered_str = fp_filt.read()
|
||||||
|
|
|
@ -12,6 +12,7 @@ from shutil import copyfile
|
||||||
from petnames import get_pet_name
|
from petnames import get_pet_name
|
||||||
from person import is_person_snoozed
|
from person import is_person_snoozed
|
||||||
from posts import is_moderator
|
from posts import is_moderator
|
||||||
|
from utils import data_dir
|
||||||
from utils import quote_toots_allowed
|
from utils import quote_toots_allowed
|
||||||
from utils import get_full_domain
|
from utils import get_full_domain
|
||||||
from utils import get_config_param
|
from utils import get_config_param
|
||||||
|
@@ -177,10 +178,11 @@ def html_person_options(default_timeline: str,
         return None
     options_domain_full = get_full_domain(options_domain, options_port)
 
-    if os.path.isfile(base_dir + '/accounts/options-background-custom.jpg'):
-        if not os.path.isfile(base_dir + '/accounts/options-background.jpg'):
-            copyfile(base_dir + '/accounts/options-background.jpg',
-                     base_dir + '/accounts/options-background.jpg')
+    dir_str = data_dir(base_dir)
+    if os.path.isfile(dir_str + '/options-background-custom.jpg'):
+        if not os.path.isfile(dir_str + '/options-background.jpg'):
+            copyfile(dir_str + '/options-background.jpg',
+                     dir_str + '/options-background.jpg')
 
     dormant = False
     offline = False
|
@ -558,7 +560,7 @@ def html_person_options(default_timeline: str,
|
||||||
(is_moderator(base_dir, nickname) and
|
(is_moderator(base_dir, nickname) and
|
||||||
not is_moderator(base_dir, options_nickname))):
|
not is_moderator(base_dir, options_nickname))):
|
||||||
newswire_blocked_filename = \
|
newswire_blocked_filename = \
|
||||||
base_dir + '/accounts/' + \
|
dir_str + '/' + \
|
||||||
options_nickname + '@' + options_domain + \
|
options_nickname + '@' + options_domain + \
|
||||||
'/.nonewswire'
|
'/.nonewswire'
|
||||||
checkbox_str = \
|
checkbox_str = \
|
||||||
|
@ -579,7 +581,7 @@ def html_person_options(default_timeline: str,
|
||||||
# the newswire
|
# the newswire
|
||||||
if newswire_posts_permitted:
|
if newswire_posts_permitted:
|
||||||
moderated_filename = \
|
moderated_filename = \
|
||||||
base_dir + '/accounts/' + \
|
dir_str + '/' + \
|
||||||
options_nickname + '@' + \
|
options_nickname + '@' + \
|
||||||
options_domain + '/.newswiremoderated'
|
options_domain + '/.newswiremoderated'
|
||||||
checkbox_str = \
|
checkbox_str = \
|
||||||
|
|
|
@ -12,6 +12,7 @@ import html
|
||||||
import datetime
|
import datetime
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
from shutil import copyfile
|
from shutil import copyfile
|
||||||
|
from utils import data_dir
|
||||||
from utils import get_url_from_post
|
from utils import get_url_from_post
|
||||||
from utils import get_config_param
|
from utils import get_config_param
|
||||||
from utils import remove_html
|
from utils import remove_html
|
||||||
|
@ -310,10 +311,11 @@ def html_podcast_episode(translate: {},
|
||||||
if os.path.isfile(base_dir + '/podcast.css'):
|
if os.path.isfile(base_dir + '/podcast.css'):
|
||||||
css_filename = base_dir + '/podcast.css'
|
css_filename = base_dir + '/podcast.css'
|
||||||
|
|
||||||
if os.path.isfile(base_dir + '/accounts/podcast-background-custom.jpg'):
|
dir_str = data_dir(base_dir)
|
||||||
if not os.path.isfile(base_dir + '/accounts/podcast-background.jpg'):
|
if os.path.isfile(dir_str + '/podcast-background-custom.jpg'):
|
||||||
copyfile(base_dir + '/accounts/podcast-background.jpg',
|
if not os.path.isfile(dir_str + '/podcast-background.jpg'):
|
||||||
base_dir + '/accounts/podcast-background.jpg')
|
copyfile(dir_str + '/podcast-background.jpg',
|
||||||
|
dir_str + '/podcast-background.jpg')
|
||||||
|
|
||||||
instance_title = get_config_param(base_dir, 'instanceTitle')
|
instance_title = get_config_param(base_dir, 'instanceTitle')
|
||||||
podcast_str = \
|
podcast_str = \
|
||||||
|
|
|
@ -24,6 +24,7 @@ from posts import post_is_muted
|
||||||
from posts import get_person_box
|
from posts import get_person_box
|
||||||
from posts import download_announce
|
from posts import download_announce
|
||||||
from posts import populate_replies_json
|
from posts import populate_replies_json
|
||||||
|
from utils import data_dir
|
||||||
from utils import get_quote_toot_url
|
from utils import get_quote_toot_url
|
||||||
from utils import get_post_attachments
|
from utils import get_post_attachments
|
||||||
from utils import get_url_from_post
|
from utils import get_url_from_post
|
||||||
|
@ -1905,7 +1906,7 @@ def _substitute_onion_domains(base_dir: str, content: str) -> str:
|
||||||
"twitter.com": twitter_onion
|
"twitter.com": twitter_onion
|
||||||
}
|
}
|
||||||
|
|
||||||
onion_domains_filename = base_dir + '/accounts/onion_domains.txt'
|
onion_domains_filename = data_dir(base_dir) + '/onion_domains.txt'
|
||||||
if os.path.isfile(onion_domains_filename):
|
if os.path.isfile(onion_domains_filename):
|
||||||
onion_domains_list = []
|
onion_domains_list = []
|
||||||
try:
|
try:
|
||||||
|
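_substitute_onion_domains() above starts from a built-in clearnet to onion
mapping and extends it from onion_domains.txt in the data directory before
rewriting matching domains in post content. The file parsing is outside this
hunk; a sketch of just the substitution step over whatever mapping results:

def substitute_onion_domains(content: str, onion_domains: dict) -> str:
    """Replaces each clearnet domain with its onion equivalent (sketch)"""
    for clearnet_domain, onion_domain in onion_domains.items():
        if clearnet_domain in content:
            content = content.replace(clearnet_domain, onion_domain)
    return content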
|
|
@ -10,6 +10,7 @@ __module_group__ = "Web Interface"
|
||||||
import os
|
import os
|
||||||
from pprint import pprint
|
from pprint import pprint
|
||||||
from webfinger import webfinger_handle
|
from webfinger import webfinger_handle
|
||||||
|
from utils import data_dir
|
||||||
from utils import is_premium_account
|
from utils import is_premium_account
|
||||||
from utils import time_days_ago
|
from utils import time_days_ago
|
||||||
from utils import uninvert_text
|
from utils import uninvert_text
|
||||||
|
@ -960,7 +961,7 @@ def html_profile(signing_priv_key_pem: str,
|
||||||
"""
|
"""
|
||||||
show_moved_accounts = False
|
show_moved_accounts = False
|
||||||
if authorized:
|
if authorized:
|
||||||
moved_accounts_filename = base_dir + '/accounts/actors_moved.txt'
|
moved_accounts_filename = data_dir(base_dir) + '/actors_moved.txt'
|
||||||
if os.path.isfile(moved_accounts_filename):
|
if os.path.isfile(moved_accounts_filename):
|
||||||
show_moved_accounts = True
|
show_moved_accounts = True
|
||||||
|
|
||||||
|
@@ -1826,7 +1827,8 @@ def _html_profile_shares(actor: str, translate: {},
 def _grayscale_enabled(base_dir: str) -> bool:
     """Is grayscale UI enabled?
     """
-    return os.path.isfile(base_dir + '/accounts/.grayscale')
+    dir_str = data_dir(base_dir)
+    return os.path.isfile(dir_str + '/.grayscale')
 
 
 def _html_themes_dropdown(base_dir: str, translate: {}) -> str:
|
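_grayscale_enabled() now just tests for a .grayscale marker file inside the
data directory. The complementary toggle would follow the same flag-file
convention; set_grayscale below is a hypothetical helper, not code from this
commit:

import os
from utils import data_dir


def set_grayscale(base_dir: str, enabled: bool) -> None:
    """Enables or disables the grayscale UI via the .grayscale flag file"""
    flag_filename = data_dir(base_dir) + '/.grayscale'
    if enabled:
        with open(flag_filename, 'w+', encoding='utf-8') as fp_flag:
            fp_flag.write('\n')
    elif os.path.isfile(flag_filename):
        os.remove(flag_filename)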
@ -2051,7 +2053,7 @@ def _html_edit_profile_instance(base_dir: str, translate: {},
|
||||||
|
|
||||||
# site moderators
|
# site moderators
|
||||||
moderators = ''
|
moderators = ''
|
||||||
moderators_file = base_dir + '/accounts/moderators.txt'
|
moderators_file = data_dir(base_dir) + '/moderators.txt'
|
||||||
if os.path.isfile(moderators_file):
|
if os.path.isfile(moderators_file):
|
||||||
with open(moderators_file, 'r', encoding='utf-8') as mod_file:
|
with open(moderators_file, 'r', encoding='utf-8') as mod_file:
|
||||||
moderators = mod_file.read()
|
moderators = mod_file.read()
|
||||||
|
@ -2061,7 +2063,7 @@ def _html_edit_profile_instance(base_dir: str, translate: {},
|
||||||
'moderators', moderators, 200, '', False)
|
'moderators', moderators, 200, '', False)
|
||||||
# site editors
|
# site editors
|
||||||
editors = ''
|
editors = ''
|
||||||
editors_file = base_dir + '/accounts/editors.txt'
|
editors_file = data_dir(base_dir) + '/editors.txt'
|
||||||
if os.path.isfile(editors_file):
|
if os.path.isfile(editors_file):
|
||||||
with open(editors_file, 'r', encoding='utf-8') as edit_file:
|
with open(editors_file, 'r', encoding='utf-8') as edit_file:
|
||||||
editors = edit_file.read()
|
editors = edit_file.read()
|
||||||
|
@ -2072,7 +2074,7 @@ def _html_edit_profile_instance(base_dir: str, translate: {},
|
||||||
|
|
||||||
# counselors
|
# counselors
|
||||||
counselors = ''
|
counselors = ''
|
||||||
counselors_file = base_dir + '/accounts/counselors.txt'
|
counselors_file = data_dir(base_dir) + '/counselors.txt'
|
||||||
if os.path.isfile(counselors_file):
|
if os.path.isfile(counselors_file):
|
||||||
with open(counselors_file, 'r', encoding='utf-8') as co_file:
|
with open(counselors_file, 'r', encoding='utf-8') as co_file:
|
||||||
counselors = co_file.read()
|
counselors = co_file.read()
|
||||||
|
@ -2082,7 +2084,7 @@ def _html_edit_profile_instance(base_dir: str, translate: {},
|
||||||
|
|
||||||
# artists
|
# artists
|
||||||
artists = ''
|
artists = ''
|
||||||
artists_file = base_dir + '/accounts/artists.txt'
|
artists_file = data_dir(base_dir) + '/artists.txt'
|
||||||
if os.path.isfile(artists_file):
|
if os.path.isfile(artists_file):
|
||||||
with open(artists_file, 'r', encoding='utf-8') as art_file:
|
with open(artists_file, 'r', encoding='utf-8') as art_file:
|
||||||
artists = art_file.read()
|
artists = art_file.read()
|
||||||
|
@ -2092,7 +2094,7 @@ def _html_edit_profile_instance(base_dir: str, translate: {},
|
||||||
|
|
||||||
# site devops
|
# site devops
|
||||||
devops = ''
|
devops = ''
|
||||||
devops_file = base_dir + '/accounts/devops.txt'
|
devops_file = data_dir(base_dir) + '/devops.txt'
|
||||||
if os.path.isfile(devops_file):
|
if os.path.isfile(devops_file):
|
||||||
with open(devops_file, 'r', encoding='utf-8') as edit_file:
|
with open(devops_file, 'r', encoding='utf-8') as edit_file:
|
||||||
devops = edit_file.read()
|
devops = edit_file.read()
|
||||||
|
|
|
@ -10,6 +10,7 @@ __module_group__ = "Web Interface"
|
||||||
import os
|
import os
|
||||||
from shutil import copyfile
|
from shutil import copyfile
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
|
from utils import data_dir
|
||||||
from utils import get_post_attachments
|
from utils import get_post_attachments
|
||||||
from utils import get_url_from_post
|
from utils import get_url_from_post
|
||||||
from utils import date_from_string_format
|
from utils import date_from_string_format
|
||||||
|
@ -306,7 +307,8 @@ def html_search_shared_items(translate: {},
|
||||||
'<center><h1>' + \
|
'<center><h1>' + \
|
||||||
'<a href="' + actor + '/search">' + title_str + '</a></h1></center>'
|
'<a href="' + actor + '/search">' + title_str + '</a></h1></center>'
|
||||||
results_exist = False
|
results_exist = False
|
||||||
for _, dirs, files in os.walk(base_dir + '/accounts'):
|
dir_str = data_dir(base_dir)
|
||||||
|
for _, dirs, files in os.walk(dir_str):
|
||||||
for handle in dirs:
|
for handle in dirs:
|
||||||
if not is_account_dir(handle):
|
if not is_account_dir(handle):
|
||||||
continue
|
continue
|
||||||
|
@@ -547,7 +549,8 @@ def html_skills_search(actor: str, translate: {}, base_dir: str,
 
     results = []
     # search instance accounts
-    for subdir, _, files in os.walk(base_dir + '/accounts/'):
+    dir_str = data_dir(base_dir)
+    for subdir, _, files in os.walk(dir_str + '/'):
         for fname in files:
             if not fname.endswith('.json'):
                 continue
|
|
|
@ -9,6 +9,7 @@ __module_group__ = "Web Interface"
|
||||||
|
|
||||||
import os
|
import os
|
||||||
from shutil import copyfile
|
from shutil import copyfile
|
||||||
|
from utils import data_dir
|
||||||
from utils import get_config_param
|
from utils import get_config_param
|
||||||
from webapp_utils import html_header_with_website_markup
|
from webapp_utils import html_header_with_website_markup
|
||||||
from webapp_utils import html_footer
|
from webapp_utils import html_footer
|
||||||
|
@@ -23,13 +24,14 @@ def html_specification(base_dir: str, http_prefix: str,
     """
     specification_filename = base_dir + '/specification/activitypub.md'
     admin_nickname = get_config_param(base_dir, 'admin')
-    if os.path.isfile(base_dir + '/accounts/activitypub.md'):
-        specification_filename = base_dir + '/accounts/activitypub.md'
+    dir_str = data_dir(base_dir)
+    if os.path.isfile(dir_str + '/activitypub.md'):
+        specification_filename = dir_str + '/activitypub.md'
 
-    if os.path.isfile(base_dir + '/accounts/login-background-custom.jpg'):
-        if not os.path.isfile(base_dir + '/accounts/login-background.jpg'):
-            copyfile(base_dir + '/accounts/login-background-custom.jpg',
-                     base_dir + '/accounts/login-background.jpg')
+    if os.path.isfile(dir_str + '/login-background-custom.jpg'):
+        if not os.path.isfile(dir_str + '/login-background.jpg'):
+            copyfile(dir_str + '/login-background-custom.jpg',
+                     dir_str + '/login-background.jpg')
 
     specification_text = 'ActivityPub Protocol Specification.'
     if os.path.isfile(specification_filename):
|
|
|
@ -8,6 +8,7 @@ __status__ = "Production"
|
||||||
__module_group__ = "Web Interface"
|
__module_group__ = "Web Interface"
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
from utils import data_dir
|
||||||
from utils import load_json
|
from utils import load_json
|
||||||
from utils import get_config_param
|
from utils import get_config_param
|
||||||
from webapp_utils import html_header_with_external_style
|
from webapp_utils import html_header_with_external_style
|
||||||
|
@ -178,7 +179,7 @@ def html_theme_designer(base_dir: str,
|
||||||
theme_json = load_json(theme_filename)
|
theme_json = load_json(theme_filename)
|
||||||
|
|
||||||
# set custom theme parameters
|
# set custom theme parameters
|
||||||
custom_variables_file = base_dir + '/accounts/theme.json'
|
custom_variables_file = data_dir(base_dir) + '/theme.json'
|
||||||
if os.path.isfile(custom_variables_file):
|
if os.path.isfile(custom_variables_file):
|
||||||
custom_theme_params = load_json(custom_variables_file, 0)
|
custom_theme_params = load_json(custom_variables_file, 0)
|
||||||
if custom_theme_params:
|
if custom_theme_params:
|
||||||
|
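The theme designer hunk above loads theme.json from the data directory and,
when present, overlays its values onto the parameters of the installed theme.
Only the filename and the load_json() call are visible here; a sketch of the
overlay, assuming the theme parameters form a flat dictionary of variable
names to values:

from utils import data_dir, load_json


def apply_custom_theme_params(base_dir: str, theme_json: dict) -> dict:
    """Overlays admin-edited values from theme.json onto the theme defaults"""
    custom_variables_file = data_dir(base_dir) + '/theme.json'
    custom_theme_params = load_json(custom_variables_file, 0)
    if custom_theme_params:
        for variable_name, value in custom_theme_params.items():
            theme_json[variable_name] = value
    return theme_json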
|
|
@ -10,6 +10,7 @@ __module_group__ = "Timeline"
|
||||||
import os
|
import os
|
||||||
import time
|
import time
|
||||||
from shutil import copyfile
|
from shutil import copyfile
|
||||||
|
from utils import data_dir
|
||||||
from utils import is_artist
|
from utils import is_artist
|
||||||
from utils import dangerous_markup
|
from utils import dangerous_markup
|
||||||
from utils import get_config_param
|
from utils import get_config_param
|
||||||
|
@ -60,7 +61,7 @@ def _get_help_for_timeline(base_dir: str, box_name: str) -> str:
|
||||||
"""Shows help text for the given timeline
|
"""Shows help text for the given timeline
|
||||||
"""
|
"""
|
||||||
# get the filename for help for this timeline
|
# get the filename for help for this timeline
|
||||||
help_filename = base_dir + '/accounts/help_' + box_name + '.md'
|
help_filename = data_dir(base_dir) + '/help_' + box_name + '.md'
|
||||||
if not os.path.isfile(help_filename):
|
if not os.path.isfile(help_filename):
|
||||||
language = \
|
language = \
|
||||||
get_config_param(base_dir, 'language')
|
get_config_param(base_dir, 'language')
|
||||||
|
|
|
@ -9,6 +9,7 @@ __module_group__ = "Web Interface"
|
||||||
|
|
||||||
import os
|
import os
|
||||||
from shutil import copyfile
|
from shutil import copyfile
|
||||||
|
from utils import data_dir
|
||||||
from utils import get_config_param
|
from utils import get_config_param
|
||||||
from utils import local_actor_url
|
from utils import local_actor_url
|
||||||
from webapp_utils import html_header_with_external_style
|
from webapp_utils import html_header_with_external_style
|
||||||
|
@@ -21,19 +22,19 @@ def html_terms_of_service(base_dir: str,
     """Show the terms of service screen
     """
     admin_nickname = get_config_param(base_dir, 'admin')
-    if not os.path.isfile(base_dir + '/accounts/tos.md'):
+    dir_str = data_dir(base_dir)
+    if not os.path.isfile(dir_str + '/tos.md'):
         copyfile(base_dir + '/default_tos.md',
-                 base_dir + '/accounts/tos.md')
+                 dir_str + '/tos.md')
 
-    if os.path.isfile(base_dir + '/accounts/login-background-custom.jpg'):
-        if not os.path.isfile(base_dir + '/accounts/login-background.jpg'):
-            copyfile(base_dir + '/accounts/login-background-custom.jpg',
-                     base_dir + '/accounts/login-background.jpg')
+    if os.path.isfile(dir_str + '/login-background-custom.jpg'):
+        if not os.path.isfile(dir_str + '/login-background.jpg'):
+            copyfile(dir_str + '/login-background-custom.jpg',
+                     dir_str + '/login-background.jpg')
 
     tos_text = 'Terms of Service go here.'
-    if os.path.isfile(base_dir + '/accounts/tos.md'):
-        with open(base_dir + '/accounts/tos.md', 'r',
-                  encoding='utf-8') as file:
+    if os.path.isfile(dir_str + '/tos.md'):
+        with open(dir_str + '/tos.md', 'r', encoding='utf-8') as file:
             tos_text = markdown_to_html(file.read())
 
     tos_form = ''
|
|
|
@ -12,6 +12,7 @@ from shutil import copyfile
|
||||||
from collections import OrderedDict
|
from collections import OrderedDict
|
||||||
from session import get_json
|
from session import get_json
|
||||||
from session import get_json_valid
|
from session import get_json_valid
|
||||||
|
from utils import data_dir
|
||||||
from utils import string_contains
|
from utils import string_contains
|
||||||
from utils import get_post_attachments
|
from utils import get_post_attachments
|
||||||
from utils import image_mime_types_dict
|
from utils import image_mime_types_dict
|
||||||
|
@@ -471,7 +472,8 @@ def shares_timeline_json(actor: str, page_number: int, items_per_page: int,
     by sharing a large number of things
     """
     all_shares_json = {}
-    for _, dirs, files in os.walk(base_dir + '/accounts'):
+    dir_str = data_dir(base_dir)
+    for _, dirs, files in os.walk(dir_str):
         for handle in dirs:
             if not is_account_dir(handle):
                 continue
|
@@ -2093,10 +2095,11 @@ def set_custom_background(base_dir: str, background: str,
     if os.path.isfile(base_dir + '/img/' + background + '.' + ext):
         if not new_background:
             new_background = background
-        if not os.path.isfile(base_dir + '/accounts/' +
+        dir_str = data_dir(base_dir)
+        if not os.path.isfile(dir_str + '/' +
                               new_background + '.' + ext):
             copyfile(base_dir + '/img/' + background + '.' + ext,
-                     base_dir + '/accounts/' + new_background + '.' + ext)
+                     dir_str + '/' + new_background + '.' + ext)
         return ext
     return None
 
|
@ -2109,7 +2112,7 @@ def html_common_emoji(base_dir: str, no_of_emoji: int) -> str:
|
||||||
emojis_filename = base_dir + '/emoji/default_emoji.json'
|
emojis_filename = base_dir + '/emoji/default_emoji.json'
|
||||||
emojis_json = load_json(emojis_filename)
|
emojis_json = load_json(emojis_filename)
|
||||||
|
|
||||||
common_emoji_filename = base_dir + '/accounts/common_emoji.txt'
|
common_emoji_filename = data_dir(base_dir) + '/common_emoji.txt'
|
||||||
if not os.path.isfile(common_emoji_filename):
|
if not os.path.isfile(common_emoji_filename):
|
||||||
return ''
|
return ''
|
||||||
common_emoji = None
|
common_emoji = None
|
||||||
|
@ -2365,7 +2368,7 @@ def get_buy_links(post_json_object: str, translate: {}, buy_sites: {}) -> {}:
|
||||||
def load_buy_sites(base_dir: str) -> {}:
|
def load_buy_sites(base_dir: str) -> {}:
|
||||||
"""Loads domains from which buying is permitted
|
"""Loads domains from which buying is permitted
|
||||||
"""
|
"""
|
||||||
buy_sites_filename = base_dir + '/accounts/buy_sites.json'
|
buy_sites_filename = data_dir(base_dir) + '/buy_sites.json'
|
||||||
if os.path.isfile(buy_sites_filename):
|
if os.path.isfile(buy_sites_filename):
|
||||||
buy_sites_json = load_json(buy_sites_filename)
|
buy_sites_json = load_json(buy_sites_filename)
|
||||||
if buy_sites_json:
|
if buy_sites_json:
|
||||||
|
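load_buy_sites() above reads buy_sites.json from the data directory as the
allowlist of shopping or donation sites for buy links. A sketch with the same
structure, returning an empty dictionary when the file is absent; the internal
layout of the JSON is not shown in the hunk:

import os
from utils import data_dir, load_json


def load_buy_sites(base_dir: str) -> dict:
    """Loads the sites from which buying links are permitted"""
    buy_sites_filename = data_dir(base_dir) + '/buy_sites.json'
    if os.path.isfile(buy_sites_filename):
        buy_sites_json = load_json(buy_sites_filename)
        if buy_sites_json:
            return buy_sites_json
    return {}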
|
|
@ -9,6 +9,7 @@ __module_group__ = "Onboarding"
|
||||||
|
|
||||||
import os
|
import os
|
||||||
from shutil import copyfile
|
from shutil import copyfile
|
||||||
|
from utils import data_dir
|
||||||
from utils import get_config_param
|
from utils import get_config_param
|
||||||
from utils import remove_html
|
from utils import remove_html
|
||||||
from utils import acct_dir
|
from utils import acct_dir
|
||||||
|
@@ -51,13 +52,14 @@ def html_welcome_screen(base_dir: str, nickname: str,
     """Returns the welcome screen
     """
     # set a custom background for the welcome screen
-    if os.path.isfile(base_dir + '/accounts/welcome-background-custom.jpg'):
-        if not os.path.isfile(base_dir + '/accounts/welcome-background.jpg'):
-            copyfile(base_dir + '/accounts/welcome-background-custom.jpg',
-                     base_dir + '/accounts/welcome-background.jpg')
+    dir_str = data_dir(base_dir)
+    if os.path.isfile(dir_str + '/welcome-background-custom.jpg'):
+        if not os.path.isfile(dir_str + '/welcome-background.jpg'):
+            copyfile(dir_str + '/welcome-background-custom.jpg',
+                     dir_str + '/welcome-background.jpg')
 
     welcome_text = 'Welcome to Epicyon'
-    welcome_filename = base_dir + '/accounts/' + curr_screen + '.md'
+    welcome_filename = dir_str + '/' + curr_screen + '.md'
     if not os.path.isfile(welcome_filename):
         default_filename = None
         if theme_name:
|
|
|
@ -9,6 +9,7 @@ __module_group__ = "Onboarding"
|
||||||
|
|
||||||
import os
|
import os
|
||||||
from shutil import copyfile
|
from shutil import copyfile
|
||||||
|
from utils import data_dir
|
||||||
from utils import remove_html
|
from utils import remove_html
|
||||||
from utils import get_config_param
|
from utils import get_config_param
|
||||||
from webapp_utils import html_header_with_external_style
|
from webapp_utils import html_header_with_external_style
|
||||||
|
@@ -22,13 +23,14 @@ def html_welcome_final(base_dir: str, nickname: str,
     """Returns the final welcome screen after first login
     """
     # set a custom background for the welcome screen
-    if os.path.isfile(base_dir + '/accounts/welcome-background-custom.jpg'):
-        if not os.path.isfile(base_dir + '/accounts/welcome-background.jpg'):
-            copyfile(base_dir + '/accounts/welcome-background-custom.jpg',
-                     base_dir + '/accounts/welcome-background.jpg')
+    dir_str = data_dir(base_dir)
+    if os.path.isfile(dir_str + '/welcome-background-custom.jpg'):
+        if not os.path.isfile(dir_str + '/welcome-background.jpg'):
+            copyfile(dir_str + '/welcome-background-custom.jpg',
+                     dir_str + '/welcome-background.jpg')
 
     final_text = 'Welcome to Epicyon'
-    final_filename = base_dir + '/accounts/welcome_final.md'
+    final_filename = dir_str + '/welcome_final.md'
     if not os.path.isfile(final_filename):
         default_filename = None
         if theme_name:
|
|
|
@ -9,6 +9,7 @@ __module_group__ = "Onboarding"
|
||||||
|
|
||||||
import os
|
import os
|
||||||
from shutil import copyfile
|
from shutil import copyfile
|
||||||
|
from utils import data_dir
|
||||||
from utils import remove_html
|
from utils import remove_html
|
||||||
from utils import load_json
|
from utils import load_json
|
||||||
from utils import get_config_param
|
from utils import get_config_param
|
||||||
|
@@ -29,13 +30,14 @@ def html_welcome_profile(base_dir: str, nickname: str, domain: str,
     """Returns the welcome profile screen to set avatar and bio
     """
     # set a custom background for the welcome screen
-    if os.path.isfile(base_dir + '/accounts/welcome-background-custom.jpg'):
-        if not os.path.isfile(base_dir + '/accounts/welcome-background.jpg'):
-            copyfile(base_dir + '/accounts/welcome-background-custom.jpg',
-                     base_dir + '/accounts/welcome-background.jpg')
+    dir_str = data_dir(base_dir)
+    if os.path.isfile(dir_str + '/welcome-background-custom.jpg'):
+        if not os.path.isfile(dir_str + '/welcome-background.jpg'):
+            copyfile(dir_str + '/welcome-background-custom.jpg',
+                     dir_str + '/welcome-background.jpg')
 
     profile_text = 'Welcome to Epicyon'
-    profile_filename = base_dir + '/accounts/welcome_profile.md'
+    profile_filename = dir_str + '/welcome_profile.md'
     if not os.path.isfile(profile_filename):
         default_filename = None
         if theme_name:
|
|
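Because the point of the change is that every module now reaches account data
through data_dir() rather than a hard-coded '/accounts' literal, a quick scan
of the sources shows whether any call sites were missed. This is a throwaway
verification sketch, not part of the commit; the one expected match is the
data_dir() definition itself in utils.py:

import os


def find_hardcoded_accounts_paths(source_dir: str) -> list:
    """Lists source lines that still build paths from the '/accounts' literal"""
    matches = []
    for filename in sorted(os.listdir(source_dir)):
        if not filename.endswith('.py'):
            continue
        filepath = os.path.join(source_dir, filename)
        with open(filepath, 'r', encoding='utf-8') as fp_src:
            for line_number, line in enumerate(fp_src, start=1):
                if "'/accounts" in line:
                    matches.append(filename + ':' + str(line_number) +
                                   ' ' + line.strip())
    return matches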