mirror of https://gitlab.com/bashrc2/epicyon
Breaking up the giant daemon
parent b3277afcad
commit 3f0e166f76
File diff suppressed because it is too large

daemon_utils.py (new file)
@@ -0,0 +1,789 @@
__filename__ = "daemon_utils.py"
__author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.5.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Core"

import time
from auth import authorize
from threads import thread_with_trace
from threads import begin_thread
from outbox import post_message_to_outbox
from city import get_spoofed_city
from httpcodes import http_404
from httpcodes import http_503
from httpcodes import http_400
from httpcodes import write2
from context import has_valid_context
from inbox import save_post_to_inbox_queue
from inbox import clear_queue_items
from blocking import update_blocked_cache
from blocking import is_blocked_nickname
from blocking import is_blocked_domain
from content import contains_invalid_local_links
from content import valid_url_lengths
from posts import add_to_field
from utils import get_instance_url
from utils import remove_html
from utils import get_locked_account
from utils import post_summary_contains_links
from utils import local_only_is_local
from utils import get_local_network_addresses
from utils import has_object_dict
from utils import get_nickname_from_actor
from utils import get_domain_from_actor
from utils import get_actor_from_post
from utils import has_actor
from utils import resembles_url
from utils import is_system_account
from cache import check_for_changed_actor
from cache import get_person_from_cache
from donate import get_donation_url
from donate import get_website
from donate import get_gemini_link
from xmpp import get_xmpp_address
from matrix import get_matrix_address
from ssb import get_ssb_address
from blog import get_blog_address
from tox import get_tox_address
from briar import get_briar_address
from cwtch import get_cwtch_address
from pgp import get_pgp_fingerprint
from pgp import get_email_address
from pgp import get_pgp_pub_key
from enigma import get_enigma_pub_key
from git import get_repo_url
from webapp_person_options import html_person_options
from httpheaders import redirect_headers
from httpheaders import set_headers
from fitnessFunctions import fitness_performance


def post_to_outbox(self, message_json: {}, version: str,
                   post_to_nickname: str,
                   curr_session, proxy_type: str) -> bool:
    """post is received by the outbox
    Client to server message post
    https://www.w3.org/TR/activitypub/#client-to-server-outbox-delivery
    """
    if not curr_session:
        return False

    city = self.server.city

    if post_to_nickname:
        print('Posting to nickname ' + post_to_nickname)
        self.post_to_nickname = post_to_nickname
        city = get_spoofed_city(self.server.city,
                                self.server.base_dir,
                                post_to_nickname, self.server.domain)

    shared_items_federated_domains = \
        self.server.shared_items_federated_domains
    shared_item_federation_tokens = \
        self.server.shared_item_federation_tokens
    return post_message_to_outbox(curr_session,
                                  self.server.translate,
                                  message_json, self.post_to_nickname,
                                  self.server, self.server.base_dir,
                                  self.server.http_prefix,
                                  self.server.domain,
                                  self.server.domain_full,
                                  self.server.onion_domain,
                                  self.server.i2p_domain,
                                  self.server.port,
                                  self.server.recent_posts_cache,
                                  self.server.followers_threads,
                                  self.server.federation_list,
                                  self.server.send_threads,
                                  self.server.postLog,
                                  self.server.cached_webfingers,
                                  self.server.person_cache,
                                  self.server.allow_deletion,
                                  proxy_type, version,
                                  self.server.debug,
                                  self.server.yt_replace_domain,
                                  self.server.twitter_replacement_domain,
                                  self.server.show_published_date_only,
                                  self.server.allow_local_network_access,
                                  city, self.server.system_language,
                                  shared_items_federated_domains,
                                  shared_item_federation_tokens,
                                  self.server.low_bandwidth,
                                  self.server.signing_priv_key_pem,
                                  self.server.peertube_instances,
                                  self.server.theme_name,
                                  self.server.max_like_count,
                                  self.server.max_recent_posts,
                                  self.server.cw_lists,
                                  self.server.lists_enabled,
                                  self.server.content_license_url,
                                  self.server.dogwhistles,
                                  self.server.min_images_for_accounts,
                                  self.server.buy_sites,
                                  self.server.sites_unavailable,
                                  self.server.max_recent_books,
                                  self.server.books_cache,
                                  self.server.max_cached_readers,
                                  self.server.auto_cw_cache,
                                  self.server.block_federated)


def _get_outbox_thread_index(self, nickname: str,
                             max_outbox_threads_per_account: int) -> int:
    """Returns the outbox thread index for the given account
    This is a ring buffer used to store the thread objects which
    are sending out posts
    """
    account_outbox_thread_name = nickname
    if not account_outbox_thread_name:
        account_outbox_thread_name = '*'

    # create the buffer for the given account
    if not self.server.outboxThread.get(account_outbox_thread_name):
        self.server.outboxThread[account_outbox_thread_name] = \
            [None] * max_outbox_threads_per_account
        self.server.outbox_thread_index[account_outbox_thread_name] = 0
        return 0

    # increment the ring buffer index
    index = self.server.outbox_thread_index[account_outbox_thread_name] + 1
    if index >= max_outbox_threads_per_account:
        index = 0

    self.server.outbox_thread_index[account_outbox_thread_name] = index

    # remove any existing thread from the current index in the buffer
    acct = account_outbox_thread_name
    if self.server.outboxThread.get(acct):
        if len(self.server.outboxThread[acct]) > index:
            try:
                if self.server.outboxThread[acct][index].is_alive():
                    self.server.outboxThread[acct][index].kill()
            except BaseException:
                pass
    return index


def post_to_outbox_thread(self, message_json: {},
                          curr_session, proxy_type: str) -> bool:
    """Creates a thread to send a post
    """
    account_outbox_thread_name = self.post_to_nickname
    if not account_outbox_thread_name:
        account_outbox_thread_name = '*'

    index = _get_outbox_thread_index(self, account_outbox_thread_name, 8)

    print('Creating outbox thread ' +
          account_outbox_thread_name + '/' +
          str(self.server.outbox_thread_index[account_outbox_thread_name]))
    print('THREAD: _post_to_outbox')
    self.server.outboxThread[account_outbox_thread_name][index] = \
        thread_with_trace(target=post_to_outbox,
                          args=(self, message_json.copy(),
                                self.server.project_version, None,
                                curr_session, proxy_type),
                          daemon=True)
    print('Starting outbox thread')
    outbox_thread = \
        self.server.outboxThread[account_outbox_thread_name][index]
    begin_thread(outbox_thread, '_post_to_outbox_thread')
    return True


def _detect_mitm(self) -> bool:
    """Detect if a request contains a MiTM
    """
    mitm_domains = ['cloudflare']
    # look for domains within these headers
    check_headers = (
        'Server', 'Report-To', 'Report-to', 'report-to',
        'Expect-CT', 'Expect-Ct', 'expect-ct'
    )
    for interloper in mitm_domains:
        for header_name in check_headers:
            if self.headers.get(header_name):
                if interloper in self.headers[header_name]:
                    print('MITM: ' + header_name + ' = ' +
                          self.headers[header_name])
                    return True
    # The presence of these headers on their own indicates a MiTM
    mitm_headers = (
        'CF-Connecting-IP', 'CF-RAY', 'CF-IPCountry', 'CF-Visitor',
        'CDN-Loop', 'CF-Worker', 'CF-Cache-Status'
    )
    for header_name in mitm_headers:
        if self.headers.get(header_name):
            print('MITM: ' + header_name + ' = ' +
                  self.headers[header_name])
            return True
        if self.headers.get(header_name.lower()):
            print('MITM: ' + header_name + ' = ' +
                  self.headers[header_name.lower()])
            return True
    return False


def update_inbox_queue(self, nickname: str, message_json: {},
                       message_bytes: str, debug: bool) -> int:
    """Update the inbox queue
    """
    if debug:
        print('INBOX: checking inbox queue restart')
    if self.server.restart_inbox_queue_in_progress:
        http_503(self)
        print('INBOX: ' +
              'message arrived but currently restarting inbox queue')
        self.server.postreq_busy = False
        return 2

    # check that the incoming message has a fully recognized
    # linked data context
    if debug:
        print('INBOX: checking valid context')
    if not has_valid_context(message_json):
        print('INBOX: ' +
              'message arriving at inbox queue has no valid context ' +
              str(message_json))
        http_400(self)
        self.server.postreq_busy = False
        return 3

    # check for blocked domains so that they can be rejected early
    if debug:
        print('INBOX: checking for actor')
    message_domain = None
    if not has_actor(message_json, self.server.debug):
        print('INBOX: message arriving at inbox queue has no actor')
        http_400(self)
        self.server.postreq_busy = False
        return 3

    # actor should be a string
    if debug:
        print('INBOX: checking that actor is string')
    actor_url = get_actor_from_post(message_json)
    if not isinstance(actor_url, str):
        print('INBOX: ' +
              'actor should be a string ' + str(actor_url))
        http_400(self)
        self.server.postreq_busy = False
        return 3

    # check that some additional fields are strings
    if debug:
        print('INBOX: checking fields 1')
    string_fields = ('id', 'type', 'published')
    for check_field in string_fields:
        if not message_json.get(check_field):
            continue
        if not isinstance(message_json[check_field], str):
            print('INBOX: ' +
                  'id, type and published fields should be strings ' +
                  check_field + ' ' + str(message_json[check_field]))
            http_400(self)
            self.server.postreq_busy = False
            return 3

    # check that to/cc fields are lists
    if debug:
        print('INBOX: checking to and cc fields')
    list_fields = ('to', 'cc')
    for check_field in list_fields:
        if not message_json.get(check_field):
            continue
        if not isinstance(message_json[check_field], list):
            print('INBOX: WARN: To and Cc fields should be lists, ' +
                  check_field + '=' + str(message_json[check_field]))
            # NOTE: this does not prevent further processing

    if has_object_dict(message_json):
        if debug:
            print('INBOX: checking object fields')
        # check that some fields are a string or list
        string_or_list_fields = ('url', 'attributedTo')
        for check_field in string_or_list_fields:
            if not message_json['object'].get(check_field):
                continue
            field_value = message_json['object'][check_field]
            if not isinstance(field_value, str) and \
               not isinstance(field_value, list):
                print('INBOX: ' +
                      check_field + ' should be a string or list ' +
                      str(message_json['object'][check_field]))
                http_400(self)
                self.server.postreq_busy = False
                return 3
        # check that some fields are strings
        string_fields = (
            'id', 'actor', 'type', 'content', 'published',
            'summary'
        )
        for check_field in string_fields:
            if not message_json['object'].get(check_field):
                continue
            if not isinstance(message_json['object'][check_field], str):
                print('INBOX: ' +
                      check_field + ' should be a string ' +
                      str(message_json['object'][check_field]))
                http_400(self)
                self.server.postreq_busy = False
                return 3
        # check that some fields are lists
        if debug:
            print('INBOX: checking object to and cc fields')
        list_fields = ('to', 'cc', 'attachment')
        for check_field in list_fields:
            if not message_json['object'].get(check_field):
                continue
            if not isinstance(message_json['object'][check_field], list):
                print('INBOX: ' +
                      check_field + ' should be a list ' +
                      str(message_json['object'][check_field]))
                http_400(self)
                self.server.postreq_busy = False
                return 3
        # check that the content does not contain impossibly long urls
        if message_json['object'].get('content'):
            content_str = message_json['object']['content']
            if not valid_url_lengths(content_str, 2048):
                actor_url = get_actor_from_post(message_json)
                print('INBOX: content contains urls which are too long ' +
                      actor_url)
                http_400(self)
                self.server.postreq_busy = False
                return 3
        # check that the summary does not contain links
        if post_summary_contains_links(message_json):
            http_400(self)
            self.server.postreq_busy = False
            return 3
        # if this is a local only post, is it really local?
        if 'localOnly' in message_json['object'] and \
           message_json['object'].get('to') and \
           message_json['object'].get('attributedTo'):
            if not local_only_is_local(message_json,
                                       self.server.domain_full):
                http_400(self)
                self.server.postreq_busy = False
                return 3

    # actor should look like a url
    if debug:
        print('INBOX: checking that actor looks like a url')
    actor_url = get_actor_from_post(message_json)
    if not resembles_url(actor_url):
        print('INBOX: POST actor does not look like a url ' +
              actor_url)
        http_400(self)
        self.server.postreq_busy = False
        return 3

    # sent by an actor on a local network address?
    if debug:
        print('INBOX: checking for local network access')
    if not self.server.allow_local_network_access:
        local_network_pattern_list = get_local_network_addresses()
        actor_url = get_actor_from_post(message_json)
        for local_network_pattern in local_network_pattern_list:
            if local_network_pattern in actor_url:
                print('INBOX: POST actor contains local network address ' +
                      actor_url)
                http_400(self)
                self.server.postreq_busy = False
                return 3

    actor_url = get_actor_from_post(message_json)
    message_domain, _ = get_domain_from_actor(actor_url)
    if not message_domain:
        print('INBOX: POST from unknown domain ' + actor_url)
        http_400(self)
        self.server.postreq_busy = False
        return 3

    self.server.blocked_cache_last_updated = \
        update_blocked_cache(self.server.base_dir,
                             self.server.blocked_cache,
                             self.server.blocked_cache_last_updated,
                             self.server.blocked_cache_update_secs)

    if debug:
        print('INBOX: checking for blocked domain ' + message_domain)
    if is_blocked_domain(self.server.base_dir, message_domain,
                         self.server.blocked_cache,
                         self.server.block_federated):
        print('INBOX: POST from blocked domain ' + message_domain)
        http_400(self)
        self.server.postreq_busy = False
        return 3

    message_nickname = get_nickname_from_actor(actor_url)
    if not message_nickname:
        print('INBOX: POST from unknown nickname ' + actor_url)
        http_400(self)
        self.server.postreq_busy = False
        return 3
    if debug:
        print('INBOX: checking for blocked nickname ' + message_nickname)
    if is_blocked_nickname(self.server.base_dir, message_nickname,
                           self.server.blocked_cache):
        print('INBOX: POST from blocked nickname ' + message_nickname)
        http_400(self)
        self.server.postreq_busy = False
        return 3

    # if the inbox queue is full then return a busy code
    if debug:
        print('INBOX: checking for full queue')
    if len(self.server.inbox_queue) >= self.server.max_queue_length:
        if message_domain:
            print('INBOX: Queue: ' +
                  'Inbox queue is full. Incoming post from ' +
                  actor_url)
        else:
            print('INBOX: Queue: Inbox queue is full')
        http_503(self)
        clear_queue_items(self.server.base_dir, self.server.inbox_queue)
        if not self.server.restart_inbox_queue_in_progress:
            self.server.restart_inbox_queue = True
        self.server.postreq_busy = False
        return 2

    # follower synchronization endpoint information
    if self.headers.get('Collection-Synchronization'):
        if debug:
            print('Collection-Synchronization: ' +
                  str(self.headers['Collection-Synchronization']))

    # Convert the headers needed for signature verification to dict
    headers_dict = {}
    headers_dict['host'] = self.headers['host']
    headers_dict['signature'] = self.headers['signature']
    if self.headers.get('Date'):
        headers_dict['Date'] = self.headers['Date']
    elif self.headers.get('date'):
        headers_dict['Date'] = self.headers['date']
    if self.headers.get('digest'):
        headers_dict['digest'] = self.headers['digest']
    if self.headers.get('Collection-Synchronization'):
        headers_dict['Collection-Synchronization'] = \
            self.headers['Collection-Synchronization']
    if self.headers.get('Content-type'):
        headers_dict['Content-type'] = self.headers['Content-type']
    if self.headers.get('Content-Length'):
        headers_dict['Content-Length'] = self.headers['Content-Length']
    elif self.headers.get('content-length'):
        headers_dict['content-length'] = self.headers['content-length']

    original_message_json = message_json.copy()

    # whether to add a 'to' field to the message
    add_to_field_types = (
        'Follow', 'Like', 'EmojiReact', 'Add', 'Remove', 'Ignore', 'Move'
    )
    for add_to_type in add_to_field_types:
        message_json, _ = \
            add_to_field(add_to_type, message_json, self.server.debug)

    begin_save_time = time.time()
    # save the json for later queue processing
    message_bytes_decoded = message_bytes.decode('utf-8')

    if debug:
        print('INBOX: checking for invalid links')
    if contains_invalid_local_links(message_bytes_decoded):
        print('INBOX: post contains invalid local links ' +
              str(original_message_json))
        return 5

    self.server.blocked_cache_last_updated = \
        update_blocked_cache(self.server.base_dir,
                             self.server.blocked_cache,
                             self.server.blocked_cache_last_updated,
                             self.server.blocked_cache_update_secs)

    mitm = _detect_mitm(self)

    if debug:
        print('INBOX: saving post to queue')
    queue_filename = \
        save_post_to_inbox_queue(self.server.base_dir,
                                 self.server.http_prefix,
                                 nickname,
                                 self.server.domain_full,
                                 message_json, original_message_json,
                                 message_bytes_decoded,
                                 headers_dict,
                                 self.path,
                                 self.server.debug,
                                 self.server.blocked_cache,
                                 self.server.block_federated,
                                 self.server.system_language,
                                 mitm)
    if queue_filename:
        # add json to the queue
        if queue_filename not in self.server.inbox_queue:
            self.server.inbox_queue.append(queue_filename)
        if self.server.debug:
            time_diff = int((time.time() - begin_save_time) * 1000)
            if time_diff > 200:
                print('SLOW: slow save of inbox queue item ' +
                      queue_filename + ' took ' + str(time_diff) + ' mS')
        self.send_response(201)
        self.end_headers()
        self.server.postreq_busy = False
        return 0
    http_503(self)
    self.server.postreq_busy = False
    return 1


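For reference, update_inbox_queue above signals its outcome through small integer codes, and in most branches it has already sent the HTTP response before returning. The mapping below is a hypothetical caller-side summary read directly from the function body; it is illustrative only and not part of the commit.

# Illustrative sketch only, not part of the commit: the integer codes
# returned by update_inbox_queue, as read from its body, and how a
# hypothetical caller might log them.
INBOX_QUEUE_RESULTS = {
    0: 'accepted and queued (201 already sent)',
    1: 'could not save to the inbox queue (503 already sent)',
    2: 'queue full or restarting (503 already sent)',
    3: 'malformed or blocked message (400 already sent)',
    5: 'post contains invalid local links (no response sent)'
}


def log_inbox_result(code: int) -> None:
    print('inbox queue: ' +
          INBOX_QUEUE_RESULTS.get(code, 'unknown code ' + str(code)))
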
def is_authorized(self) -> bool:
    self.authorized_nickname = None

    not_auth_paths = (
        '/icons/', '/avatars/', '/favicons/',
        '/system/accounts/avatars/',
        '/system/accounts/headers/',
        '/system/media_attachments/files/',
        '/accounts/avatars/', '/accounts/headers/',
        '/favicon.ico', '/newswire.xml',
        '/newswire_favicon.ico', '/categories.xml'
    )
    for not_auth_str in not_auth_paths:
        if self.path.startswith(not_auth_str):
            return False

    # token based authentication used by the web interface
    if self.headers.get('Cookie'):
        if self.headers['Cookie'].startswith('epicyon='):
            token_str = self.headers['Cookie'].split('=', 1)[1].strip()
            if ';' in token_str:
                token_str = token_str.split(';')[0].strip()
            if self.server.tokens_lookup.get(token_str):
                nickname = self.server.tokens_lookup[token_str]
                if not is_system_account(nickname):
                    self.authorized_nickname = nickname
                    # default to the inbox of the person
                    if self.path == '/':
                        self.path = '/users/' + nickname + '/inbox'
                    # check that the path contains the same nickname
                    # as the cookie otherwise it would be possible
                    # to be authorized to use an account you don't own
                    if '/' + nickname + '/' in self.path:
                        return True
                    if '/' + nickname + '?' in self.path:
                        return True
                    if self.path.endswith('/' + nickname):
                        return True
                    if self.server.debug:
                        print('AUTH: nickname ' + nickname +
                              ' was not found in path ' + self.path)
                return False
            print('AUTH: epicyon cookie ' +
                  'authorization failed, header=' +
                  self.headers['Cookie'].replace('epicyon=', '') +
                  ' token_str=' + token_str)
            return False
        print('AUTH: Header cookie was not authorized')
        return False
    # basic auth for c2s
    if self.headers.get('Authorization'):
        if authorize(self.server.base_dir, self.path,
                     self.headers['Authorization'],
                     self.server.debug):
            return True
        print('AUTH: C2S Basic auth did not authorize ' +
              self.headers['Authorization'])
    return False


def show_person_options(self, calling_domain: str, path: str,
                        base_dir: str,
                        domain: str, domain_full: str,
                        getreq_start_time,
                        cookie: str, debug: bool,
                        authorized: bool,
                        curr_session) -> None:
    """Show person options screen
    """
    back_to_path = ''
    options_str = path.split('?options=')[1]
    origin_path_str = path.split('?options=')[0]
    if ';' in options_str and '/users/news/' not in path:
        page_number = 1
        options_list = options_str.split(';')
        options_actor = options_list[0]
        options_page_number = 1
        if len(options_list) > 1:
            options_page_number = options_list[1]
        options_profile_url = ''
        if len(options_list) > 2:
            options_profile_url = options_list[2]
        if '.' in options_profile_url and \
           options_profile_url.startswith('/members/'):
            ext = options_profile_url.split('.')[-1]
            options_profile_url = options_profile_url.split('/members/')[1]
            options_profile_url = \
                options_profile_url.replace('.' + ext, '')
            options_profile_url = \
                '/users/' + options_profile_url + '/avatar.' + ext
            back_to_path = 'moderation'
        if len(options_page_number) > 5:
            options_page_number = "1"
        if options_page_number.isdigit():
            page_number = int(options_page_number)
        options_link = None
        if len(options_list) > 3:
            options_link = options_list[3]
        is_group = False
        donate_url = None
        website_url = None
        gemini_link = None
        enigma_pub_key = None
        pgp_pub_key = None
        pgp_fingerprint = None
        xmpp_address = None
        matrix_address = None
        blog_address = None
        tox_address = None
        briar_address = None
        cwtch_address = None
        ssb_address = None
        email_address = None
        locked_account = False
        also_known_as = None
        moved_to = ''
        repo_url = None
        actor_json = \
            get_person_from_cache(base_dir,
                                  options_actor,
                                  self.server.person_cache)
        if actor_json:
            if actor_json.get('movedTo'):
                moved_to = actor_json['movedTo']
                if '"' in moved_to:
                    moved_to = moved_to.split('"')[1]
            if actor_json.get('type'):
                if actor_json['type'] == 'Group':
                    is_group = True
            locked_account = get_locked_account(actor_json)
            donate_url = get_donation_url(actor_json)
            website_url = get_website(actor_json, self.server.translate)
            gemini_link = get_gemini_link(actor_json)
            xmpp_address = get_xmpp_address(actor_json)
            matrix_address = get_matrix_address(actor_json)
            ssb_address = get_ssb_address(actor_json)
            blog_address = get_blog_address(actor_json)
            tox_address = get_tox_address(actor_json)
            briar_address = get_briar_address(actor_json)
            cwtch_address = get_cwtch_address(actor_json)
            email_address = get_email_address(actor_json)
            enigma_pub_key = get_enigma_pub_key(actor_json)
            pgp_pub_key = get_pgp_pub_key(actor_json)
            pgp_fingerprint = get_pgp_fingerprint(actor_json)
            if actor_json.get('alsoKnownAs'):
                also_known_as = remove_html(actor_json['alsoKnownAs'])
            repo_url = get_repo_url(actor_json)

        access_keys = self.server.access_keys
        nickname = 'instance'
        if '/users/' in path:
            nickname = path.split('/users/')[1]
            if '/' in nickname:
                nickname = nickname.split('/')[0]
            if self.server.key_shortcuts.get(nickname):
                access_keys = self.server.key_shortcuts[nickname]

        if curr_session:
            # because this is slow, do it in a separate thread
            if self.server.thrCheckActor.get(nickname):
                # kill existing thread
                self.server.thrCheckActor[nickname].kill()

            self.server.thrCheckActor[nickname] = \
                thread_with_trace(target=check_for_changed_actor,
                                  args=(curr_session,
                                        self.server.base_dir,
                                        self.server.http_prefix,
                                        self.server.domain_full,
                                        options_actor, options_profile_url,
                                        self.server.person_cache,
                                        self.server.check_actor_timeout),
                                  daemon=True)
            begin_thread(self.server.thrCheckActor[nickname],
                         '_show_person_options')

        msg = \
            html_person_options(self.server.default_timeline,
                                self.server.translate,
                                base_dir, domain,
                                domain_full,
                                origin_path_str,
                                options_actor,
                                options_profile_url,
                                options_link,
                                page_number, donate_url, website_url,
                                gemini_link,
                                xmpp_address, matrix_address,
                                ssb_address, blog_address,
                                tox_address, briar_address,
                                cwtch_address,
                                enigma_pub_key,
                                pgp_pub_key, pgp_fingerprint,
                                email_address,
                                self.server.dormant_months,
                                back_to_path,
                                locked_account,
                                moved_to, also_known_as,
                                self.server.text_mode_banner,
                                self.server.news_instance,
                                authorized,
                                access_keys, is_group,
                                self.server.theme_name,
                                self.server.blocked_cache,
                                repo_url,
                                self.server.sites_unavailable)
        if msg:
            msg = msg.encode('utf-8')
            msglen = len(msg)
            set_headers(self, 'text/html', msglen,
                        cookie, calling_domain, False)
            write2(self, msg)
            fitness_performance(getreq_start_time, self.server.fitness,
                                '_GET', '_show_person_options', debug)
        else:
            http_404(self, 31)
        return

    if '/users/news/' in path:
        redirect_headers(self, origin_path_str + '/tlfeatures',
                         cookie, calling_domain)
        return

    origin_path_str_absolute = \
        get_instance_url(calling_domain,
                         self.server.http_prefix,
                         self.server.domain_full,
                         self.server.onion_domain,
                         self.server.i2p_domain) + \
        origin_path_str
    redirect_headers(self, origin_path_str_absolute, cookie,
                     calling_domain)


def get_user_agent(self) -> str:
    """Returns the user agent string from the headers
    """
    ua_str = None
    if self.headers.get('User-Agent'):
        ua_str = self.headers['User-Agent']
    elif self.headers.get('user-agent'):
        ua_str = self.headers['user-agent']
    elif self.headers.get('User-agent'):
        ua_str = self.headers['User-agent']
    return ua_str
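
The per-account ring buffer in _get_outbox_thread_index above can be illustrated in isolation. This is a minimal standalone sketch, not part of the commit: the OutboxRing class and the three-slot buffer size are invented for the example, while the daemon itself uses eight slots per account and stores thread objects in the slots.

# Illustrative sketch only: the same per-account ring buffer indexing used
# by _get_outbox_thread_index, with threads replaced by placeholder slots.
class OutboxRing:
    def __init__(self, slots_per_account: int = 8):
        self.slots_per_account = slots_per_account
        self.buffers = {}   # nickname -> list of slots
        self.indexes = {}   # nickname -> current slot index

    def next_index(self, nickname: str) -> int:
        name = nickname if nickname else '*'
        if name not in self.buffers:
            # first use: allocate the buffer and start at slot 0
            self.buffers[name] = [None] * self.slots_per_account
            self.indexes[name] = 0
            return 0
        # wrap the index around the fixed number of slots
        index = (self.indexes[name] + 1) % self.slots_per_account
        self.indexes[name] = index
        return index


ring = OutboxRing(slots_per_account=3)
print([ring.next_index('alice') for _ in range(5)])   # [0, 1, 2, 0, 1]
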

httpcodes.py (new file)
@@ -0,0 +1,177 @@
__filename__ = "httpcodes.py"
__author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.5.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Core"

import time


def write2(self, msg) -> bool:
    tries = 0
    while tries < 5:
        try:
            self.wfile.write(msg)
            return True
        except BrokenPipeError as ex:
            if self.server.debug:
                print('EX: _write error ' + str(tries) + ' ' + str(ex))
            break
        except BaseException as ex:
            print('EX: _write error ' + str(tries) + ' ' + str(ex))
            time.sleep(0.5)
        tries += 1
    return False


def _http_return_code(self, http_code: int, http_description: str,
                      long_description: str, etag: str) -> None:
    msg = \
        '<html><head><title>' + str(http_code) + '</title></head>' + \
        '<body bgcolor="linen" text="black">' + \
        '<div style="font-size: 400px; ' + \
        'text-align: center;">' + str(http_code) + '</div>' + \
        '<div style="font-size: 128px; ' + \
        'text-align: center; font-variant: ' + \
        'small-caps;"><p role="alert">' + str(http_description) + \
        '</p></div>' + \
        '<div style="text-align: center;" aria-live="polite">' + \
        str(long_description) + '</div></body></html>'
    msg = msg.encode('utf-8')
    self.send_response(http_code)
    self.send_header('Content-Type', 'text/html; charset=utf-8')
    msg_len_str = str(len(msg))
    self.send_header('Content-Length', msg_len_str)
    if etag:
        self.send_header('ETag', etag)
    self.end_headers()
    if not write2(self, msg):
        print('Error when showing ' + str(http_code))


def http_200(self) -> None:
    if self.server.translate:
        ok_str = self.server.translate['This is nothing ' +
                                       'less than an utter triumph']
        _http_return_code(self, 200, self.server.translate['Ok'],
                          ok_str, None)
    else:
        _http_return_code(self, 200, 'Ok',
                          'This is nothing less ' +
                          'than an utter triumph', None)


def http_401(self, post_msg: str) -> None:
    if self.server.translate:
        if self.server.translate.get(post_msg):
            ok_str = self.server.translate[post_msg]
        else:
            ok_str = post_msg
        _http_return_code(self, 401,
                          self.server.translate['Unauthorized'],
                          ok_str, None)
    else:
        _http_return_code(self, 401, 'Unauthorized',
                          post_msg, None)


def http_402(self) -> None:
    if self.server.translate:
        text = self.server.translate["It's time to splash that cash"]
        _http_return_code(self, 402,
                          self.server.translate['Payment required'],
                          text, None)
    else:
        text = "It's time to splash that cash"
        _http_return_code(self, 402, 'Payment required', text, None)


def http_201(self, etag: str) -> None:
    if self.server.translate:
        done_str = self.server.translate['It is done']
        _http_return_code(self, 201,
                          self.server.translate['Created'], done_str,
                          etag)
    else:
        _http_return_code(self, 201, 'Created', 'It is done', etag)


def http_207(self) -> None:
    if self.server.translate:
        multi_str = self.server.translate['Lots of things']
        _http_return_code(self, 207,
                          self.server.translate['Multi Status'],
                          multi_str, None)
    else:
        _http_return_code(self, 207, 'Multi Status',
                          'Lots of things', None)


def http_403(self) -> None:
    if self.server.translate:
        _http_return_code(self, 403, self.server.translate['Forbidden'],
                          self.server.translate["You're not allowed"],
                          None)
    else:
        _http_return_code(self, 403, 'Forbidden',
                          "You're not allowed", None)


def http_404(self, ref: int) -> None:
    if self.server.translate:
        text = \
            self.server.translate['These are not the ' +
                                  'droids you are ' +
                                  'looking for'] + \
            ' ' + str(ref)
        _http_return_code(self, 404,
                          self.server.translate['Not Found'],
                          text, None)
    else:
        text = \
            'These are not the droids you are looking for ' + str(ref)
        _http_return_code(self, 404, 'Not Found', text, None)


def http_304(self) -> None:
    if self.server.translate:
        _http_return_code(self, 304, self.server.translate['Not changed'],
                          self.server.translate['The contents of ' +
                                                'your local cache ' +
                                                'are up to date'],
                          None)
    else:
        _http_return_code(self, 304, 'Not changed',
                          'The contents of ' +
                          'your local cache ' +
                          'are up to date',
                          None)


def http_400(self) -> None:
    if self.server.translate:
        _http_return_code(self, 400,
                          self.server.translate['Bad Request'],
                          self.server.translate['Better luck ' +
                                                'next time'],
                          None)
    else:
        _http_return_code(self, 400, 'Bad Request',
                          'Better luck next time', None)


def http_503(self) -> None:
    if self.server.translate:
        busy_str = \
            self.server.translate['The server is busy. ' +
                                  'Please try again later']
        _http_return_code(self, 503,
                          self.server.translate['Unavailable'],
                          busy_str, None)
    else:
        _http_return_code(self, 503, 'Unavailable',
                          'The server is busy. Please try again ' +
                          'later', None)
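
Because the http_* helpers above take the request handler as an explicit first argument, they can be exercised outside the daemon with a small stub. The sketch below is illustrative only and not part of the commit; FakeServer, FakeWFile and FakeHandler are invented names, and translate is left as None so the English fallback text is used.

# Illustrative sketch only: a stub handler used to drive http_404()
# outside the daemon, assuming httpcodes.py is on the import path.
from httpcodes import http_404


class FakeServer:
    translate = None   # no translations -> English fallback text
    debug = False


class FakeWFile:
    def write(self, data: bytes) -> None:
        print(data.decode('utf-8'))


class FakeHandler:
    server = FakeServer()
    wfile = FakeWFile()

    def send_response(self, code: int) -> None:
        print('HTTP status ' + str(code))

    def send_header(self, name: str, value: str) -> None:
        print(name + ': ' + str(value))

    def end_headers(self) -> None:
        print()


http_404(FakeHandler(), 31)   # prints a 404 page mentioning reference 31
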

httpheaders.py (new file)
@@ -0,0 +1,203 @@
__filename__ = "httpheaders.py"
__author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.5.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Core"

import os
import urllib.parse
from hashlib import md5
from utils import get_instance_url


def login_headers(self, file_format: str, length: int,
                  calling_domain: str) -> None:
    self.send_response(200)
    self.send_header('Content-type', file_format)
    self.send_header('Content-Length', str(length))
    self.send_header('Host', calling_domain)
    self.send_header('WWW-Authenticate',
                     'title="Login to Epicyon", Basic realm="epicyon"')
    self.end_headers()


def logout_headers(self, file_format: str, length: int,
                   calling_domain: str) -> None:
    self.send_response(200)
    self.send_header('Content-type', file_format)
    self.send_header('Content-Length', str(length))
    self.send_header('Set-Cookie', 'epicyon=; SameSite=Strict')
    self.send_header('Host', calling_domain)
    self.send_header('WWW-Authenticate',
                     'title="Login to Epicyon", Basic realm="epicyon"')
    self.end_headers()


def _quoted_redirect(redirect: str) -> str:
    """hashtag screen urls sometimes contain non-ascii characters which
    need to be url encoded
    """
    if '/tags/' not in redirect:
        return redirect
    last_str = redirect.split('/')[-1]
    return redirect.replace('/' + last_str, '/' +
                            urllib.parse.quote_plus(last_str))


def logout_redirect(self, redirect: str, calling_domain: str) -> None:
    if '://' not in redirect:
        redirect = get_instance_url(calling_domain,
                                    self.server.http_prefix,
                                    self.server.domain_full,
                                    self.server.onion_domain,
                                    self.server.i2p_domain) + \
            redirect
        print('WARN: redirect was not an absolute url, changed to ' +
              redirect)

    quot_redirect = _quoted_redirect(redirect)
    self.send_response(303)
    self.send_header('Set-Cookie', 'epicyon=; SameSite=Strict')
    self.send_header('Location', quot_redirect)
    self.send_header('Host', calling_domain)
    self.send_header('X-AP-Instance-ID', self.server.instance_id)
    self.send_header('Content-Length', '0')
    self.end_headers()


def redirect_headers(self, redirect: str, cookie: str,
                     calling_domain: str,
                     code: int = 303) -> None:
    if '://' not in redirect:
        redirect = get_instance_url(calling_domain,
                                    self.server.http_prefix,
                                    self.server.domain_full,
                                    self.server.onion_domain,
                                    self.server.i2p_domain) + \
            redirect
        print('WARN: redirect was not an absolute url, changed to ' +
              redirect)

    self.send_response(code)

    if code != 303:
        print('Redirect headers: ' + str(code))

    if cookie:
        cookie_str = cookie.replace('SET:', '').strip()
        if 'HttpOnly;' not in cookie_str:
            if self.server.http_prefix == 'https':
                cookie_str += '; Secure'
            cookie_str += '; HttpOnly; SameSite=Strict'
        if not cookie.startswith('SET:'):
            self.send_header('Cookie', cookie_str)
        else:
            self.send_header('Set-Cookie', cookie_str)
    quot_redirect = _quoted_redirect(redirect)
    self.send_header('Location', quot_redirect)
    self.send_header('Host', calling_domain)
    self.send_header('X-AP-Instance-ID', self.server.instance_id)
    self.send_header('Content-Length', '0')
    self.end_headers()


def clear_login_details(self, nickname: str, calling_domain: str) -> None:
    """Clears login details for the given account
    """
    # remove any token
    if self.server.tokens.get(nickname):
        del self.server.tokens_lookup[self.server.tokens[nickname]]
        del self.server.tokens[nickname]
    redirect_headers(self, self.server.http_prefix + '://' +
                     self.server.domain_full + '/login',
                     'epicyon=; SameSite=Strict',
                     calling_domain)


def _set_headers_base(self, file_format: str, length: int, cookie: str,
                      calling_domain: str, permissive: bool) -> None:
    self.send_response(200)
    self.send_header('Content-type', file_format)
    if 'image/' in file_format or \
       'audio/' in file_format or \
       'video/' in file_format:
        cache_control = 'public, max-age=84600, immutable'
        self.send_header('Cache-Control', cache_control)
    else:
        self.send_header('Cache-Control', 'public')
    self.send_header('Origin', self.server.domain_full)
    if length > -1:
        self.send_header('Content-Length', str(length))
    if calling_domain:
        self.send_header('Host', calling_domain)
    if permissive:
        self.send_header('Access-Control-Allow-Origin', '*')
        return
    self.send_header('X-AP-Instance-ID', self.server.instance_id)
    self.send_header('X-Clacks-Overhead', self.server.clacks)
    self.send_header('User-Agent',
                     'Epicyon/' + __version__ +
                     '; +' + self.server.http_prefix + '://' +
                     self.server.domain_full + '/')
    if cookie:
        cookie_str = cookie
        if 'HttpOnly;' not in cookie_str:
            if self.server.http_prefix == 'https':
                cookie_str += '; Secure'
            cookie_str += '; HttpOnly; SameSite=Strict'
        self.send_header('Cookie', cookie_str)


def set_headers(self, file_format: str, length: int, cookie: str,
                calling_domain: str, permissive: bool) -> None:
    _set_headers_base(self, file_format, length, cookie, calling_domain,
                      permissive)
    self.end_headers()


def set_headers_head(self, file_format: str, length: int, etag: str,
                     calling_domain: str, permissive: bool,
                     last_modified_time_str: str) -> None:
    _set_headers_base(self, file_format, length, None, calling_domain,
                      permissive)
    if etag:
        self.send_header('ETag', '"' + etag + '"')
    if last_modified_time_str:
        self.send_header('last-modified',
                         last_modified_time_str)
    self.end_headers()


def set_headers_etag(self, media_filename: str, file_format: str,
                     data, cookie: str, calling_domain: str,
                     permissive: bool, last_modified: str) -> None:
    datalen = len(data)
    _set_headers_base(self, file_format, datalen, cookie, calling_domain,
                      permissive)
    etag = None
    if os.path.isfile(media_filename + '.etag'):
        try:
            with open(media_filename + '.etag', 'r',
                      encoding='utf-8') as efile:
                etag = efile.read()
        except OSError:
            print('EX: _set_headers_etag ' +
                  'unable to read ' + media_filename + '.etag')
    if not etag:
        etag = md5(data).hexdigest()  # nosec
        try:
            with open(media_filename + '.etag', 'w+',
                      encoding='utf-8') as efile:
                efile.write(etag)
        except OSError:
            print('EX: _set_headers_etag ' +
                  'unable to write ' + media_filename + '.etag')
    # if etag:
    #     self.send_header('ETag', '"' + etag + '"')
    if last_modified:
        self.send_header('last-modified', last_modified)
    self.send_header('accept-ranges', 'bytes')
    self.end_headers()
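
The _quoted_redirect helper above only rewrites the final path segment of /tags/ URLs, which is where a non-ascii hashtag can appear. A brief illustration follows; it is not part of the commit, assumes the httpheaders module from this commit is importable, and uses a made-up domain.

# Illustrative only: the effect of _quoted_redirect on hashtag redirects.
from httpheaders import _quoted_redirect

print(_quoted_redirect('https://example.com/tags/café'))
# -> https://example.com/tags/caf%C3%A9

print(_quoted_redirect('https://example.com/users/alice'))
# unchanged, since the url contains no /tags/ segment
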

posts.py
@@ -1942,8 +1942,8 @@ def _post_is_addressed_to_followers(nickname: str, domain: str, port: int,
     return addressed_to_followers
 
 
-def pin_post(base_dir: str, nickname: str, domain: str,
-             pinned_content: str) -> None:
+def pin_post2(base_dir: str, nickname: str, domain: str,
+              pinned_content: str) -> None:
     """Pins the given post Id to the profile of then given account
     """
     account_dir = acct_dir(base_dir, nickname, domain)

shares.py
@@ -130,8 +130,8 @@ def _get_valid_shared_item_id(actor: str, display_name: str) -> str:
     return actor + '--shareditems--' + display_name
 
 
-def remove_shared_item(base_dir: str, nickname: str, domain: str,
-                       item_id: str, shares_file_type: str) -> None:
+def remove_shared_item2(base_dir: str, nickname: str, domain: str,
+                        item_id: str, shares_file_type: str) -> None:
    """Removes a share for a person
     """
     shares_filename = \
@@ -1241,9 +1241,9 @@ def outbox_undo_share_upload(base_dir: str, nickname: str, domain: str,
         if debug:
             print('DEBUG: displayName missing from Offer')
         return
-    remove_shared_item(base_dir, nickname, domain,
-                       message_json['object']['displayName'],
-                       'shares')
+    remove_shared_item2(base_dir, nickname, domain,
+                        message_json['object']['displayName'],
+                        'shares')
     if debug:
         print('DEBUG: shared item removed via c2s')

utils.py
@@ -5201,3 +5201,15 @@ def get_instance_url(calling_domain: str,
     instance_url = \
         http_prefix + '://' + domain_full
     return instance_url
+
+
+def check_bad_path(path: str):
+    """for http GET or POST check that the path looks valid
+    """
+    path_lower = path.lower()
+    if '..' in path_lower or \
+       '%2e%2e' in path_lower or \
+       '%252e%252e' in path_lower:
+        print('WARN: bad path ' + path)
+        return True
+    return False
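
The new check_bad_path helper rejects paths containing directory traversal sequences, including single and double URL-encoded forms, case-insensitively. A short illustration of the expected behaviour follows; it is not part of the commit and the example paths are made up.

# Illustrative only: expected behaviour of the new check_bad_path helper,
# assuming utils.py from this commit is on the import path.
from utils import check_bad_path

print(check_bad_path('/users/alice/../../etc/passwd'))   # True: contains '..'
print(check_bad_path('/users/alice/%2E%2E/secret'))      # True: encoded '..', matched case-insensitively
print(check_bad_path('/users/alice/inbox'))              # False: nothing suspicious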