epicyon/blocking.py

__filename__ = "blocking.py"
__author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.4.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Core"

import os
import json
import time
from session import get_json_valid
from utils import date_from_string_format
from utils import date_utcnow
from utils import remove_eol
from utils import has_object_string
from utils import has_object_string_object
from utils import has_object_string_type
from utils import remove_domain_port
from utils import has_object_dict
from utils import is_account_dir
from utils import get_cached_post_filename
from utils import load_json
from utils import save_json
from utils import file_last_modified
from utils import set_config_param
from utils import has_users_path
from utils import get_full_domain
from utils import remove_id_ending
from utils import is_evil
from utils import locate_post
from utils import evil_incarnate
from utils import get_domain_from_actor
from utils import get_nickname_from_actor
from utils import acct_dir
from utils import local_actor_url
from utils import has_actor
from utils import text_in_file
from conversation import mute_conversation
from conversation import unmute_conversation
from auth import create_basic_auth_header
from session import get_json


def get_global_block_reason(search_text: str,
                            blocking_reasons_filename: str) -> str:
"""Returns the reason why a domain was globally blocked
"""
if not text_in_file(search_text, blocking_reasons_filename):
return ''
reasons_str = ''
try:
with open(blocking_reasons_filename, 'r',
encoding='utf-8') as fp_reas:
reasons_str = fp_reas.read()
except OSError:
        print('WARN: Failed to read blocking reasons ' +
              blocking_reasons_filename)
if not reasons_str:
return ''
reasons_lines = reasons_str.split('\n')
for line in reasons_lines:
if line.startswith(search_text):
if ' ' in line:
return line.split(' ', 1)[1]
    return ''


def get_account_blocks(base_dir: str,
                       nickname: str, domain: str) -> str:
"""Returne the text for the textarea for "blocked accounts"
when editing profile
"""
account_directory = acct_dir(base_dir, nickname, domain)
blocking_filename = \
account_directory + '/blocking.txt'
blocking_reasons_filename = \
account_directory + '/blocking_reasons.txt'
if not os.path.isfile(blocking_filename):
return ''
blocked_accounts_textarea = ''
blocking_file_text = ''
try:
with open(blocking_filename, 'r', encoding='utf-8') as fp_block:
blocking_file_text = fp_block.read()
except OSError:
print('EX: Failed to read ' + blocking_filename)
return ''
blocklist = blocking_file_text.split('\n')
for handle in blocklist:
handle = handle.strip()
if not handle:
continue
reason = \
get_global_block_reason(handle,
blocking_reasons_filename)
if reason:
blocked_accounts_textarea += \
handle + ' - ' + reason + '\n'
continue
blocked_accounts_textarea += handle + '\n'
    return blocked_accounts_textarea


def blocked_timeline_json(actor: str, page_number: int, items_per_page: int,
base_dir: str,
nickname: str, domain: str) -> {}:
"""Returns blocked collection for an account
https://codeberg.org/fediverse/fep/src/branch/main/fep/c648/fep-c648.md
"""
blocked_accounts_textarea = \
get_account_blocks(base_dir, nickname, domain)
blocked_list = []
if blocked_accounts_textarea:
blocked_list = blocked_accounts_textarea.split('\n')
start_index = (page_number - 1) * items_per_page
if start_index >= len(blocked_list):
start_index = 0
    last_page_number = (len(blocked_list) // items_per_page) + 1
result_json = {
"@context": [
"https://www.w3.org/ns/activitystreams",
"https://purl.archive.org/socialweb/blocked"
],
"id": actor + '?page=' + str(page_number),
"first": actor + '?page=1',
"last": actor + '?page=' + str(last_page_number),
"type": "OrderedCollection",
"name": nickname + "'s Blocked Collection",
"orderedItems": []
}
index = start_index
for _ in range(items_per_page):
if index >= len(blocked_list):
break
block_handle = blocked_list[index]
block_reason = ''
if ' - ' in block_handle:
block_reason = block_handle.split(' - ')[1]
block_handle = block_handle.split(' - ')[0]
block_type = "Person"
if block_handle.startswith('*@'):
block_type = "Application"
block_handle = block_handle.split('*@', 1)[1]
block_json = {
"type": "Block",
"id": actor + '/' + str(index),
"object": {
"type": block_type,
"id": block_handle
}
}
if block_reason:
block_json["object"]["name"] = block_reason
result_json["orderedItems"].append(block_json)
index += 1
    return result_json


def add_account_blocks(base_dir: str,
                       nickname: str, domain: str,
                       blocked_accounts_textarea: str) -> bool:
"""Update the blockfile for an account after editing their
profile and changing "blocked accounts"
"""
if blocked_accounts_textarea is None:
return False
blocklist = blocked_accounts_textarea.split('\n')
blocking_file_text = ''
blocking_reasons_file_text = ''
for line in blocklist:
line = line.strip()
reason = None
if ' - ' in line:
block_id = line.split(' - ', 1)[0]
reason = line.split(' - ', 1)[1]
blocking_reasons_file_text += block_id + ' ' + reason + '\n'
elif ' ' in line:
block_id = line.split(' ', 1)[0]
reason = line.split(' ', 1)[1]
blocking_reasons_file_text += block_id + ' ' + reason + '\n'
else:
block_id = line
blocking_file_text += block_id + '\n'
account_directory = acct_dir(base_dir, nickname, domain)
blocking_filename = \
account_directory + '/blocking.txt'
blocking_reasons_filename = \
account_directory + '/blocking_reasons.txt'
if not blocking_file_text:
if os.path.isfile(blocking_filename):
try:
os.remove(blocking_filename)
except OSError:
print('EX: _profile_edit unable to delete blocking ' +
blocking_filename)
if os.path.isfile(blocking_reasons_filename):
try:
os.remove(blocking_reasons_filename)
except OSError:
                print('EX: _profile_edit unable to delete blocking reasons ' +
                      blocking_reasons_filename)
return True
try:
with open(blocking_filename, 'w+', encoding='utf-8') as fp_block:
fp_block.write(blocking_file_text)
except OSError:
print('EX: Failed to write ' + blocking_filename)
try:
with open(blocking_reasons_filename, 'w+',
encoding='utf-8') as fp_block:
fp_block.write(blocking_reasons_file_text)
except OSError:
print('EX: Failed to write ' + blocking_reasons_filename)
    return True


def _add_global_block_reason(base_dir: str,
                             block_nickname: str, block_domain: str,
                             reason: str) -> bool:
"""Store a global block reason
"""
if not reason:
return False
blocking_reasons_filename = \
base_dir + '/accounts/blocking_reasons.txt'
if not block_nickname.startswith('#'):
# is the handle already blocked?
block_id = block_nickname + '@' + block_domain
else:
block_id = block_nickname
reason = reason.replace('\n', '').strip()
reason_line = block_id + ' ' + reason + '\n'
if os.path.isfile(blocking_reasons_filename):
if not text_in_file(block_id,
blocking_reasons_filename):
try:
with open(blocking_reasons_filename, 'a+',
encoding='utf-8') as reas_file:
reas_file.write(reason_line)
except OSError:
print('EX: unable to add blocking reason ' +
block_id)
else:
reasons_str = ''
try:
with open(blocking_reasons_filename, 'r',
encoding='utf-8') as reas_file:
reasons_str = reas_file.read()
except OSError:
print('EX: unable to read blocking reasons')
reasons_lines = reasons_str.split('\n')
new_reasons_str = ''
for line in reasons_lines:
if not line.startswith(block_id + ' '):
new_reasons_str += line + '\n'
continue
new_reasons_str += reason_line
try:
with open(blocking_reasons_filename, 'w+',
encoding='utf-8') as reas_file:
reas_file.write(new_reasons_str)
except OSError:
print('EX: unable to save blocking reasons' +
blocking_reasons_filename)
else:
try:
with open(blocking_reasons_filename, 'w+',
encoding='utf-8') as reas_file:
reas_file.write(reason_line)
except OSError:
            print('EX: unable to save blocking reason ' +
                  block_id + ' ' + blocking_reasons_filename)


def add_global_block(base_dir: str,
block_nickname: str, block_domain: str,
reason: str) -> bool:
"""Global block which applies to all accounts
"""
    _add_global_block_reason(base_dir,
                             block_nickname, block_domain,
                             reason)

    blocking_filename = base_dir + '/accounts/blocking.txt'
    if not block_nickname.startswith('#'):
        # is the handle already blocked?
        block_handle = block_nickname + '@' + block_domain
        if os.path.isfile(blocking_filename):
            if text_in_file(block_handle, blocking_filename):
                return False
        # block an account handle or domain
        try:
            with open(blocking_filename, 'a+', encoding='utf-8') as block_file:
                block_file.write(block_handle + '\n')
        except OSError:
            print('EX: unable to save blocked handle ' + block_handle)
            return False
    else:
        block_hashtag = block_nickname
        # is the hashtag already blocked?
        if os.path.isfile(blocking_filename):
            if text_in_file(block_hashtag + '\n', blocking_filename):
                return False
        # block a hashtag
        try:
            with open(blocking_filename, 'a+', encoding='utf-8') as block_file:
                block_file.write(block_hashtag + '\n')
        except OSError:
            print('EX: unable to save blocked hashtag ' + block_hashtag)
            return False
    return True


def _add_block_reason(base_dir: str,
nickname: str, domain: str,
block_nickname: str, block_domain: str,
reason: str) -> bool:
"""Store an account level block reason
"""
if not reason:
return False
domain = remove_domain_port(domain)
blocking_reasons_filename = \
acct_dir(base_dir, nickname, domain) + '/blocking_reasons.txt'
if not block_nickname.startswith('#'):
# is the handle already blocked?
block_id = block_nickname + '@' + block_domain
else:
block_id = block_nickname
reason = reason.replace('\n', '').strip()
reason_line = block_id + ' ' + reason + '\n'
if os.path.isfile(blocking_reasons_filename):
if not text_in_file(block_id,
blocking_reasons_filename):
try:
with open(blocking_reasons_filename, 'a+',
encoding='utf-8') as reas_file:
reas_file.write(reason_line)
except OSError:
print('EX: unable to add blocking reason 2 ' +
block_id)
else:
reasons_str = ''
try:
with open(blocking_reasons_filename, 'r',
encoding='utf-8') as reas_file:
reasons_str = reas_file.read()
except OSError:
print('EX: unable to read blocking reasons 2')
reasons_lines = reasons_str.split('\n')
new_reasons_str = ''
for line in reasons_lines:
if not line.startswith(block_id + ' '):
new_reasons_str += line + '\n'
continue
new_reasons_str += reason_line
try:
with open(blocking_reasons_filename, 'w+',
encoding='utf-8') as reas_file:
reas_file.write(new_reasons_str)
except OSError:
print('EX: unable to save blocking reasons 2' +
blocking_reasons_filename)
else:
try:
with open(blocking_reasons_filename, 'w+',
encoding='utf-8') as reas_file:
reas_file.write(reason_line)
except OSError:
            print('EX: unable to save blocking reason 2 ' +
                  block_id + ' ' + blocking_reasons_filename)


def add_block(base_dir: str, nickname: str, domain: str,
block_nickname: str, block_domain: str,
reason: str) -> bool:
"""Block the given account
"""
    if block_domain.startswith(domain) and nickname == block_nickname:
        # don't block self
        return False

    domain = remove_domain_port(domain)
    blocking_filename = acct_dir(base_dir, nickname, domain) + '/blocking.txt'
    block_handle = block_nickname + '@' + block_domain
    if os.path.isfile(blocking_filename):
        if text_in_file(block_handle + '\n', blocking_filename):
            return False

    # if we are following then unfollow
    following_filename = \
        acct_dir(base_dir, nickname, domain) + '/following.txt'
    if os.path.isfile(following_filename):
        if text_in_file(block_handle + '\n', following_filename):
            following_str = ''
            try:
                with open(following_filename, 'r',
                          encoding='utf-8') as foll_file:
                    following_str = foll_file.read()
            except OSError:
                print('EX: Unable to read following ' + following_filename)
                return False

            if following_str:
                following_str = following_str.replace(block_handle + '\n', '')

            try:
                with open(following_filename, 'w+',
                          encoding='utf-8') as foll_file:
                    foll_file.write(following_str)
            except OSError:
                print('EX: Unable to write following ' + following_str)
                return False

    # if they are a follower then remove them
    followers_filename = \
        acct_dir(base_dir, nickname, domain) + '/followers.txt'
    if os.path.isfile(followers_filename):
        if text_in_file(block_handle + '\n', followers_filename):
            followers_str = ''
            try:
                with open(followers_filename, 'r',
                          encoding='utf-8') as foll_file:
                    followers_str = foll_file.read()
            except OSError:
                print('EX: Unable to read followers ' + followers_filename)
                return False

            if followers_str:
                followers_str = followers_str.replace(block_handle + '\n', '')

            try:
                with open(followers_filename, 'w+',
                          encoding='utf-8') as foll_file:
                    foll_file.write(followers_str)
            except OSError:
                print('EX: Unable to write followers ' + followers_str)
                return False

    try:
        with open(blocking_filename, 'a+', encoding='utf-8') as block_file:
            block_file.write(block_handle + '\n')
    except OSError:
        print('EX: unable to append block handle ' + block_handle)
        return False

    if reason:
        _add_block_reason(base_dir, nickname, domain,
                          block_nickname, block_domain, reason)

    return True


def _remove_global_block_reason(base_dir: str,
unblock_nickname: str,
unblock_domain: str) -> bool:
"""Remove a globla block reason
"""
unblocking_filename = base_dir + '/accounts/blocking_reasons.txt'
if not os.path.isfile(unblocking_filename):
return False
if not unblock_nickname.startswith('#'):
unblock_id = unblock_nickname + '@' + unblock_domain
else:
unblock_id = unblock_nickname
if not text_in_file(unblock_id + ' ', unblocking_filename):
return False
reasons_str = ''
try:
with open(unblocking_filename, 'r',
encoding='utf-8') as reas_file:
reasons_str = reas_file.read()
except OSError:
print('EX: unable to read blocking reasons 2')
reasons_lines = reasons_str.split('\n')
new_reasons_str = ''
for line in reasons_lines:
if line.startswith(unblock_id + ' '):
continue
new_reasons_str += line + '\n'
try:
with open(unblocking_filename, 'w+',
encoding='utf-8') as reas_file:
reas_file.write(new_reasons_str)
except OSError:
        print('EX: unable to save blocking reasons 2' +
              unblocking_filename)


def remove_global_block(base_dir: str,
unblock_nickname: str,
unblock_domain: str) -> bool:
"""Unblock the given global block
"""
    _remove_global_block_reason(base_dir,
                                unblock_nickname,
                                unblock_domain)

    unblocking_filename = base_dir + '/accounts/blocking.txt'
    if not unblock_nickname.startswith('#'):
        unblock_handle = unblock_nickname + '@' + unblock_domain
        if os.path.isfile(unblocking_filename):
            if text_in_file(unblock_handle, unblocking_filename):
                try:
                    with open(unblocking_filename, 'r',
                              encoding='utf-8') as fp_unblock:
                        with open(unblocking_filename + '.new', 'w+',
                                  encoding='utf-8') as fpnew:
                            for line in fp_unblock:
                                handle = remove_eol(line)
                                if unblock_handle not in line:
                                    fpnew.write(handle + '\n')
                except OSError as ex:
                    print('EX: failed to remove global block ' +
                          unblocking_filename + ' ' + str(ex))
                    return False
                if os.path.isfile(unblocking_filename + '.new'):
                    try:
                        os.rename(unblocking_filename + '.new',
                                  unblocking_filename)
                    except OSError:
                        print('EX: unable to rename ' + unblocking_filename)
                        return False
return True
    else:
        unblock_hashtag = unblock_nickname
        if os.path.isfile(unblocking_filename):
            if text_in_file(unblock_hashtag + '\n', unblocking_filename):
                try:
                    with open(unblocking_filename, 'r',
                              encoding='utf-8') as fp_unblock:
                        with open(unblocking_filename + '.new', 'w+',
                                  encoding='utf-8') as fpnew:
                            for line in fp_unblock:
                                block_line = remove_eol(line)
                                if unblock_hashtag not in line:
                                    fpnew.write(block_line + '\n')
                except OSError as ex:
                    print('EX: failed to remove global hashtag block ' +
                          unblocking_filename + ' ' + str(ex))
                    return False
                if os.path.isfile(unblocking_filename + '.new'):
                    try:
                        os.rename(unblocking_filename + '.new',
                                  unblocking_filename)
                    except OSError:
                        print('EX: unable to rename 2 ' + unblocking_filename)
                        return False
return True
    return False


def remove_block(base_dir: str, nickname: str, domain: str,
unblock_nickname: str, unblock_domain: str) -> bool:
    """Unblock the given account
    """
    domain = remove_domain_port(domain)
    unblocking_filename = \
        acct_dir(base_dir, nickname, domain) + '/blocking.txt'
    unblock_handle = unblock_nickname + '@' + unblock_domain
    if os.path.isfile(unblocking_filename):
        if text_in_file(unblock_handle, unblocking_filename):
            try:
                with open(unblocking_filename, 'r',
                          encoding='utf-8') as fp_unblock:
                    with open(unblocking_filename + '.new', 'w+',
                              encoding='utf-8') as fpnew:
                        for line in fp_unblock:
                            handle = remove_eol(line)
                            if unblock_handle not in line:
                                fpnew.write(handle + '\n')
            except OSError as ex:
                print('EX: failed to remove block ' +
                      unblocking_filename + ' ' + str(ex))
                return False
            if os.path.isfile(unblocking_filename + '.new'):
                try:
                    os.rename(unblocking_filename + '.new',
                              unblocking_filename)
                except OSError:
                    print('EX: unable to rename 3 ' + unblocking_filename)
                    return False
return True
    return False


def is_blocked_hashtag(base_dir: str, hashtag: str) -> bool:
    """Is the given hashtag blocked?
    """
    # avoid very long hashtags
    if len(hashtag) > 32:
        return True
    global_blocking_filename = base_dir + '/accounts/blocking.txt'
    if os.path.isfile(global_blocking_filename):
        hashtag = hashtag.strip('\n').strip('\r')
        if not hashtag.startswith('#'):
            hashtag = '#' + hashtag
        if text_in_file(hashtag + '\n', global_blocking_filename):
            return True
    return False


def get_domain_blocklist(base_dir: str) -> str:
    """Returns all globally blocked domains as a string
    This can be used for fast matching to mitigate flooding
    """
    blocked_str = ''

    evil_domains = evil_incarnate()
    for evil in evil_domains:
        blocked_str += evil + '\n'

    global_blocking_filename = base_dir + '/accounts/blocking.txt'
    if not os.path.isfile(global_blocking_filename):
        return blocked_str
    try:
        with open(global_blocking_filename, 'r',
                  encoding='utf-8') as fp_blocked:
            blocked_str += fp_blocked.read()
    except OSError:
        print('EX: unable to read ' + global_blocking_filename)
    return blocked_str


def update_blocked_cache(base_dir: str,
blocked_cache: [],
blocked_cache_last_updated: int,
blocked_cache_update_secs: int) -> int:
"""Updates the cache of globally blocked domains held in memory
"""
    curr_time = int(time.time())
    if blocked_cache_last_updated > curr_time:
        print('WARN: Cache updated in the future')
        blocked_cache_last_updated = 0
    seconds_since_last_update = curr_time - blocked_cache_last_updated
    if seconds_since_last_update < blocked_cache_update_secs:
        return blocked_cache_last_updated
    global_blocking_filename = base_dir + '/accounts/blocking.txt'
    if not os.path.isfile(global_blocking_filename):
        return blocked_cache_last_updated
    try:
        with open(global_blocking_filename, 'r',
                  encoding='utf-8') as fp_blocked:
            blocked_lines = fp_blocked.readlines()
            # remove newlines
            for index, _ in enumerate(blocked_lines):
                blocked_lines[index] = remove_eol(blocked_lines[index])
            # update the cache
            blocked_cache.clear()
            blocked_cache += blocked_lines
    except OSError as ex:
        print('EX: unable to read ' + global_blocking_filename + ' ' + str(ex))
    return curr_time


def _get_short_domain(domain: str) -> str:
""" by checking a shorter version we can thwart adversaries
who constantly change their subdomain
e.g. subdomain123.mydomain.com becomes mydomain.com
"""
sections = domain.split('.')
no_of_sections = len(sections)
if no_of_sections > 2:
return sections[no_of_sections-2] + '.' + sections[-1]
    return None


def is_blocked_domain(base_dir: str, domain: str,
blocked_cache: [] = None) -> bool:
"""Is the given domain blocked?
"""
    if '.' not in domain:
        return False

    if is_evil(domain):
        return True

    short_domain = _get_short_domain(domain)

    search_str = '*@' + domain
    if not broch_mode_is_active(base_dir):
        if blocked_cache:
            for blocked_str in blocked_cache:
                if blocked_str == search_str:
                    return True
                if short_domain:
                    if blocked_str == '*@' + short_domain:
                        return True
        else:
            # instance block list
            global_blocking_filename = base_dir + '/accounts/blocking.txt'
            if os.path.isfile(global_blocking_filename):
                search_str += '\n'
                search_str_short = None
                if short_domain:
                    search_str_short = '*@' + short_domain + '\n'
                try:
                    with open(global_blocking_filename, 'r',
                              encoding='utf-8') as fp_blocked:
                        blocked_str = fp_blocked.read()
                        if search_str in blocked_str:
                            return True
                        if short_domain:
                            if search_str_short in blocked_str:
                                return True
                except OSError as ex:
                    print('EX: unable to read ' + global_blocking_filename +
                          ' ' + str(ex))
    else:
        allow_filename = base_dir + '/accounts/allowedinstances.txt'
        # instance allow list
        if not short_domain:
            if not text_in_file(domain, allow_filename):
                return True
        else:
            if not text_in_file(short_domain, allow_filename):
                return True
    return False


def is_blocked_nickname(base_dir: str, nickname: str,
blocked_cache: [] = None) -> bool:
"""Is the given nickname blocked?
"""
search_str = nickname + '@*'
if blocked_cache:
for blocked_str in blocked_cache:
if blocked_str == search_str:
return True
else:
# instance-wide block list
global_blocking_filename = base_dir + '/accounts/blocking.txt'
if os.path.isfile(global_blocking_filename):
search_str += '\n'
try:
with open(global_blocking_filename, 'r',
encoding='utf-8') as fp_blocked:
blocked_str = fp_blocked.read()
if search_str in blocked_str:
return True
except OSError as ex:
print('EX: unable to read ' + global_blocking_filename +
' ' + str(ex))
    return False


def is_blocked(base_dir: str, nickname: str, domain: str,
block_nickname: str, block_domain: str,
blocked_cache: [] = None) -> bool:
"""Is the given account blocked?
"""
    if is_evil(block_domain):
        return True

    block_handle = None
    if block_nickname and block_domain:
        block_handle = block_nickname + '@' + block_domain

    if not broch_mode_is_active(base_dir):
        # instance level block list
        if blocked_cache:
            for blocked_str in blocked_cache:
                if block_nickname:
                    if block_nickname + '@*' in blocked_str:
                        return True
                if block_domain:
                    if '*@' + block_domain in blocked_str:
                        return True
                if block_handle:
                    if blocked_str == block_handle:
                        return True
        else:
            global_blocks_filename = base_dir + '/accounts/blocking.txt'
            if os.path.isfile(global_blocks_filename):
                if block_nickname:
                    if text_in_file(block_nickname + '@*\n',
                                    global_blocks_filename):
                        return True
                if text_in_file('*@' + block_domain, global_blocks_filename):
                    return True
                if block_handle:
                    block_str = block_handle + '\n'
                    if text_in_file(block_str, global_blocks_filename):
                        return True
    else:
        # instance allow list
        allow_filename = base_dir + '/accounts/allowedinstances.txt'
        short_domain = _get_short_domain(block_domain)
        if not short_domain and block_domain:
            if not text_in_file(block_domain + '\n', allow_filename):
                return True
        else:
            if not text_in_file(short_domain + '\n', allow_filename):
                return True

    # account level allow list
    account_dir = acct_dir(base_dir, nickname, domain)
    allow_filename = account_dir + '/allowedinstances.txt'
    if block_domain and os.path.isfile(allow_filename):
        if not text_in_file(block_domain + '\n', allow_filename):
            return True

    # account level block list
    blocking_filename = account_dir + '/blocking.txt'
    if os.path.isfile(blocking_filename):
        if block_nickname:
            if text_in_file(block_nickname + '@*\n', blocking_filename):
                return True
        if block_domain:
            if text_in_file('*@' + block_domain + '\n', blocking_filename):
                return True
        if block_handle:
            if text_in_file(block_handle + '\n', blocking_filename):
                return True
    return False


def allowed_announce(base_dir: str, nickname: str, domain: str,
block_nickname: str, block_domain: str,
announce_blocked_cache: [] = None) -> bool:
"""Is the given nickname allowed to send announces?
"""
block_handle = None
if block_nickname and block_domain:
block_handle = block_nickname + '@' + block_domain
# cached announce blocks
if announce_blocked_cache:
for blocked_str in announce_blocked_cache:
if block_nickname:
if block_nickname + '@*' in blocked_str:
return False
if block_domain:
if '*@' + block_domain in blocked_str:
return False
if block_handle:
if blocked_str == block_handle:
return False
# non-cached instance level announce blocks
global_announce_blocks_filename = \
base_dir + '/accounts/noannounce.txt'
if os.path.isfile(global_announce_blocks_filename):
if block_nickname:
if text_in_file(block_nickname + '@*',
global_announce_blocks_filename, False):
return False
if block_domain:
if text_in_file('*@' + block_domain,
global_announce_blocks_filename, False):
return False
if block_handle:
block_str = block_handle + '\n'
if text_in_file(block_str,
global_announce_blocks_filename, False):
return False
# non-cached account level announce blocks
account_dir = acct_dir(base_dir, nickname, domain)
blocking_filename = account_dir + '/noannounce.txt'
if os.path.isfile(blocking_filename):
if block_nickname:
if text_in_file(block_nickname + '@*\n',
blocking_filename, False):
return False
if block_domain:
if text_in_file('*@' + block_domain + '\n',
blocking_filename, False):
return False
if block_handle:
if text_in_file(block_handle + '\n', blocking_filename, False):
return False
    return True


def allowed_announce_add(base_dir: str, nickname: str, domain: str,
following_nickname: str,
following_domain: str) -> None:
"""Allow announces for a handle
"""
account_dir = acct_dir(base_dir, nickname, domain)
blocking_filename = account_dir + '/noannounce.txt'
# if the noannounce.txt file doesn't yet exist
if not os.path.isfile(blocking_filename):
return
handle = following_nickname + '@' + following_domain
if text_in_file(handle + '\n', blocking_filename, False):
file_text = ''
try:
with open(blocking_filename, 'r',
encoding='utf-8') as fp_noannounce:
file_text = fp_noannounce.read()
except OSError:
print('EX: unable to read noannounce: ' +
blocking_filename + ' ' + handle)
new_file_text = ''
file_text_list = file_text.split('\n')
handle_lower = handle.lower()
for allowed in file_text_list:
if allowed.lower() != handle_lower:
new_file_text += allowed + '\n'
file_text = new_file_text
try:
with open(blocking_filename, 'w+',
encoding='utf-8') as fp_noannounce:
fp_noannounce.write(file_text)
except OSError:
print('EX: unable to write noannounce: ' +
              blocking_filename + ' ' + handle)


def allowed_announce_remove(base_dir: str, nickname: str, domain: str,
following_nickname: str,
following_domain: str) -> None:
"""Don't allow announces from a handle
"""
account_dir = acct_dir(base_dir, nickname, domain)
blocking_filename = account_dir + '/noannounce.txt'
handle = following_nickname + '@' + following_domain
# if the noannounce.txt file doesn't yet exist
if not os.path.isfile(blocking_filename):
file_text = handle + '\n'
try:
with open(blocking_filename, 'w+',
encoding='utf-8') as fp_noannounce:
fp_noannounce.write(file_text)
except OSError:
print('EX: unable to write initial noannounce: ' +
blocking_filename + ' ' + handle)
return
file_text = ''
if not text_in_file(handle + '\n', blocking_filename, False):
try:
with open(blocking_filename, 'r',
encoding='utf-8') as fp_noannounce:
file_text = fp_noannounce.read()
except OSError:
print('EX: unable to read noannounce: ' +
blocking_filename + ' ' + handle)
file_text += handle + '\n'
try:
with open(blocking_filename, 'w+',
encoding='utf-8') as fp_noannounce:
fp_noannounce.write(file_text)
except OSError:
print('EX: unable to write noannounce: ' +
              blocking_filename + ' ' + handle)


def outbox_block(base_dir: str, nickname: str, domain: str,
message_json: {}, debug: bool) -> bool:
    """ When a block request is received by the outbox from c2s
    """
    if not message_json.get('type'):
        if debug:
            print('DEBUG: block - no type')
        return False
    if not message_json['type'] == 'Block':
        if debug:
            print('DEBUG: not a block')
        return False
    if not has_object_string(message_json, debug):
        return False
    if debug:
        print('DEBUG: c2s block request arrived in outbox')

    message_id = remove_id_ending(message_json['object'])
    if '/statuses/' not in message_id:
        if debug:
            print('DEBUG: c2s block object is not a status')
        return False
    if not has_users_path(message_id):
        if debug:
            print('DEBUG: c2s block object has no nickname')
        return False
    domain = remove_domain_port(domain)
    post_filename = locate_post(base_dir, nickname, domain, message_id)
    if not post_filename:
        if debug:
            print('DEBUG: c2s block post not found in inbox or outbox')
            print(message_id)
        return False
    nickname_blocked = get_nickname_from_actor(message_json['object'])
    if not nickname_blocked:
        print('WARN: unable to find nickname in ' + message_json['object'])
        return False
    domain_blocked, port_blocked = \
        get_domain_from_actor(message_json['object'])
    if not domain_blocked:
        print('WARN: unable to find domain in ' + message_json['object'])
        return False
    domain_blocked_full = get_full_domain(domain_blocked, port_blocked)

    add_block(base_dir, nickname, domain,
              nickname_blocked, domain_blocked_full, '')

    if debug:
        print('DEBUG: post blocked via c2s - ' + post_filename)
    return True


def outbox_undo_block(base_dir: str, nickname: str, domain: str,
message_json: {}, debug: bool) -> None:
""" When an undo block request is received by the outbox from c2s
"""
    if not message_json.get('type'):
        if debug:
            print('DEBUG: undo block - no type')
        return
    if not message_json['type'] == 'Undo':
        if debug:
            print('DEBUG: not an undo block')
        return
    if not has_object_string_type(message_json, debug):
        return
    if not message_json['object']['type'] == 'Block':
        if debug:
            print('DEBUG: not an undo block')
        return
    if not has_object_string_object(message_json, debug):
        return
    if debug:
        print('DEBUG: c2s undo block request arrived in outbox')

    message_id = remove_id_ending(message_json['object']['object'])
    if '/statuses/' not in message_id:
        if debug:
            print('DEBUG: c2s undo block object is not a status')
        return
    if not has_users_path(message_id):
        if debug:
            print('DEBUG: c2s undo block object has no nickname')
        return
    domain = remove_domain_port(domain)
    post_filename = locate_post(base_dir, nickname, domain, message_id)
    if not post_filename:
        if debug:
            print('DEBUG: c2s undo block post not found in inbox or outbox')
            print(message_id)
        return
    nickname_blocked = \
        get_nickname_from_actor(message_json['object']['object'])
    if not nickname_blocked:
        print('WARN: unable to find nickname in ' +
              message_json['object']['object'])
        return
    domain_object = message_json['object']['object']
    domain_blocked, port_blocked = get_domain_from_actor(domain_object)
    if not domain_blocked:
        print('WARN: unable to find domain in ' +
              message_json['object']['object'])
        return
    domain_blocked_full = get_full_domain(domain_blocked, port_blocked)

    remove_block(base_dir, nickname, domain,
                 nickname_blocked, domain_blocked_full)
    if debug:
        print('DEBUG: post undo blocked via c2s - ' + post_filename)


def mute_post(base_dir: str, nickname: str, domain: str, port: int,
http_prefix: str, post_id: str, recent_posts_cache: {},
debug: bool) -> None:
""" Mutes the given post
"""
    print('mute_post: post_id ' + post_id)
    post_filename = locate_post(base_dir, nickname, domain, post_id)
    if not post_filename:
        print('mute_post: file not found ' + post_id)
        return
    post_json_object = load_json(post_filename)
    if not post_json_object:
        print('mute_post: object not loaded ' + post_id)
        return
    print('mute_post: ' + str(post_json_object))

    post_json_obj = post_json_object
    also_update_post_id = None
    if has_object_dict(post_json_object):
        post_json_obj = post_json_object['object']
    else:
        if has_object_string(post_json_object, debug):
            also_update_post_id = remove_id_ending(post_json_object['object'])

    domain_full = get_full_domain(domain, port)
    actor = local_actor_url(http_prefix, nickname, domain_full)

    if post_json_obj.get('conversation'):
        mute_conversation(base_dir, nickname, domain,
                          post_json_obj['conversation'])
    elif post_json_obj.get('context'):
        mute_conversation(base_dir, nickname, domain,
                          post_json_obj['context'])
    # does this post have ignores on it from different actors?
    if not post_json_obj.get('ignores'):
        if debug:
            print('DEBUG: Adding initial mute to ' + post_id)
        ignores_json = {
            "@context": "https://www.w3.org/ns/activitystreams",
            'id': post_id,
            'type': 'Collection',
            "totalItems": 1,
            'items': [{
                'type': 'Ignore',
                'actor': actor
            }]
        }
        post_json_obj['ignores'] = ignores_json
else:
if not post_json_obj['ignores'].get('items'):
post_json_obj['ignores']['items'] = []
items_list = post_json_obj['ignores']['items']
for ignores_item in items_list:
if ignores_item.get('actor'):
if ignores_item['actor'] == actor:
return
new_ignore = {
'type': 'Ignore',
'actor': actor
}
        items_list.append(new_ignore)
        ig_it = len(items_list)
        post_json_obj['ignores']['totalItems'] = ig_it
post_json_obj['muted'] = True
if save_json(post_json_object, post_filename):
print('mute_post: saved ' + post_filename)
# remove cached post so that the muted version gets recreated
# without its content text and/or image
    cached_post_filename = \
        get_cached_post_filename(base_dir, nickname, domain, post_json_object)
    if cached_post_filename:
        if os.path.isfile(cached_post_filename):
            try:
                os.remove(cached_post_filename)
                print('MUTE: cached post removed ' + cached_post_filename)
            except OSError:
                print('EX: MUTE cached post not removed ' +
                      cached_post_filename)
        else:
            print('MUTE: cached post not found ' + cached_post_filename)
    try:
        with open(post_filename + '.muted', 'w+',
                  encoding='utf-8') as mute_file:
            mute_file.write('\n')
    except OSError:
        print('EX: Failed to save mute file ' + post_filename + '.muted')
        return
    print('MUTE: ' + post_filename + '.muted file added')
# if the post is in the recent posts cache then mark it as muted
if recent_posts_cache.get('index'):
post_id = \
remove_id_ending(post_json_object['id']).replace('/', '#')
if post_id in recent_posts_cache['index']:
print('MUTE: ' + post_id + ' is in recent posts cache')
if recent_posts_cache.get('json'):
recent_posts_cache['json'][post_id] = json.dumps(post_json_object)
print('MUTE: ' + post_id +
' marked as muted in recent posts memory cache')
if recent_posts_cache.get('html'):
if recent_posts_cache['html'].get(post_id):
del recent_posts_cache['html'][post_id]
print('MUTE: ' + post_id + ' removed cached html')
    if also_update_post_id:
        post_filename = locate_post(base_dir, nickname, domain,
                                    also_update_post_id)
        if os.path.isfile(post_filename):
            post_json_obj = load_json(post_filename)
            cached_post_filename = \
                get_cached_post_filename(base_dir, nickname, domain,
                                         post_json_obj)
            if cached_post_filename:
                if os.path.isfile(cached_post_filename):
                    try:
                        os.remove(cached_post_filename)
                        print('MUTE: cached referenced post removed ' +
                              cached_post_filename)
                    except OSError:
                        print('EX: ' +
                              'MUTE cached referenced post not removed ' +
                              cached_post_filename)
if recent_posts_cache.get('json'):
if recent_posts_cache['json'].get(also_update_post_id):
del recent_posts_cache['json'][also_update_post_id]
print('MUTE: ' + also_update_post_id +
' removed referenced json')
if recent_posts_cache.get('html'):
if recent_posts_cache['html'].get(also_update_post_id):
del recent_posts_cache['html'][also_update_post_id]
print('MUTE: ' + also_update_post_id +
' removed referenced html')
def unmute_post(base_dir: str, nickname: str, domain: str, port: int,
http_prefix: str, post_id: str, recent_posts_cache: {},
debug: bool) -> None:
""" Unmutes the given post
"""
    post_filename = locate_post(base_dir, nickname, domain, post_id)
    if not post_filename:
        return
    post_json_object = load_json(post_filename)
    if not post_json_object:
        return
    mute_filename = post_filename + '.muted'
    if os.path.isfile(mute_filename):
        try:
            os.remove(mute_filename)
        except OSError:
            if debug:
                print('EX: unmute_post mute filename not deleted ' +
                      str(mute_filename))
        print('UNMUTE: ' + mute_filename + ' file removed')
    post_json_obj = post_json_object
    also_update_post_id = None
    if has_object_dict(post_json_object):
        post_json_obj = post_json_object['object']
    else:
        if has_object_string(post_json_object, debug):
            also_update_post_id = remove_id_ending(post_json_object['object'])

    if post_json_obj.get('conversation'):
        unmute_conversation(base_dir, nickname, domain,
                            post_json_obj['conversation'])
    elif post_json_obj.get('context'):
        unmute_conversation(base_dir, nickname, domain,
                            post_json_obj['context'])
    if post_json_obj.get('ignores'):
        domain_full = get_full_domain(domain, port)
        actor = local_actor_url(http_prefix, nickname, domain_full)
        total_items = 0
        if post_json_obj['ignores'].get('totalItems'):
            total_items = post_json_obj['ignores']['totalItems']
        items_list = post_json_obj['ignores']['items']
        for ignores_item in items_list:
            if ignores_item.get('actor'):
                if ignores_item['actor'] == actor:
                    if debug:
                        print('DEBUG: mute was removed for ' + actor)
                    items_list.remove(ignores_item)
                    break
        if total_items == 1:
            if debug:
                print('DEBUG: mute was removed from post')
            del post_json_obj['ignores']
        else:
            ig_it_len = len(post_json_obj['ignores']['items'])
            post_json_obj['ignores']['totalItems'] = ig_it_len
    post_json_obj['muted'] = False
    save_json(post_json_object, post_filename)
# remove cached post so that the muted version gets recreated
# with its content text and/or image
    cached_post_filename = \
        get_cached_post_filename(base_dir, nickname, domain, post_json_object)
    if cached_post_filename:
        if os.path.isfile(cached_post_filename):
            try:
                os.remove(cached_post_filename)
            except OSError:
                if debug:
                    print('EX: unmute_post cached post not deleted ' +
                          str(cached_post_filename))
# if the post is in the recent posts cache then mark it as unmuted
if recent_posts_cache.get('index'):
post_id = \
remove_id_ending(post_json_object['id']).replace('/', '#')
if post_id in recent_posts_cache['index']:
print('UNMUTE: ' + post_id + ' is in recent posts cache')
if recent_posts_cache.get('json'):
recent_posts_cache['json'][post_id] = json.dumps(post_json_object)
print('UNMUTE: ' + post_id +
' marked as unmuted in recent posts cache')
if recent_posts_cache.get('html'):
if recent_posts_cache['html'].get(post_id):
del recent_posts_cache['html'][post_id]
print('UNMUTE: ' + post_id + ' removed cached html')
    if also_update_post_id:
        post_filename = locate_post(base_dir, nickname, domain,
                                    also_update_post_id)
        if os.path.isfile(post_filename):
            post_json_obj = load_json(post_filename)
            cached_post_filename = \
                get_cached_post_filename(base_dir, nickname, domain,
                                         post_json_obj)
            if cached_post_filename:
                if os.path.isfile(cached_post_filename):
                    try:
                        os.remove(cached_post_filename)
                        print('MUTE: cached referenced post removed ' +
                              cached_post_filename)
                    except OSError:
                        if debug:
                            print('EX: ' +
                                  'unmute_post cached ref post not removed ' +
                                  str(cached_post_filename))
if recent_posts_cache.get('json'):
if recent_posts_cache['json'].get(also_update_post_id):
del recent_posts_cache['json'][also_update_post_id]
print('UNMUTE: ' +
also_update_post_id + ' removed referenced json')
if recent_posts_cache.get('html'):
if recent_posts_cache['html'].get(also_update_post_id):
del recent_posts_cache['html'][also_update_post_id]
print('UNMUTE: ' +
also_update_post_id + ' removed referenced html')
def outbox_mute(base_dir: str, http_prefix: str,
nickname: str, domain: str, port: int,
message_json: {}, debug: bool,
recent_posts_cache: {}) -> None:
"""When a mute is received by the outbox from c2s
"""
    if not message_json.get('type'):
        return
    if not has_actor(message_json, debug):
        return
    domain_full = get_full_domain(domain, port)
    if not message_json['actor'].endswith(domain_full + '/users/' + nickname):
        return
    if not message_json['type'] == 'Ignore':
        return
    if not has_object_string(message_json, debug):
        return
    if debug:
        print('DEBUG: c2s mute request arrived in outbox')

    message_id = remove_id_ending(message_json['object'])
    if '/statuses/' not in message_id:
        if debug:
            print('DEBUG: c2s mute object is not a status')
        return
    if not has_users_path(message_id):
        if debug:
            print('DEBUG: c2s mute object has no nickname')
        return
    domain = remove_domain_port(domain)
    post_filename = locate_post(base_dir, nickname, domain, message_id)
    if not post_filename:
        if debug:
            print('DEBUG: c2s mute post not found in inbox or outbox')
            print(message_id)
        return
    nickname_muted = get_nickname_from_actor(message_json['object'])
    if not nickname_muted:
        print('WARN: unable to find nickname in ' + message_json['object'])
        return

    mute_post(base_dir, nickname, domain, port,
              http_prefix, message_json['object'], recent_posts_cache,
              debug)

    if debug:
        print('DEBUG: post muted via c2s - ' + post_filename)


def outbox_undo_mute(base_dir: str, http_prefix: str,
nickname: str, domain: str, port: int,
message_json: {}, debug: bool,
recent_posts_cache: {}) -> None:
"""When an undo mute is received by the outbox from c2s
"""
    if not message_json.get('type'):
        return
    if not has_actor(message_json, debug):
        return
    domain_full = get_full_domain(domain, port)
    if not message_json['actor'].endswith(domain_full + '/users/' + nickname):
        return
    if not message_json['type'] == 'Undo':
        return
    if not has_object_string_type(message_json, debug):
        return
    if message_json['object']['type'] != 'Ignore':
        return
    if not isinstance(message_json['object']['object'], str):
        if debug:
            print('DEBUG: undo mute object is not a string')
        return
    if debug:
        print('DEBUG: c2s undo mute request arrived in outbox')

    message_id = remove_id_ending(message_json['object']['object'])
    if '/statuses/' not in message_id:
        if debug:
            print('DEBUG: c2s undo mute object is not a status')
        return
    if not has_users_path(message_id):
        if debug:
            print('DEBUG: c2s undo mute object has no nickname')
        return
    domain = remove_domain_port(domain)
    post_filename = locate_post(base_dir, nickname, domain, message_id)
    if not post_filename:
        if debug:
            print('DEBUG: c2s undo mute post not found in inbox or outbox')
            print(message_id)
        return
    nickname_muted = get_nickname_from_actor(message_json['object']['object'])
    if not nickname_muted:
        print('WARN: unable to find nickname in ' +
              message_json['object']['object'])
        return
    unmute_post(base_dir, nickname, domain, port,
                http_prefix, message_json['object']['object'],
                recent_posts_cache, debug)
    if debug:
        print('DEBUG: post undo mute via c2s - ' + post_filename)


def broch_mode_is_active(base_dir: str) -> bool:
    """Returns true if broch mode is active
    """
    allow_filename = base_dir + '/accounts/allowedinstances.txt'
    return os.path.isfile(allow_filename)


def set_broch_mode(base_dir: str, domain_full: str, enabled: bool) -> None:
    """Broch mode can be used to lock down the instance during
    a period of time when it is temporarily under attack.
    For example, where an adversary is constantly spinning up new
    instances.
    It surveys the following and follower lists of all accounts and
    uses them to construct an instance level allow list. Anything
    arriving which is not from one of the allowed domains will then
    be dropped
    """
    allow_filename = base_dir + '/accounts/allowedinstances.txt'

    if not enabled:
        # remove instance allow list
        if os.path.isfile(allow_filename):
            try:
                os.remove(allow_filename)
            except OSError:
                print('EX: set_broch_mode allow file not deleted ' +
                      str(allow_filename))
            print('Broch mode turned off')
    else:
        if os.path.isfile(allow_filename):
            last_modified = file_last_modified(allow_filename)
            print('Broch mode already activated ' + last_modified)
            return
        # generate instance allow list
        allowed_domains = [domain_full]
        follow_files = ('following.txt', 'followers.txt')
        for _, dirs, _ in os.walk(base_dir + '/accounts'):
            for acct in dirs:
                if not is_account_dir(acct):
                    continue
                account_dir = os.path.join(base_dir + '/accounts', acct)
                for follow_file_type in follow_files:
                    following_filename = account_dir + '/' + follow_file_type
                    if not os.path.isfile(following_filename):
                        continue
                    try:
                        with open(following_filename, 'r',
                                  encoding='utf-8') as foll_file:
                            follow_list = foll_file.readlines()
                            for handle in follow_list:
                                if '@' not in handle:
                                    continue
                                handle = remove_eol(handle)
                                handle_domain = handle.split('@')[1]
                                if handle_domain not in allowed_domains:
                                    allowed_domains.append(handle_domain)
                    except OSError as ex:
                        print('EX: failed to read ' + following_filename +
                              ' ' + str(ex))
            break
        # write the allow file
        try:
            with open(allow_filename, 'w+',
                      encoding='utf-8') as allow_file:
                # domain_full is already the first entry in allowed_domains,
                # so each allowed domain is written exactly once
                for allowed in allowed_domains:
                    allow_file.write(allowed + '\n')
            print('Broch mode enabled')
        except OSError as ex:
            print('EX: Broch mode not enabled due to file write ' + str(ex))
            return

    set_config_param(base_dir, "brochMode", enabled)


def broch_mode_lapses(base_dir: str, lapse_days: int) -> bool:
    """After broch mode is enabled it automatically
    lapses after a period of time
    """
    allow_filename = base_dir + '/accounts/allowedinstances.txt'
    if not os.path.isfile(allow_filename):
        return False
    last_modified = file_last_modified(allow_filename)
    modified_date = \
        date_from_string_format(last_modified, ["%Y-%m-%dT%H:%M:%S%z"])
    if not modified_date:
        print('EX: broch_mode_lapses date not parsed ' + str(last_modified))
        return False
    curr_time = date_utcnow()
    days_since_broch = (curr_time - modified_date).days
    if days_since_broch >= lapse_days:
        removed = False
        try:
            os.remove(allow_filename)
            removed = True
        except OSError:
            print('EX: broch_mode_lapses allow file not deleted ' +
                  str(allow_filename))
        if removed:
            set_config_param(base_dir, "brochMode", False)
            print('Broch mode has elapsed')
            return True
    return False


def import_blocking_file(base_dir: str, nickname: str, domain: str,
                         lines: []) -> bool:
    """Imports blocked domains for a given account
    """
    if not lines:
        return False
    if len(lines) < 2:
        return False
    if not lines[0].startswith('#domain,#') or \
       'comment' not in lines[0]:
        return False
    fieldnames = lines[0].split(',')
    comment_field_index = 0
    for field_str in fieldnames:
        if 'comment' in field_str:
            break
        comment_field_index += 1
    if comment_field_index >= len(fieldnames):
        return False
    account_directory = acct_dir(base_dir, nickname, domain)
    blocking_filename = \
        account_directory + '/blocking.txt'
    blocking_reasons_filename = \
        account_directory + '/blocking_reasons.txt'
    existing_lines = []
    if os.path.isfile(blocking_filename):
        try:
            with open(blocking_filename, 'r', encoding='utf-8') as fp_blocks:
                existing_lines = fp_blocks.read().splitlines()
        except OSError:
            print('EX: ' +
                  'unable to import existing blocked instances from file ' +
                  blocking_filename)
    existing_reasons = []
    if os.path.isfile(blocking_reasons_filename):
        try:
            with open(blocking_reasons_filename,
                      'r', encoding='utf-8') as fp_blocks:
                existing_reasons = fp_blocks.read().splitlines()
        except OSError:
            print('EX: ' +
                  'unable to import existing ' +
                  'blocked instance reasons from file ' +
                  blocking_reasons_filename)
    append_blocks = []
    append_reasons = []
    for line_str in lines:
        if line_str.startswith('#'):
            continue
        block_fields = line_str.split(',')
        blocked_domain_name = block_fields[0].strip()
        if ' ' in blocked_domain_name or \
           '.' not in blocked_domain_name:
            continue
        if blocked_domain_name in existing_lines:
            # already blocked
            continue
        append_blocks.append(blocked_domain_name)
        blocked_comment = ''
        if '"' in line_str:
            quote_section = line_str.split('"')
            if len(quote_section) > 1:
                blocked_comment = quote_section[1]
                append_reasons.append(blocked_domain_name + ' ' +
                                      blocked_comment)
        if not blocked_comment:
            if len(block_fields) > comment_field_index:
                blocked_comment = block_fields[comment_field_index].strip()
                if blocked_comment:
                    if blocked_comment.startswith('"'):
                        blocked_comment = blocked_comment.replace('"', '')
                    if blocked_comment not in existing_reasons:
                        append_reasons.append(blocked_domain_name + ' ' +
                                              blocked_comment)
    if not append_blocks:
        return True
    try:
        with open(blocking_filename, 'a+', encoding='utf-8') as fp_blocks:
            for new_block in append_blocks:
                fp_blocks.write(new_block + '\n')
    except OSError:
        print('EX: ' +
              'unable to append imported blocks to ' +
              blocking_filename)
    try:
        with open(blocking_reasons_filename, 'a+',
                  encoding='utf-8') as fp_blocks:
            for new_reason in append_reasons:
                fp_blocks.write(new_reason + '\n')
    except OSError:
        print('EX: ' +
              'unable to append imported block reasons to ' +
              blocking_reasons_filename)
    return True


def export_blocking_file(base_dir: str, nickname: str, domain: str) -> str:
    """exports account level blocks in a csv format
    """
    account_directory = acct_dir(base_dir, nickname, domain)
    blocking_filename = \
        account_directory + '/blocking.txt'
    blocking_reasons_filename = \
        account_directory + '/blocking_reasons.txt'
    blocks_header = \
        '#domain,#severity,#reject_media,#reject_reports,' + \
        '#public_comment,#obfuscate\n'
    if not os.path.isfile(blocking_filename):
        return blocks_header
    blocking_lines = []
    if os.path.isfile(blocking_filename):
        try:
            with open(blocking_filename, 'r', encoding='utf-8') as fp_block:
                blocking_lines = fp_block.read().splitlines()
        except OSError:
            print('EX: export_blocks failed to read ' + blocking_filename)
    blocking_reasons = []
    if os.path.isfile(blocking_reasons_filename):
        try:
            with open(blocking_reasons_filename, 'r',
                      encoding='utf-8') as fp_block:
                blocking_reasons = fp_block.read().splitlines()
        except OSError:
            print('EX: export_blocks failed to read ' +
                  blocking_reasons_filename)
    blocks_str = blocks_header
    for blocked_domain in blocking_lines:
        blocked_domain = blocked_domain.strip()
        if blocked_domain.startswith('#'):
            continue
        reason_str = ''
        for reason_line in blocking_reasons:
            if reason_line.startswith(blocked_domain + ' '):
                reason_str = reason_line.split(' ', 1)[1]
                break
        blocks_str += \
            blocked_domain + ',suspend,false,false,"' + \
            reason_str + '",false\n'
    return blocks_str


def get_blocks_via_server(session, nickname: str, password: str,
                          domain: str, port: int,
                          http_prefix: str, page_number: int, debug: bool,
                          version: str,
                          signing_priv_key_pem: str) -> {}:
    """Returns the blocked collection via c2s
    https://codeberg.org/fediverse/fep/src/branch/main/fep/c648/fep-c648.md
    """
    if not session:
        print('WARN: No session for get_blocks_via_server')
        return 6
    auth_header = create_basic_auth_header(nickname, password)
    headers = {
        'host': domain,
        'Content-type': 'application/json',
        'Authorization': auth_header,
        'Accept': 'application/json'
    }
    domain_full = get_full_domain(domain, port)
    url = local_actor_url(http_prefix, nickname, domain_full) + \
        '/blocked?page=' + str(page_number)
    if debug:
        print('Blocked collection request to: ' + url)
    blocked_json = get_json(signing_priv_key_pem, session, url, headers, None,
                            debug, version, http_prefix, None)
    if not get_json_valid(blocked_json):
        if debug:
            print('DEBUG: GET blocked collection failed for c2s to ' + url)
        # return 5
    if debug:
        print('DEBUG: c2s GET blocked collection success')
    return blocked_json


def load_blocked_military(base_dir: str) -> {}:
    """Loads a list of nicknames for accounts which block military instances
    """
    block_military_filename = base_dir + '/accounts/block_military.txt'
    nicknames_list = []
    if os.path.isfile(block_military_filename):
        try:
            with open(block_military_filename, 'r',
                      encoding='utf-8') as fp_mil:
                nicknames_list = fp_mil.read()
        except OSError:
            print('EX: error while reading block military file')
    if not nicknames_list:
        return {}
    nicknames_list = nicknames_list.split('\n')
    nicknames_dict = {}
    for nickname in nicknames_list:
        if not nickname:
            # skip blank lines
            continue
        nicknames_dict[nickname] = True
    return nicknames_dict


def save_blocked_military(base_dir: str, block_military: {}) -> None:
    """Saves a list of nicknames for accounts which block military instances
    """
    nicknames_str = ''
    for nickname, _ in block_military.items():
        nicknames_str += nickname + '\n'
    block_military_filename = base_dir + '/accounts/block_military.txt'
    try:
        with open(block_military_filename, 'w+',
                  encoding='utf-8') as fp_mil:
            fp_mil.write(nicknames_str)
    except OSError:
        print('EX: error while saving block military file')


def get_mil_domains_list() -> []:
    """returns a list of military top level domains
    """
    return ('army', 'navy', 'airforce', 'mil')


def contains_military_domain(message_str: str) -> bool:
    """Returns true if the given string contains a military domain
    """
    mil_domains = get_mil_domains_list()
    for tld in mil_domains:
        if '.' + tld + '"' in message_str or \
           '.' + tld + '/' in message_str:
            return True
    return False