mirror of https://gitlab.com/bashrc2/epicyon
Tidying

parent 52a7cb0fa9
commit 75602209a4
blog.py (4 changed lines)

@@ -704,7 +704,7 @@ def _no_of_blog_accounts(base_dir: str) -> int:
     """Returns the number of blog accounts
     """
     ctr = 0
-    for subdir, dirs, files in os.walk(base_dir + '/accounts'):
+    for _, dirs, _ in os.walk(base_dir + '/accounts'):
         for acct in dirs:
             if not is_account_dir(acct):
                 continue

@@ -719,7 +719,7 @@ def _no_of_blog_accounts(base_dir: str) -> int:
 def _single_blog_account_nickname(base_dir: str) -> str:
     """Returns the nickname of a single blog account
     """
-    for subdir, dirs, files in os.walk(base_dir + '/accounts'):
+    for _, dirs, _ in os.walk(base_dir + '/accounts'):
         for acct in dirs:
             if not is_account_dir(acct):
                 continue
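
Both hunks above make the same change: loop variables returned by os.walk() that the body never uses are replaced with the `_` placeholder, the conventional way to silence unused-variable warnings without changing behaviour. A minimal, self-contained sketch of the idiom; the path and the '@' check are stand-ins for the real accounts directory and is_account_dir() test:

    import os

    blog_count = 0
    # only the directory names are used, so the walk root and the
    # file list are discarded with '_'
    for _, dirs, _ in os.walk('/var/lib/epicyon/accounts'):
        for acct in dirs:
            if '@' in acct:
                blog_count += 1
        break  # only the top level of the tree is needed in this sketch
    print(blog_count)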
@@ -50,7 +50,7 @@ def get_hashtag_categories(base_dir: str,
     days_since_epoch = (curr_time - datetime.datetime(1970, 1, 1)).days
     recently = days_since_epoch - 1

-    for subdir, dirs, files in os.walk(base_dir + '/tags'):
+    for _, _, files in os.walk(base_dir + '/tags'):
         for catfile in files:
             if not catfile.endswith('.category'):
                 continue
content.py (13 changed lines)

@@ -380,7 +380,6 @@ def _update_common_emoji(base_dir: str, emoji_content: str) -> None:
                 common_emoji = fp_emoji.readlines()
         except OSError:
             print('EX: unable to load common emoji file')
-            pass
     if common_emoji:
         new_common_emoji = []
         emoji_found = False

@@ -1579,7 +1578,7 @@ def import_emoji(base_dir: str, import_filename: str, session) -> None:
 def content_diff(content: str, prev_content: str) -> str:
     """Returns a diff for the given content
     """
-    d = difflib.Differ()
+    cdiff = difflib.Differ()
     text1_lines = content.splitlines()
     text1_sentences = []
     for line in text1_lines:

@@ -1594,7 +1593,7 @@ def content_diff(content: str, prev_content: str) -> str:
         for sentence in sentences:
             text2_sentences.append(sentence.strip())

-    diff = d.compare(text1_sentences, text2_sentences)
+    diff = cdiff.compare(text1_sentences, text2_sentences)

     diff_text = ''
     for line in diff:

@@ -1629,7 +1628,7 @@ def create_edits_html(edits_json: {}, post_json_object: {},
     if not post_json_object['object'].get('contentMap'):
         return ''
     edit_dates_list = []
-    for modified, item in edits_json.items():
+    for modified, _ in edits_json.items():
         edit_dates_list.append(modified)
     edit_dates_list.sort(reverse=True)
     edits_str = ''

@@ -1708,11 +1707,11 @@ def remove_script(content: str, log_filename: str,
     if log_filename and actor:
         # write the detected script to a log file
         log_str = actor + ' ' + url + ' ' + text + '\n'
-        writeType = 'a+'
+        write_type = 'a+'
         if os.path.isfile(log_filename):
-            writeType = 'w+'
+            write_type = 'w+'
        try:
-            with open(log_filename, writeType) as fp_log:
+            with open(log_filename, write_type) as fp_log:
                 fp_log.write(log_str)
         except OSError:
             print('EX: cannot append to svg script log')
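
content_diff() above builds its diff at sentence granularity with difflib.Differ; renaming d to cdiff only makes the variable more descriptive. A small sketch of the same difflib usage, with made-up sentences:

    import difflib

    text1_sentences = ['The cat sat on the mat.', 'It was warm.']
    text2_sentences = ['The cat sat on the mat.', 'It was cold.']

    cdiff = difflib.Differ()
    # compare() yields lines prefixed with '  ', '- ', '+ ' or '? '
    for line in cdiff.compare(text1_sentences, text2_sentences):
        print(line)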
cwtch.py (2 changed lines)

@@ -100,7 +100,7 @@ def set_cwtch_address(actor_json: {}, cwtch_address: str) -> None:
             continue
         if not property_value['type'].endswith('PropertyValue'):
             continue
-        prop_value_name, prop_value = \
+        prop_value_name, _ = \
             get_attachment_property_value(property_value)
         if not prop_value_name:
             continue
@@ -28,7 +28,7 @@ from posts import get_person_box

 def send_delete_via_server(base_dir: str, session,
                            from_nickname: str, password: str,
-                           from_domain: str, fromPort: int,
+                           from_domain: str, from_port: int,
                            http_prefix: str, delete_object_url: str,
                            cached_webfingers: {}, person_cache: {},
                            debug: bool, project_version: str,

@@ -39,7 +39,7 @@ def send_delete_via_server(base_dir: str, session,
         print('WARN: No session for send_delete_via_server')
         return 6

-    from_domain_full = get_full_domain(from_domain, fromPort)
+    from_domain_full = get_full_domain(from_domain, from_port)

     actor = local_actor_url(http_prefix, from_nickname, from_domain_full)
     to_url = 'https://www.w3.org/ns/activitystreams#Public'

@@ -178,8 +178,7 @@ def outbox_delete(base_dir: str, http_prefix: str,
 def remove_old_hashtags(base_dir: str, max_months: int) -> str:
     """Remove old hashtags
     """
-    if max_months > 11:
-        max_months = 11
+    max_months = min(max_months, 11)
     max_days_since_epoch = \
         (datetime.utcnow() - datetime(1970, 1 + max_months, 1)).days
     remove_hashtags = []
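
The remove_old_hashtags() change is one of the recurring tidyings in this commit: a two-line if-assignment that clamps a value is collapsed into a single min() or max() call. A quick sketch showing the equivalence:

    max_months = 14

    # before
    if max_months > 11:
        max_months = 11

    # after: identical result in one expression
    max_months = min(max_months, 11)

    # the same trick is used later for lower bounds,
    # e.g. last_page = max(last_page, 1)
    assert min(14, 11) == 11 and max(0, 1) == 1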
@@ -65,7 +65,7 @@ def set_enigma_pub_key(actor_json: {}, enigma_pub_key: str) -> None:
             property_found = property_value
             break
     if property_found:
-        actor_json['attachment'].remove(property_value)
+        actor_json['attachment'].remove(property_found)
     if remove_key:
         return

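The one-line change above tidies a find-then-remove idiom: the loop records the matching attachment in property_found and breaks, and the removal afterwards now references that saved value rather than the loop variable. A minimal sketch with a made-up attachment list:

    actor_json = {
        'attachment': [
            {'name': 'Website', 'value': 'https://example.net'},
            {'name': 'Enigma', 'value': 'ABCDEF'}
        ]
    }

    property_found = None
    for property_value in actor_json['attachment']:
        if property_value['name'].lower().startswith('enigma'):
            property_found = property_value
            break
    if property_found:
        # remove the saved reference, not the loop variable
        actor_json['attachment'].remove(property_found)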
feeds.py (4 changed lines)

@@ -9,6 +9,8 @@ __module_group__ = "RSS Feeds"
+
+
 def rss2tag_header(hashtag: str, http_prefix: str, domain_full: str) -> str:
     """Header for rss 2
     """
     return \
         "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>" + \
         "<rss version=\"2.0\">" + \

@@ -19,4 +21,6 @@ def rss2tag_header(hashtag: str, http_prefix: str, domain_full: str) -> str:
+
+
 def rss2tag_footer() -> str:
     """Footer for rss 2
     """
     return '</channel></rss>'
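
rss2tag_header() and rss2tag_footer() just bracket a per-hashtag RSS 2.0 feed; the added lines are blank-line spacing between the top-level functions. A sketch of how such header/footer helpers are typically combined; the wrapper and its item markup are illustrative, not taken from the codebase:

    def build_tag_feed(hashtag: str, http_prefix: str,
                       domain_full: str, items: []) -> str:
        # hypothetical wrapper: header + <item> entries + footer
        feed = rss2tag_header(hashtag, http_prefix, domain_full)
        for title, link in items:
            feed += '<item><title>' + title + '</title>' + \
                '<link>' + link + '</link></item>'
        return feed + rss2tag_footer()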
follow.py (55 changed lines)

@@ -560,8 +560,7 @@ def get_following_feed(base_dir: str, domain: str, port: int, path: str,
         curr_page += 1
     following['totalItems'] = total_ctr
     last_page = int(total_ctr / follows_per_page)
-    if last_page < 1:
-        last_page = 1
+    last_page = max(last_page, 1)
     if next_page_number > last_page:
         following['next'] = \
             local_actor_url(http_prefix, nickname, domain) + \

@@ -731,10 +730,10 @@ def followed_account_accepts(session, base_dir: str, http_prefix: str,
                              port: int,
                              nickname: str, domain: str, from_port: int,
                              person_url: str, federation_list: [],
-                             follow_json: {}, send_threads: [], postLog: [],
+                             follow_json: {}, send_threads: [], post_log: [],
                              cached_webfingers: {}, person_cache: {},
                              debug: bool, project_version: str,
-                             removeFollowActivity: bool,
+                             remove_follow_activity: bool,
                              signing_priv_key_pem: str,
                              curr_domain: str,
                              onion_domain: str, i2p_domain: str):

@@ -759,7 +758,7 @@ def followed_account_accepts(session, base_dir: str, http_prefix: str,
               accept_handle + ' port ' + str(from_port))
     client_to_server = False

-    if removeFollowActivity:
+    if remove_follow_activity:
         # remove the follow request json
         follow_activity_filename = \
             acct_dir(base_dir, nickname_to_follow, domain_to_follow) + \

@@ -783,7 +782,7 @@ def followed_account_accepts(session, base_dir: str, http_prefix: str,
                             nickname, domain, from_port, '',
                             http_prefix, True, client_to_server,
                             federation_list,
-                            send_threads, postLog, cached_webfingers,
+                            send_threads, post_log, cached_webfingers,
                             person_cache, debug, project_version, None,
                             group_account, signing_priv_key_pem,
                             7856837, curr_domain, onion_domain, i2p_domain)

@@ -796,7 +795,7 @@ def followed_account_rejects(session, session_onion, session_i2p,
                              port: int,
                              nickname: str, domain: str, from_port: int,
                              federation_list: [],
-                             send_threads: [], postLog: [],
+                             send_threads: [], post_log: [],
                              cached_webfingers: {}, person_cache: {},
                              debug: bool, project_version: str,
                              signing_priv_key_pem: str):

@@ -853,7 +852,7 @@ def followed_account_rejects(session, session_onion, session_i2p,
                             nickname, domain, from_port, '',
                             http_prefix, True, client_to_server,
                             federation_list,
-                            send_threads, postLog, cached_webfingers,
+                            send_threads, post_log, cached_webfingers,
                             person_cache, debug, project_version, None,
                             group_account, signing_priv_key_pem,
                             6393063,

@@ -865,10 +864,10 @@ def send_follow_request(session, base_dir: str,
                        sender_domain: str, sender_port: int,
                        http_prefix: str,
                        follow_nickname: str, follow_domain: str,
-                       followedActor: str,
-                       followPort: int, followHttpPrefix: str,
+                       followed_actor: str,
+                       follow_port: int, follow_http_prefix: str,
                        client_to_server: bool, federation_list: [],
-                       send_threads: [], postLog: [], cached_webfingers: {},
+                       send_threads: [], post_log: [], cached_webfingers: {},
                        person_cache: {}, debug: bool,
                        project_version: str, signing_priv_key_pem: str,
                        curr_domain: str,

@@ -885,22 +884,22 @@ def send_follow_request(session, base_dir: str,
     full_domain = get_full_domain(sender_domain, sender_port)
     follow_actor = local_actor_url(http_prefix, nickname, full_domain)

-    request_domain = get_full_domain(follow_domain, followPort)
+    request_domain = get_full_domain(follow_domain, follow_port)

     status_number, _ = get_status_number()

     group_account = False
     if follow_nickname:
-        followed_id = followedActor
+        followed_id = followed_actor
         follow_handle = follow_nickname + '@' + request_domain
-        group_account = has_group_type(base_dir, followedActor, person_cache)
+        group_account = has_group_type(base_dir, followed_actor, person_cache)
         if group_account:
             follow_handle = '!' + follow_handle
             print('Follow request being sent to group account')
     else:
         if debug:
             print('DEBUG: send_follow_request - assuming single user instance')
-        followed_id = followHttpPrefix + '://' + request_domain
+        followed_id = follow_http_prefix + '://' + request_domain
         single_user_nickname = 'dev'
         follow_handle = single_user_nickname + '@' + request_domain

@@ -947,11 +946,11 @@ def send_follow_request(session, base_dir: str,

     send_signed_json(new_follow_json, session, base_dir,
                      nickname, sender_domain, sender_port,
-                     follow_nickname, follow_domain, followPort,
+                     follow_nickname, follow_domain, follow_port,
                      'https://www.w3.org/ns/activitystreams#Public',
                      http_prefix, True, client_to_server,
                      federation_list,
-                     send_threads, postLog, cached_webfingers, person_cache,
+                     send_threads, post_log, cached_webfingers, person_cache,
                      debug, project_version, None, group_account,
                      signing_priv_key_pem, 8234389,
                      curr_domain, onion_domain, i2p_domain)

@@ -963,7 +962,7 @@ def send_follow_request_via_server(base_dir: str, session,
                                    from_nickname: str, password: str,
                                    from_domain: str, from_port: int,
                                    follow_nickname: str, follow_domain: str,
-                                   followPort: int,
+                                   follow_port: int,
                                    http_prefix: str,
                                    cached_webfingers: {}, person_cache: {},
                                    debug: bool, project_version: str,

@@ -976,7 +975,7 @@ def send_follow_request_via_server(base_dir: str, session,

     from_domain_full = get_full_domain(from_domain, from_port)

-    follow_domain_full = get_full_domain(follow_domain, followPort)
+    follow_domain_full = get_full_domain(follow_domain, follow_port)

     follow_actor = \
         local_actor_url(http_prefix, from_nickname, from_domain_full)

@@ -1055,7 +1054,7 @@ def send_unfollow_request_via_server(base_dir: str, session,
                                      from_nickname: str, password: str,
                                      from_domain: str, from_port: int,
                                      follow_nickname: str, follow_domain: str,
-                                     followPort: int,
+                                     follow_port: int,
                                      http_prefix: str,
                                      cached_webfingers: {}, person_cache: {},
                                      debug: bool, project_version: str,

@@ -1067,13 +1066,13 @@ def send_unfollow_request_via_server(base_dir: str, session,
         return 6

     from_domain_full = get_full_domain(from_domain, from_port)
-    follow_domain_full = get_full_domain(follow_domain, followPort)
+    follow_domain_full = get_full_domain(follow_domain, follow_port)

     follow_actor = \
         local_actor_url(http_prefix, from_nickname, from_domain_full)
     followed_id = \
         http_prefix + '://' + follow_domain_full + '/@' + follow_nickname
-    status_number, published = get_status_number()
+    status_number, _ = get_status_number()

     unfollow_json = {
         '@context': 'https://www.w3.org/ns/activitystreams',

@@ -1173,13 +1172,12 @@ def get_following_via_server(base_dir: str, session,
         'Authorization': auth_header
     }

-    if page_number < 1:
-        page_number = 1
+    page_number = max(page_number, 1)
     url = follow_actor + '/following?page=' + str(page_number)
-    followingJson = \
+    following_json = \
         get_json(signing_priv_key_pem, session, url, headers, {}, debug,
                  __version__, http_prefix, domain, 10, True)
-    if not followingJson:
+    if not following_json:
         if debug:
             print('DEBUG: GET following list failed for c2s to ' + url)
         return 5

@@ -1187,7 +1185,7 @@ def get_following_via_server(base_dir: str, session,
     if debug:
         print('DEBUG: c2s GET following list request success')

-    return followingJson
+    return following_json


 def get_followers_via_server(base_dir: str, session,

@@ -1255,8 +1253,7 @@ def get_follow_requests_via_server(base_dir: str, session,
         'Authorization': auth_header
     }

-    if page_number < 1:
-        page_number = 1
+    page_number = max(page_number, 1)
     url = follow_actor + '/followrequests?page=' + str(page_number)
     followers_json = \
         get_json(signing_priv_key_pem, session, url, headers, {}, debug,
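
Several of the follow.py hunks apply the same max() clamp to page numbers before building the client-to-server collection URL. A small sketch of that pattern; the helper name is hypothetical:

    def following_url(follow_actor: str, page_number: int) -> str:
        # clamp to the first page instead of the old two-line if-statement
        page_number = max(page_number, 1)
        return follow_actor + '/following?page=' + str(page_number)

    print(following_url('https://example.net/users/alice', 0))
    # https://example.net/users/alice/following?page=1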
httpsig.py (64 changed lines)

@@ -62,7 +62,7 @@ def get_digest_algorithm_from_headers(http_headers: {}) -> str:
     return 'rsa-sha256'


-def sign_post_headers(dateStr: str, private_key_pem: str,
+def sign_post_headers(date_str: str, private_key_pem: str,
                       nickname: str, domain: str, port: int,
                       to_domain: str, to_port: int,
                       path: str, http_prefix: str,

@@ -76,8 +76,8 @@ def sign_post_headers(dateStr: str, private_key_pem: str,

     to_domain = get_full_domain(to_domain, to_port)

-    if not dateStr:
-        dateStr = strftime("%a, %d %b %Y %H:%M:%S %Z", gmtime())
+    if not date_str:
+        date_str = strftime("%a, %d %b %Y %H:%M:%S %Z", gmtime())
     if nickname != domain and nickname.lower() != 'actor':
         key_id = local_actor_url(http_prefix, nickname, domain)
     else:

@@ -88,7 +88,7 @@ def sign_post_headers(dateStr: str, private_key_pem: str,
         headers = {
             '(request-target)': f'get {path}',
             'host': to_domain,
-            'date': dateStr,
+            'date': date_str,
             'accept': content_type
         }
     else:

@@ -99,7 +99,7 @@ def sign_post_headers(dateStr: str, private_key_pem: str,
         headers = {
             '(request-target)': f'post {path}',
             'host': to_domain,
-            'date': dateStr,
+            'date': date_str,
             'digest': f'{digest_prefix}={body_digest}',
             'content-type': 'application/activity+json',
             'content-length': str(content_length)

@@ -138,7 +138,7 @@ def sign_post_headers(dateStr: str, private_key_pem: str,
     return signature_header


-def sign_post_headers_new(dateStr: str, private_key_pem: str,
+def sign_post_headers_new(date_str: str, private_key_pem: str,
                           nickname: str,
                           domain: str, port: int,
                           to_domain: str, to_port: int,

@@ -157,11 +157,11 @@ def sign_post_headers_new(dateStr: str, private_key_pem: str,
     to_domain = get_full_domain(to_domain, to_port)

     time_format = "%a, %d %b %Y %H:%M:%S %Z"
-    if not dateStr:
+    if not date_str:
         curr_time = gmtime()
-        dateStr = strftime(time_format, curr_time)
+        date_str = strftime(time_format, curr_time)
     else:
-        curr_time = datetime.datetime.strptime(dateStr, time_format)
+        curr_time = datetime.datetime.strptime(date_str, time_format)
     seconds_since_epoch = \
         int((curr_time - datetime.datetime(1970, 1, 1)).total_seconds())
     key_id = local_actor_url(http_prefix, nickname, domain) + '#main-key'

@@ -170,7 +170,7 @@ def sign_post_headers_new(dateStr: str, private_key_pem: str,
             '@request-target': f'get {path}',
             '@created': str(seconds_since_epoch),
             'host': to_domain,
-            'date': dateStr
+            'date': date_str
         }
     else:
         body_digest = message_content_digest(message_body_json_str,

@@ -181,7 +181,7 @@ def sign_post_headers_new(dateStr: str, private_key_pem: str,
             '@request-target': f'post {path}',
             '@created': str(seconds_since_epoch),
             'host': to_domain,
-            'date': dateStr,
+            'date': date_str,
             'digest': f'{digest_prefix}={body_digest}',
             'content-type': 'application/activity+json',
             'content-length': str(content_length)

@@ -234,10 +234,10 @@ def sign_post_headers_new(dateStr: str, private_key_pem: str,
     return signature_index_header, signature_header


-def create_signed_header(dateStr: str, private_key_pem: str, nickname: str,
+def create_signed_header(date_str: str, private_key_pem: str, nickname: str,
                          domain: str, port: int,
                          to_domain: str, to_port: int,
-                         path: str, http_prefix: str, withDigest: bool,
+                         path: str, http_prefix: str, with_digest: bool,
                          message_body_json_str: str,
                          content_type: str) -> {}:
     """Note that the domain is the destination, not the sender

@@ -247,22 +247,22 @@ def create_signed_header(dateStr: str, private_key_pem: str, nickname: str,
     header_domain = get_full_domain(to_domain, to_port)

     # if no date is given then create one
-    if not dateStr:
-        dateStr = strftime("%a, %d %b %Y %H:%M:%S %Z", gmtime())
+    if not date_str:
+        date_str = strftime("%a, %d %b %Y %H:%M:%S %Z", gmtime())

     # Content-Type or Accept header
     if not content_type:
         content_type = 'application/activity+json'

-    if not withDigest:
+    if not with_digest:
         headers = {
             '(request-target)': f'get {path}',
             'host': header_domain,
-            'date': dateStr,
+            'date': date_str,
             'accept': content_type
         }
         signature_header = \
-            sign_post_headers(dateStr, private_key_pem, nickname,
+            sign_post_headers(date_str, private_key_pem, nickname,
                               domain, port, to_domain, to_port,
                               path, http_prefix, None, content_type,
                               algorithm, None)

@@ -274,13 +274,13 @@ def create_signed_header(dateStr: str, private_key_pem: str, nickname: str,
         headers = {
             '(request-target)': f'post {path}',
             'host': header_domain,
-            'date': dateStr,
+            'date': date_str,
             'digest': f'{digest_prefix}={body_digest}',
             'content-length': str(content_length),
             'content-type': content_type
         }
         signature_header = \
-            sign_post_headers(dateStr, private_key_pem, nickname,
+            sign_post_headers(date_str, private_key_pem, nickname,
                               domain, port,
                               to_domain, to_port,
                               path, http_prefix, message_body_json_str,

@@ -310,33 +310,33 @@ def _verify_recent_signature(signed_date_str: str) -> bool:


 def verify_post_headers(http_prefix: str,
-                        publicKeyPem: str, headers: dict,
-                        path: str, GETmethod: bool,
-                        messageBodyDigest: str,
+                        public_key_pem: str, headers: dict,
+                        path: str, get_method: bool,
+                        message_body_digest: str,
                         message_body_json_str: str, debug: bool,
-                        noRecencyCheck: bool = False) -> bool:
+                        no_recency_check: bool = False) -> bool:
     """Returns true or false depending on if the key that we plugged in here
     validates against the headers, method, and path.
-    publicKeyPem - the public key from an rsa key pair
+    public_key_pem - the public key from an rsa key pair
     headers - should be a dictionary of request headers
     path - the relative url that was requested from this site
-    GETmethod - GET or POST
+    get_method - GET or POST
     message_body_json_str - the received request body (used for digest)
     """

-    if GETmethod:
+    if get_method:
         method = 'GET'
     else:
         method = 'POST'

     if debug:
         print('DEBUG: verify_post_headers ' + method)
-        print('verify_post_headers publicKeyPem: ' + str(publicKeyPem))
+        print('verify_post_headers public_key_pem: ' + str(public_key_pem))
         print('verify_post_headers headers: ' + str(headers))
         print('verify_post_headers message_body_json_str: ' +
               str(message_body_json_str))

-    pubkey = load_pem_public_key(publicKeyPem.encode('utf-8'),
+    pubkey = load_pem_public_key(public_key_pem.encode('utf-8'),
                                  backend=default_backend())
     # Build a dictionary of the signature values
     if headers.get('Signature-Input') or headers.get('signature-input'):

@@ -422,8 +422,8 @@ def verify_post_headers(http_prefix: str,
             if debug:
                 print('http signature algorithm: ' + algorithm)
         elif signed_header == 'digest':
-            if messageBodyDigest:
-                body_digest = messageBodyDigest
+            if message_body_digest:
+                body_digest = message_body_digest
             else:
                 body_digest = \
                     message_content_digest(message_body_json_str,

@@ -446,7 +446,7 @@ def verify_post_headers(http_prefix: str,
                       ' not found in ' + str(headers))
         else:
             if headers.get(signed_header):
-                if signed_header == 'date' and not noRecencyCheck:
+                if signed_header == 'date' and not no_recency_check:
                     if not _verify_recent_signature(headers[signed_header]):
                         if debug:
                             print('DEBUG: ' +
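
Most of the httpsig.py hunks are camelCase-to-snake_case renames (dateStr to date_str, withDigest to with_digest, publicKeyPem to public_key_pem, and so on). The date handling they touch follows a single pattern: if no date string is supplied, the current GMT time is formatted with strftime in the HTTP header date format. A sketch of that fallback on its own; the helper name is hypothetical:

    from time import gmtime, strftime

    def default_http_date(date_str: str = None) -> str:
        # fall back to the current GMT time in the header format
        # used by sign_post_headers
        if not date_str:
            date_str = strftime("%a, %d %b %Y %H:%M:%S %Z", gmtime())
        return date_str

    print(default_http_date())   # e.g. 'Mon, 01 Jan 2024 12:00:00 GMT'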
reaction.py (17 changed lines)

@@ -182,8 +182,8 @@ def reaction_post(recent_posts_cache: {},

 def send_reaction_via_server(base_dir: str, session,
                              from_nickname: str, password: str,
-                             from_domain: str, fromPort: int,
-                             http_prefix: str, reactionUrl: str,
+                             from_domain: str, from_port: int,
+                             http_prefix: str, reaction_url: str,
                              emoji_content: str,
                              cached_webfingers: {}, person_cache: {},
                              debug: bool, project_version: str,

@@ -198,7 +198,7 @@ def send_reaction_via_server(base_dir: str, session,
               emoji_content + '"')
         return 7

-    from_domain_full = get_full_domain(from_domain, fromPort)
+    from_domain_full = get_full_domain(from_domain, from_port)

     actor = local_actor_url(http_prefix, from_nickname, from_domain_full)

@@ -206,7 +206,7 @@ def send_reaction_via_server(base_dir: str, session,
         "@context": "https://www.w3.org/ns/activitystreams",
         'type': 'EmojiReact',
         'actor': actor,
-        'object': reactionUrl,
+        'object': reaction_url,
         'content': emoji_content
     }

@@ -272,8 +272,8 @@ def send_reaction_via_server(base_dir: str, session,

 def send_undo_reaction_via_server(base_dir: str, session,
                                   from_nickname: str, password: str,
-                                  from_domain: str, fromPort: int,
-                                  http_prefix: str, reactionUrl: str,
+                                  from_domain: str, from_port: int,
+                                  http_prefix: str, reaction_url: str,
                                   emoji_content: str,
                                   cached_webfingers: {}, person_cache: {},
                                   debug: bool, project_version: str,

@@ -284,7 +284,7 @@ def send_undo_reaction_via_server(base_dir: str, session,
         print('WARN: No session for send_undo_reaction_via_server')
         return 6

-    from_domain_full = get_full_domain(from_domain, fromPort)
+    from_domain_full = get_full_domain(from_domain, from_port)

     actor = local_actor_url(http_prefix, from_nickname, from_domain_full)

@@ -295,7 +295,7 @@ def send_undo_reaction_via_server(base_dir: str, session,
         'object': {
             'type': 'EmojiReact',
             'actor': actor,
-            'object': reactionUrl,
+            'object': reaction_url,
             'content': emoji_content
         }
     }

@@ -457,7 +457,6 @@ def _update_common_reactions(base_dir: str, emoji_content: str) -> None:
                 common_reactions = fp_react.readlines()
         except OSError:
             print('EX: unable to load common reactions file')
-            pass
     if common_reactions:
         new_common_reactions = []
         reaction_found = False
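
send_reaction_via_server() and its undo counterpart only rename parameters here (fromPort to from_port, reactionUrl to reaction_url), but the hunks show the shape of the client-to-server activity being built. A sketch of that EmojiReact payload with placeholder actor and object values:

    # placeholder values; the real ones come from the signed-in account
    # and the post being reacted to
    new_reaction_json = {
        "@context": "https://www.w3.org/ns/activitystreams",
        'type': 'EmojiReact',
        'actor': 'https://example.net/users/alice',
        'object': 'https://example.net/users/bob/statuses/12345',
        'content': '👍'
    }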
@@ -49,7 +49,7 @@ def create_session(proxy_type: str):
         session.proxies = {}
         session.proxies['http'] = 'socks5h://localhost:7777'
         session.proxies['https'] = 'socks5h://localhost:7777'
-    elif proxy_type == 'ipfs' or proxy_type == 'ipns':
+    elif proxy_type in ('ipfs', 'ipns'):
         session.proxies = {}
         session.proxies['ipfs'] = 'socks5h://localhost:4001'
     # print('New session created with proxy ' + str(proxy_type))

@@ -79,8 +79,7 @@ def url_exists(session, url: str, timeout_sec: int = 3,
                              timeout=timeout_sec,
                              allow_redirects=False)
         if result:
-            if result.status_code == 200 or \
-                    result.status_code == 304:
+            if result.status_code in (200, 304):
                 return True
             print('url_exists for ' + url + ' returned ' +
                   str(result.status_code))

@@ -775,7 +774,7 @@ def get_method(method_name: str, xml_str: str,
         result = session.request(method_name, url, headers=session_headers,
                                  data=xml_str,
                                  params=session_params, timeout=timeout_sec)
-        if result.status_code != 200 and result.status_code != 207:
+        if result.status_code not in (200, 207):
             if result.status_code == 401:
                 print("WARN: get_method " + url + ' rejected by secure mode')
             elif result.status_code == 403:
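
All three hunks above rewrite status-code and proxy-type checks as tuple membership tests, which read better than chained comparisons and behave identically. A sketch of the equivalence:

    status_code = 304

    # before
    if status_code == 200 or status_code == 304:
        print('ok')

    # after
    if status_code in (200, 304):
        print('ok')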
shares.py (50 changed lines)

@@ -543,8 +543,7 @@ def get_shares_feed_for_person(base_dir: str,
         curr_page += 1
     shares['totalItems'] = total_ctr
     last_page = int(total_ctr / shares_per_page)
-    if last_page < 1:
-        last_page = 1
+    last_page = max(last_page, 1)
     if next_page_number > last_page:
         shares['next'] = \
             local_actor_url(http_prefix, nickname, domain) + \

@@ -554,14 +553,14 @@ def get_shares_feed_for_person(base_dir: str,

 def send_share_via_server(base_dir, session,
                           from_nickname: str, password: str,
-                          from_domain: str, fromPort: int,
+                          from_domain: str, from_port: int,
                           http_prefix: str, display_name: str,
                           summary: str, image_filename: str,
                           item_qty: float, item_type: str, item_category: str,
                           location: str, duration: str,
                           cached_webfingers: {}, person_cache: {},
                           debug: bool, project_version: str,
-                          itemPrice: str, item_currency: str,
+                          item_price: str, item_currency: str,
                          signing_priv_key_pem: str) -> {}:
     """Creates an item share via c2s
     """

@@ -570,14 +569,14 @@ def send_share_via_server(base_dir, session,
         return 6

     # convert $4.23 to 4.23 USD
-    new_item_price, new_item_currency = get_price_from_string(itemPrice)
-    if new_item_price != itemPrice:
-        itemPrice = new_item_price
+    new_item_price, new_item_currency = get_price_from_string(item_price)
+    if new_item_price != item_price:
+        item_price = new_item_price
     if not item_currency:
         if new_item_currency != item_currency:
             item_currency = new_item_currency

-    from_domain_full = get_full_domain(from_domain, fromPort)
+    from_domain_full = get_full_domain(from_domain, from_port)

     actor = local_actor_url(http_prefix, from_nickname, from_domain_full)
     to_url = 'https://www.w3.org/ns/activitystreams#Public'

@@ -597,7 +596,7 @@ def send_share_via_server(base_dir, session,
         "category": item_category,
         "location": location,
         "duration": duration,
-        "itemPrice": itemPrice,
+        "itemPrice": item_price,
         "itemCurrency": item_currency,
         'to': [to_url],
         'cc': [cc_url]

@@ -679,7 +678,7 @@ def send_share_via_server(base_dir, session,

 def send_undo_share_via_server(base_dir: str, session,
                                from_nickname: str, password: str,
-                               from_domain: str, fromPort: int,
+                               from_domain: str, from_port: int,
                                http_prefix: str, display_name: str,
                                cached_webfingers: {}, person_cache: {},
                                debug: bool, project_version: str,

@@ -690,7 +689,7 @@ def send_undo_share_via_server(base_dir: str, session,
         print('WARN: No session for send_undo_share_via_server')
         return 6

-    from_domain_full = get_full_domain(from_domain, fromPort)
+    from_domain_full = get_full_domain(from_domain, from_port)

     actor = local_actor_url(http_prefix, from_nickname, from_domain_full)
     to_url = 'https://www.w3.org/ns/activitystreams#Public'

@@ -774,14 +773,14 @@ def send_undo_share_via_server(base_dir: str, session,

 def send_wanted_via_server(base_dir, session,
                            from_nickname: str, password: str,
-                           from_domain: str, fromPort: int,
+                           from_domain: str, from_port: int,
                            http_prefix: str, display_name: str,
                            summary: str, image_filename: str,
                            item_qty: float, item_type: str, item_category: str,
                            location: str, duration: str,
                            cached_webfingers: {}, person_cache: {},
                            debug: bool, project_version: str,
-                           itemMaxPrice: str, item_currency: str,
+                           item_max_price: str, item_currency: str,
                            signing_priv_key_pem: str) -> {}:
     """Creates a wanted item via c2s
     """

@@ -790,14 +789,15 @@ def send_wanted_via_server(base_dir, session,
         return 6

     # convert $4.23 to 4.23 USD
-    new_item_max_price, new_item_currency = get_price_from_string(itemMaxPrice)
-    if new_item_max_price != itemMaxPrice:
-        itemMaxPrice = new_item_max_price
+    new_item_max_price, new_item_currency = \
+        get_price_from_string(item_max_price)
+    if new_item_max_price != item_max_price:
+        item_max_price = new_item_max_price
     if not item_currency:
         if new_item_currency != item_currency:
             item_currency = new_item_currency

-    from_domain_full = get_full_domain(from_domain, fromPort)
+    from_domain_full = get_full_domain(from_domain, from_port)

     actor = local_actor_url(http_prefix, from_nickname, from_domain_full)
     to_url = 'https://www.w3.org/ns/activitystreams#Public'

@@ -817,7 +817,7 @@ def send_wanted_via_server(base_dir, session,
         "category": item_category,
         "location": location,
         "duration": duration,
-        "itemPrice": itemMaxPrice,
+        "itemPrice": item_max_price,
         "itemCurrency": item_currency,
         'to': [to_url],
         'cc': [cc_url]

@@ -899,7 +899,7 @@ def send_wanted_via_server(base_dir, session,

 def send_undo_wanted_via_server(base_dir: str, session,
                                 from_nickname: str, password: str,
-                                from_domain: str, fromPort: int,
+                                from_domain: str, from_port: int,
                                 http_prefix: str, display_name: str,
                                 cached_webfingers: {}, person_cache: {},
                                 debug: bool, project_version: str,

@@ -910,7 +910,7 @@ def send_undo_wanted_via_server(base_dir: str, session,
         print('WARN: No session for send_undo_wanted_via_server')
         return 6

-    from_domain_full = get_full_domain(from_domain, fromPort)
+    from_domain_full = get_full_domain(from_domain, from_port)

     actor = local_actor_url(http_prefix, from_nickname, from_domain_full)
     to_url = 'https://www.w3.org/ns/activitystreams#Public'

@@ -1488,7 +1488,7 @@ def create_shared_item_federation_token(base_dir: str,
 def authorize_shared_items(shared_items_federated_domains: [],
                            base_dir: str,
                            origin_domain_full: str,
-                           calling_domainFull: str,
+                           calling_domain_full: str,
                            auth_header: str,
                            debug: bool,
                            tokens_json: {} = None) -> bool:

@@ -1528,16 +1528,16 @@ def authorize_shared_items(shared_items_federated_domains: [],
         tokens_json = load_json(tokens_filename, 1, 2)
         if not tokens_json:
             return False
-    if not tokens_json.get(calling_domainFull):
+    if not tokens_json.get(calling_domain_full):
         if debug:
             print('DEBUG: shared item federation token ' +
-                  'check failed for ' + calling_domainFull)
+                  'check failed for ' + calling_domain_full)
         return False
-    if not constant_time_string_check(tokens_json[calling_domainFull],
+    if not constant_time_string_check(tokens_json[calling_domain_full],
                                       provided_token):
         if debug:
             print('DEBUG: shared item federation token ' +
-                  'mismatch for ' + calling_domainFull)
+                  'mismatch for ' + calling_domain_full)
         return False
     return True
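
The shares.py hunks normalise a price string before building the share or wanted-item activity: get_price_from_string() turns something like '$4.23' into a numeric string plus a currency code, and the result only overwrites the caller's values when it differs. The helper itself is not shown in this diff; a hypothetical sketch of what such a conversion could look like:

    def get_price_from_string(price_str: str) -> (str, str):
        # hypothetical: '$4.23' -> ('4.23', 'USD'), '£3' -> ('3', 'GBP')
        currency = 'EUR'
        if price_str.startswith('$'):
            currency = 'USD'
        elif price_str.startswith('£'):
            currency = 'GBP'
        digits = ''.join(ch for ch in price_str if ch.isdigit() or ch == '.')
        return digits, currency

    print(get_price_from_string('$4.23'))   # ('4.23', 'USD')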