diff --git a/blog.py b/blog.py
index 7487f7117..e9440c1ef 100644
--- a/blog.py
+++ b/blog.py
@@ -704,7 +704,7 @@ def _no_of_blog_accounts(base_dir: str) -> int:
"""Returns the number of blog accounts
"""
ctr = 0
- for subdir, dirs, files in os.walk(base_dir + '/accounts'):
+ for _, dirs, _ in os.walk(base_dir + '/accounts'):
for acct in dirs:
if not is_account_dir(acct):
continue
@@ -719,7 +719,7 @@ def _no_of_blog_accounts(base_dir: str) -> int:
def _single_blog_account_nickname(base_dir: str) -> str:
"""Returns the nickname of a single blog account
"""
- for subdir, dirs, files in os.walk(base_dir + '/accounts'):
+ for _, dirs, _ in os.walk(base_dir + '/accounts'):
for acct in dirs:
if not is_account_dir(acct):
continue
diff --git a/categories.py b/categories.py
index acefee5e3..3a0838a61 100644
--- a/categories.py
+++ b/categories.py
@@ -50,7 +50,7 @@ def get_hashtag_categories(base_dir: str,
days_since_epoch = (curr_time - datetime.datetime(1970, 1, 1)).days
recently = days_since_epoch - 1
- for subdir, dirs, files in os.walk(base_dir + '/tags'):
+ for _, _, files in os.walk(base_dir + '/tags'):
for catfile in files:
if not catfile.endswith('.category'):
continue
diff --git a/content.py b/content.py
index 5dfc5c03b..24533e741 100644
--- a/content.py
+++ b/content.py
@@ -380,7 +380,6 @@ def _update_common_emoji(base_dir: str, emoji_content: str) -> None:
common_emoji = fp_emoji.readlines()
except OSError:
print('EX: unable to load common emoji file')
- pass
if common_emoji:
new_common_emoji = []
emoji_found = False
@@ -1579,7 +1578,7 @@ def import_emoji(base_dir: str, import_filename: str, session) -> None:
def content_diff(content: str, prev_content: str) -> str:
"""Returns a diff for the given content
"""
- d = difflib.Differ()
+ cdiff = difflib.Differ()
text1_lines = content.splitlines()
text1_sentences = []
for line in text1_lines:
@@ -1594,7 +1593,7 @@ def content_diff(content: str, prev_content: str) -> str:
for sentence in sentences:
text2_sentences.append(sentence.strip())
- diff = d.compare(text1_sentences, text2_sentences)
+ diff = cdiff.compare(text1_sentences, text2_sentences)
diff_text = ''
for line in diff:
@@ -1629,7 +1628,7 @@ def create_edits_html(edits_json: {}, post_json_object: {},
if not post_json_object['object'].get('contentMap'):
return ''
edit_dates_list = []
- for modified, item in edits_json.items():
+ for modified, _ in edits_json.items():
edit_dates_list.append(modified)
edit_dates_list.sort(reverse=True)
edits_str = ''
@@ -1708,11 +1707,11 @@ def remove_script(content: str, log_filename: str,
if log_filename and actor:
# write the detected script to a log file
log_str = actor + ' ' + url + ' ' + text + '\n'
- writeType = 'a+'
+ write_type = 'a+'
if os.path.isfile(log_filename):
- writeType = 'w+'
+ write_type = 'w+'
try:
- with open(log_filename, writeType) as fp_log:
+ with open(log_filename, write_type) as fp_log:
fp_log.write(log_str)
except OSError:
print('EX: cannot append to svg script log')
diff --git a/cwtch.py b/cwtch.py
index 4849ac014..7fa1df6eb 100644
--- a/cwtch.py
+++ b/cwtch.py
@@ -100,7 +100,7 @@ def set_cwtch_address(actor_json: {}, cwtch_address: str) -> None:
continue
if not property_value['type'].endswith('PropertyValue'):
continue
- prop_value_name, prop_value = \
+ prop_value_name, _ = \
get_attachment_property_value(property_value)
if not prop_value_name:
continue
diff --git a/daemon.py b/daemon.py
index 3e2d766be..49a0f9491 100644
--- a/daemon.py
+++ b/daemon.py
@@ -2230,12 +2230,12 @@ class PubServer(BaseHTTPRequestHandler):
moderation_text = None
moderation_button = None
# get the moderation text first
- actStr = 'moderationAction='
+ act_str = 'moderationAction='
for moderation_str in moderation_params.split('&'):
- if moderation_str.startswith(actStr):
- if actStr in moderation_str:
+ if moderation_str.startswith(act_str):
+ if act_str in moderation_str:
moderation_text = \
- moderation_str.split(actStr)[1].strip()
+ moderation_str.split(act_str)[1].strip()
mod_text = moderation_text.replace('+', ' ')
moderation_text = \
urllib.parse.unquote_plus(mod_text.strip())
@@ -4672,7 +4672,7 @@ class PubServer(BaseHTTPRequestHandler):
print('origin_path_str: ' + origin_path_str)
print('remove_post_actor: ' + remove_post_actor)
if origin_path_str in remove_post_actor:
- toList = [
+ to_list = [
'https://www.w3.org/ns/activitystreams#Public',
remove_post_actor
]
@@ -4680,7 +4680,7 @@ class PubServer(BaseHTTPRequestHandler):
"@context": "https://www.w3.org/ns/activitystreams",
'actor': remove_post_actor,
'object': remove_message_id,
- 'to': toList,
+ 'to': to_list,
'cc': [remove_post_actor + '/followers'],
'type': 'Delete'
}
@@ -5461,7 +5461,7 @@ class PubServer(BaseHTTPRequestHandler):
acct_dir(base_dir, nickname, domain) + \
'/' + m_type + '.temp'
- filename, attachment_media_type = \
+ filename, _ = \
save_media_in_form_post(media_bytes, debug,
filename_base)
if filename:
@@ -14956,16 +14956,16 @@ class PubServer(BaseHTTPRequestHandler):
calling_domain: str, referer_domain: str,
http_prefix: str,
domain: str, port: int,
- followingItemsPerPage: int,
- debug: bool, listName='following') -> None:
+ following_items_per_page: int,
+ debug: bool, list_name: str = 'following') -> None:
"""Returns json collection for following.txt
"""
following_json = \
get_following_feed(base_dir, domain, port, path, http_prefix,
- True, followingItemsPerPage, listName)
+ True, following_items_per_page, list_name)
if not following_json:
if debug:
- print(listName + ' json feed not found for ' + path)
+ print(list_name + ' json feed not found for ' + path)
self._404()
return
msg_str = json.dumps(following_json,
@@ -15280,7 +15280,6 @@ class PubServer(BaseHTTPRequestHandler):
known_bots_str = ''
for bot_name in self.server.known_bots:
known_bots_str += bot_name + '\n'
- # TODO
msg = known_bots_str.encode('utf-8')
msglen = len(msg)
self._set_headers('text/plain; charset=utf-8',
@@ -16395,8 +16394,7 @@ class PubServer(BaseHTTPRequestHandler):
'_GET', 'show welcome screen',
self.server.debug)
return
- else:
- self.path = self.path.replace('/welcome', '')
+ self.path = self.path.replace('/welcome', '')
# the welcome screen which allows you to set an avatar image
if html_getreq and authorized and \
@@ -18556,10 +18554,11 @@ class PubServer(BaseHTTPRequestHandler):
return
if response_str == 'Not modified':
if endpoint_type == 'put':
- return self._200()
- else:
- return self._304()
- elif response_str.startswith('ETag:') and endpoint_type == 'put':
+ self._200()
+ return
+ self._304()
+ return
+ if response_str.startswith('ETag:') and endpoint_type == 'put':
response_etag = response_str.split('ETag:', 1)[1]
self._201(response_etag)
elif response_str != 'Ok':
@@ -19483,7 +19482,7 @@ class PubServer(BaseHTTPRequestHandler):
curr_session, proxy_type):
return 1
return -1
- elif post_type == 'newshare' or post_type == 'newwanted':
+ elif post_type in ('newshare', 'newwanted'):
if not fields.get('itemQty'):
print(post_type + ' no itemQty')
return -1
diff --git a/delete.py b/delete.py
index 00e83d33a..6bb3ba30d 100644
--- a/delete.py
+++ b/delete.py
@@ -28,7 +28,7 @@ from posts import get_person_box
def send_delete_via_server(base_dir: str, session,
from_nickname: str, password: str,
- from_domain: str, fromPort: int,
+ from_domain: str, from_port: int,
http_prefix: str, delete_object_url: str,
cached_webfingers: {}, person_cache: {},
debug: bool, project_version: str,
@@ -39,7 +39,7 @@ def send_delete_via_server(base_dir: str, session,
print('WARN: No session for send_delete_via_server')
return 6
- from_domain_full = get_full_domain(from_domain, fromPort)
+ from_domain_full = get_full_domain(from_domain, from_port)
actor = local_actor_url(http_prefix, from_nickname, from_domain_full)
to_url = 'https://www.w3.org/ns/activitystreams#Public'
@@ -178,8 +178,7 @@ def outbox_delete(base_dir: str, http_prefix: str,
def remove_old_hashtags(base_dir: str, max_months: int) -> str:
"""Remove old hashtags
"""
- if max_months > 11:
- max_months = 11
+ max_months = min(max_months, 11)
max_days_since_epoch = \
(datetime.utcnow() - datetime(1970, 1 + max_months, 1)).days
remove_hashtags = []
diff --git a/enigma.py b/enigma.py
index 0612548b5..1550597c4 100644
--- a/enigma.py
+++ b/enigma.py
@@ -65,7 +65,7 @@ def set_enigma_pub_key(actor_json: {}, enigma_pub_key: str) -> None:
property_found = property_value
break
if property_found:
- actor_json['attachment'].remove(property_value)
+ actor_json['attachment'].remove(property_found)
if remove_key:
return
diff --git a/feeds.py b/feeds.py
index edb40f108..6b91885c3 100644
--- a/feeds.py
+++ b/feeds.py
@@ -9,6 +9,8 @@ __module_group__ = "RSS Feeds"
def rss2tag_header(hashtag: str, http_prefix: str, domain_full: str) -> str:
+ """Header for rss 2
+ """
return \
"<?xml version=\"1.0\" encoding=\"UTF-8\" ?>" + \
"<rss version=\"2.0\" " + \
' + json.loads(response_str)['translatedText'] + '
diff --git a/like.py b/like.py
index 425d20c6c..51a2b00cb 100644
--- a/like.py
+++ b/like.py
@@ -74,7 +74,7 @@ def _create_like(recent_posts_cache: {},
cc_list: [], http_prefix: str,
object_url: str, actor_liked: str,
client_to_server: bool,
- send_threads: [], postLog: [],
+ send_threads: [], post_log: [],
person_cache: {}, cached_webfingers: {},
debug: bool, project_version: str,
signing_priv_key_pem: str,
@@ -141,7 +141,7 @@ def _create_like(recent_posts_cache: {},
liked_post_port,
'https://www.w3.org/ns/activitystreams#Public',
http_prefix, True, client_to_server, federation_list,
- send_threads, postLog, cached_webfingers,
+ send_threads, post_log, cached_webfingers,
person_cache,
debug, project_version, None, group_account,
signing_priv_key_pem, 7367374,
@@ -156,7 +156,7 @@ def like_post(recent_posts_cache: {},
like_nickname: str, like_domain: str, like_port: int,
cc_list: [], like_status_number: int,
client_to_server: bool,
- send_threads: [], postLog: [],
+ send_threads: [], post_log: [],
person_cache: {}, cached_webfingers: {},
debug: bool, project_version: str,
signing_priv_key_pem: str,
@@ -173,7 +173,8 @@ def like_post(recent_posts_cache: {},
nickname, domain, port, cc_list,
http_prefix, object_url, actor_liked,
client_to_server,
- send_threads, postLog, person_cache, cached_webfingers,
+ send_threads, post_log, person_cache,
+ cached_webfingers,
debug, project_version,
signing_priv_key_pem,
curr_domain, onion_domain, i2p_domain)
diff --git a/linked_data_sig.py b/linked_data_sig.py
index a2790fc81..3c61d24c3 100644
--- a/linked_data_sig.py
+++ b/linked_data_sig.py
@@ -87,8 +87,8 @@ def verify_json_signature(doc: {}, public_key_pem: str) -> bool:
padding.PKCS1v15(),
hazutils.Prehashed(hashes.SHA256()))
return True
- except BaseException:
- print('EX: verify_json_signature unable to verify')
+ except BaseException as ex:
+ print('EX: verify_json_signature unable to verify ' + str(ex))
return False
diff --git a/maps.py b/maps.py
index c4ecac990..a7f224110 100644
--- a/maps.py
+++ b/maps.py
@@ -202,13 +202,13 @@ def geocoords_from_map_link(url: str,
"""
if osm_domain in url:
return _geocoords_from_osm_link(url, osm_domain)
- elif '.google.co' in url:
+ if '.google.co' in url:
return _geocoords_from_gmaps_link(url)
- elif '.bing.co' in url:
+ if '.bing.co' in url:
return _geocoords_from_bmaps_link(url)
- elif '.waze.co' in url:
+ if '.waze.co' in url:
return _geocoords_from_waze_link(url)
- elif 'wego.here.co' in url:
+ if 'wego.here.co' in url:
return _geocoords_from_wego_link(url)
return None, None, None
diff --git a/metadata.py b/metadata.py
index 40c9fb088..d3d488a12 100644
--- a/metadata.py
+++ b/metadata.py
@@ -35,7 +35,7 @@ def meta_data_node_info(base_dir: str,
about_url: str,
terms_of_service_url: str,
registration: bool, version: str,
- showAccounts: bool) -> {}:
+ show_accounts: bool) -> {}:
""" /nodeinfo/2.0 endpoint
Also see https://socialhub.activitypub.rocks/t/
fep-f1d5-nodeinfo-in-fediverse-software/1190/4
@@ -46,7 +46,7 @@ def meta_data_node_info(base_dir: str,
Also exposure of the version number and number of accounts
could be sensitive
"""
- if showAccounts:
+ if show_accounts:
active_accounts = no_of_accounts(base_dir)
active_accounts_monthly = no_of_active_accounts_monthly(base_dir, 1)
active_accounts_half_year = no_of_active_accounts_monthly(base_dir, 6)
@@ -81,7 +81,7 @@ def meta_data_node_info(base_dir: str,
return nodeinfo


-def meta_data_instance(showAccounts: bool,
+def meta_data_instance(show_accounts: bool,
instance_title: str,
instance_description_short: str,
instance_description: str,
@@ -131,7 +131,7 @@ def meta_data_instance(showAccounts: bool,
http_prefix + '://' + domain_full + '/@' + \
admin_actor['preferredUsername']

- if showAccounts:
+ if show_accounts:
active_accounts = no_of_accounts(base_dir)
local_posts = _get_status_count(base_dir)
else:
diff --git a/newsdaemon.py b/newsdaemon.py
index 431230ccc..0faf24a00 100644
--- a/newsdaemon.py
+++ b/newsdaemon.py
@@ -54,7 +54,7 @@ def _update_feeds_outbox_index(base_dir: str, domain: str,
feeds_file.seek(0, 0)
feeds_file.write(post_id + '\n' + content)
print('DEBUG: feeds post added to index')
- except BaseException as ex:
+ except OSError as ex:
print('EX: Failed to write entry to feeds posts index ' +
index_filename + ' ' + str(ex))
else:
@@ -803,8 +803,7 @@ def run_newswire_daemon(base_dir: str, httpd,
print('Newswire daemon has no session')
time.sleep(60)
continue
- else:
- print('Newswire daemon session established')
+ print('Newswire daemon session established')

# try to update the feeds
print('Updating newswire feeds')
diff --git a/newswire.py b/newswire.py
index db20b0956..8acd22364 100644
--- a/newswire.py
+++ b/newswire.py
@@ -471,7 +471,7 @@ def _valid_podcast_entry(base_dir: str, key: str, entry: {}) -> bool:
https://github.com/Podcastindex-org/podcast-namespace/
blob/main/proposal-docs/social/social.md#socialinteract-element
"""
- if key == 'socialInteract' or key == 'discussion':
+ if key in ('socialInteract', 'discussion'):
if not entry.get('protocol'):
return False
if not entry.get('uri'):
@@ -678,10 +678,9 @@ def get_link_from_rss_item(rss_item: str,
'.i2p/' not in link:
continue
return link, mime_type
- else:
- if '.onion/' not in link and \
- '.i2p/' not in link:
- return link, mime_type
+ if '.onion/' not in link and \
+ '.i2p/' not in link:
+ return link, mime_type
if ''
result_str += title_str + '