__filename__ = "tests.py" __author__ = "Bob Mottram" __license__ = "AGPL3+" __version__ = "1.5.0" __maintainer__ = "Bob Mottram" __email__ = "bob@libreserver.org" __status__ = "Production" __module_group__ = "Testing" import base64 from cryptography.hazmat.primitives import hashes from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.serialization import load_pem_private_key from cryptography.hazmat.primitives.serialization import load_pem_public_key from cryptography.hazmat.primitives.asymmetric import padding from cryptography.hazmat.primitives.asymmetric import utils as hazutils import time import os import shutil import json import datetime from shutil import copyfile from random import randint from time import gmtime, strftime from pprint import pprint from httpsig import get_digest_algorithm_from_headers from httpsig import get_digest_prefix from httpsig import create_signed_header from httpsig import sign_post_headers from httpsig import sign_post_headers_new from httpsig import verify_post_headers from httpsig import message_content_digest from cache import store_person_in_cache from cache import get_person_from_cache from threads import thread_with_trace from daemon import run_daemon from session import get_json_valid from session import create_session from session import get_json from posts import convert_post_content_to_html from posts import get_actor_from_in_reply_to from posts import regenerate_index_for_box from posts import remove_post_interactions from posts import get_mentioned_people from posts import valid_content_warning from posts import delete_all_posts from posts import create_public_post from posts import send_post from posts import no_of_followers_on_domain from posts import group_followers_by_domain from posts import archive_posts_for_person from posts import send_post_via_server from posts import seconds_between_published from follow import clear_follows from follow import clear_followers from 
follow import send_follow_request_via_server from follow import send_unfollow_request_via_server from siteactive import site_is_active from utils import data_dir from utils import data_dir_testing from utils import remove_link_tracking from utils import uninvert_text from utils import get_url_from_post from utils import date_from_string_format from utils import date_utcnow from utils import is_right_to_left_text from utils import remove_markup_tag from utils import remove_style_within_html from utils import html_tag_has_closing from utils import remove_inverted_text from utils import remove_square_capitals from utils import standardize_text from utils import remove_eol from utils import text_in_file from utils import convert_published_to_local_timezone from utils import convert_to_snake_case from utils import get_sha_256 from utils import dangerous_svg from utils import can_reply_to from utils import is_group_account from utils import get_actor_languages_list from utils import get_category_types from utils import get_supported_languages from utils import set_config_param from utils import is_group_actor from utils import date_string_to_seconds from utils import date_seconds_to_string from utils import valid_password from utils import user_agent_domain from utils import camel_case_split from utils import decoded_host from utils import get_full_domain from utils import valid_nickname from utils import first_paragraph_from_string from utils import remove_id_ending from utils import update_recent_posts_cache from utils import follow_person from utils import get_nickname_from_actor from utils import get_domain_from_actor from utils import copytree from utils import load_json from utils import save_json from utils import get_status_number from utils import valid_hash_tag from utils import get_followers_of_person from utils import remove_html from utils import dangerous_markup from utils import acct_dir from pgp import extract_pgp_public_key from pgp import 
pgp_public_key_upload from utils import contains_pgp_public_key from follow import add_follower_of_person from follow import unfollow_account from follow import unfollower_of_account from follow import send_follow_request from person import set_featured_hashtags from person import get_featured_hashtags from person import create_person from person import create_group from person import set_display_nickname from person import set_bio # from person import generate_rsa_key from skills import set_skill_level from skills import actor_skill_value from skills import set_skills_from_dict from skills import actor_has_skill from roles import actor_roles_from_list from roles import set_role from roles import actor_has_role from auth import constant_time_string_check from auth import create_basic_auth_header from auth import authorize_basic from auth import store_basic_credentials from like import like_post from like import send_like_via_server from reaction import reaction_post from reaction import send_reaction_via_server from reaction import valid_emoji_content from announce import announce_public from announce import send_announce_via_server from city import parse_nogo_string from city import spoof_geolocation from city import point_in_nogo from media import get_image_dimensions from media import get_media_path from media import get_attachment_media_type from delete import send_delete_via_server from inbox import json_post_allows_comments from inbox import valid_inbox from inbox import valid_inbox_filenames from inbox import cache_svg_images from categories import guess_hashtag_category from content import remove_link_trackers_from_content from content import format_mixed_right_to_left from content import replace_remote_hashtags from content import add_name_emojis_to_tags from content import combine_textarea_lines from content import detect_dogwhistles from content import remove_script from content import create_edits_html from content import content_diff from content 
import bold_reading_string from content import safe_web_text from content import words_similarity from content import get_price_from_string from content import limit_repeated_words from content import switch_words from content import extract_text_fields_in_post from content import html_replace_email_quote from content import html_replace_quote_marks from content import dangerous_css from content import add_web_links from content import replace_emoji_from_tags from content import add_html_tags from content import remove_long_words from content import replace_content_duplicates from content import remove_text_formatting from content import remove_html_tag from theme import get_themes_list from theme import update_default_themes_list from theme import set_css_param from theme import scan_themes_for_scripts from linked_data_sig import generate_json_signature from linked_data_sig import verify_json_signature from newsdaemon import hashtag_rule_tree from newsdaemon import hashtag_rule_resolve from newswire import get_link_from_rss_item from newswire import xml_podcast_to_dict from newswire import get_newswire_tags from newswire import parse_feed_date from newswire import limit_word_lengths from mastoapiv1 import get_masto_api_v1id_from_nickname from mastoapiv1 import get_nickname_from_masto_api_v1id from webapp_post import remove_incomplete_code_tags from webapp_post import replace_link_variable from webapp_post import prepare_html_post_nickname from speaker import speaker_replace_links from markdown import markdown_to_html from languages import get_reply_language from languages import set_actor_languages from languages import get_actor_languages from languages import get_links_from_content from languages import add_links_to_content from languages import libretranslate from languages import libretranslate_languages from shares import authorize_shared_items from shares import generate_shared_item_federation_tokens from shares import create_shared_item_federation_token 
from shares import update_shared_item_federation_token
from shares import merge_shared_item_tokens
from shares import send_share_via_server
from shares import get_shared_items_catalog_via_server
from shares import get_offers_via_server
from shares import get_wanted_via_server
from cwlists import add_cw_from_lists
from cwlists import load_cw_lists
from happening import dav_month_via_server
from happening import dav_day_via_server
from webapp_theme_designer import color_contrast
from maps import get_map_links_from_post_content
from maps import geocoords_from_map_link
from followerSync import get_followers_sync_hash
from reading import get_book_link_from_content
from reading import get_book_from_post
from reading import get_reading_status
from reading import store_book_events

# Flags set True by the create_server_* functions once their daemon
# has been launched, so callers can wait for the test servers to start.
TEST_SERVER_GROUP_RUNNING = False
TEST_SERVER_ALICE_RUNNING = False
TEST_SERVER_BOB_RUNNING = False
TEST_SERVER_EVE_RUNNING = False

# Thread handles for the test server daemons, populated by the
# test functions which start them.
THR_GROUP = None
THR_ALICE = None
THR_BOB = None
THR_EVE = None


def _test_http_signed_get(base_dir: str):
    """Tests HTTP signature verification for GET requests, first against
    a known-good Mastodon signature (and a deliberately corrupted copy),
    then against a signature generated by a locally-created account.
    """
    print('test_http_signed_get')
    http_prefix = 'https'
    debug = True
    boxpath = "/users/Actor"
    host = "epicyon.libreserver.org"
    content_length = "0"
    user_agent = "http.rb/4.4.1 (Mastodon/3.4.1; +https://octodon.social/)"
    date_str = 'Wed, 01 Sep 2021 16:11:10 GMT'
    accept_encoding = 'gzip'
    accept = \
        'application/activity+json, application/ld+json'
    signature = \
        'keyId="https://octodon.social/actor#main-key",' + \
        'algorithm="rsa-sha256",' + \
        'headers="(request-target) host date accept",' + \
        'signature="Fe53PS9A2OSP4x+W/svhA' + \
        'jUKHBvnAR73Ez+H32au7DQklLk08Lvm8al' + \
        'LS7pCor28yfyx+DfZADgq6G1mLLRZo0OOn' + \
        'PFSog7DhdcygLhBUMS0KlT5KVGwUS0tw' + \
        'jdiHv4OC83RiCr/ZySBgOv65YLHYmGCi5B' + \
        'IqSZJRkqi8+SLmLGESlNOEzKu+jIxOBY' + \
        'mEEdIpNrDeE5YrFKpfTC3vS2GnxGOo5J/4' + \
        'lB2h+dlUpso+sv5rDz1d1FsqRWK8waV7' + \
        '4HUfLV+qbgYRceOTyZIi50vVqLvt9CTQes' + \
        'KZHG3GrrPfaBuvoUbR4MCM3BUvpB7EzL' + \
        '9F17Y+Ea9mo8zjqzZm8HaZQ=="'
    public_key_pem = \
        '-----BEGIN PUBLIC KEY-----\n' + \
        'MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMII' + \
        'BCgKCAQEA1XT+ov/i4LDYuaXCwh4r\n' + \
        '2rVfWtnz68wnFx3knwymwtRoAc/SFGzp9ye' + \
        '5ogG1uPcbe7MeirZHhaBICynPlL32\n' + \
        's9OYootI7MsQWn+vu7azxiXO7qcTPByvGcl' + \
        '0vpLhtT/ApmlMintkRTVXdzBdJVM0\n' + \
        'UsmYKg6U+IHNL+a1gURHGXep2Ih0BJMh4Aa' + \
        'DbaID6jtpJZvbIkYgJ4IJucOe+A3T\n' + \
        'YPMwkBA84ew+hso+vKQfTunyDInuPQbEzrA' + \
        'zMJXEHS7IpBhdS4/cEox86BoDJ/q0\n' + \
        'KOEOUpUDniFYWb9k1+9B387OviRDLIcLxNZ' + \
        'nf+bNq8d+CwEXY2xGsToBle/q74d8\n' + \
        'BwIDAQAB\n' + \
        '-----END PUBLIC KEY-----\n'
    headers = {
        "user-agent": user_agent,
        "content-length": content_length,
        "host": host,
        "date": date_str,
        "accept": accept,
        "accept-encoding": accept_encoding,
        "signature": signature
    }
    getreq_method = True
    message_body_digest = None
    message_body_json_str = ''
    no_recency_check = True
    assert verify_post_headers(http_prefix, public_key_pem, headers,
                               boxpath, getreq_method,
                               message_body_digest,
                               message_body_json_str, debug,
                               no_recency_check)
    # Change a single character and the signature should fail
    headers['date'] = headers['date'].replace(':10', ':11')
    assert not verify_post_headers(http_prefix, public_key_pem, headers,
                                   boxpath, getreq_method,
                                   message_body_digest,
                                   message_body_json_str, debug,
                                   no_recency_check)

    # now sign and verify a GET request for a locally-created account
    path = base_dir + '/.testHttpsigGET'
    if os.path.isdir(path):
        shutil.rmtree(path, ignore_errors=False)
    os.mkdir(path)
    os.chdir(path)
    nickname = 'testactor'
    host_domain = 'someother.instance'
    domain = 'argumentative.social'
    http_prefix = 'https'
    port = 443
    with_digest = False
    password = 'SuperSecretPassword'
    no_recency_check = True
    private_key_pem, public_key_pem, _, _ = \
        create_person(path, nickname, domain, port, http_prefix,
                      False, False, password)
    assert private_key_pem
    assert public_key_pem
    message_body_json_str = ''
    headers_domain = get_full_domain(host_domain, port)
    date_str = 'Tue, 14 Sep 2021 16:19:00 GMT'
    boxpath = '/inbox'
    accept = 'application/json'
    # accept = 'application/activity+json'
    headers = {
        'user-agent': 'Epicyon/1.5.0; +https://' + domain + '/',
        'host': headers_domain,
        'date': date_str,
        'accept': accept,
        'content-length': 0
    }
    signature_header = \
        create_signed_header(date_str, private_key_pem, nickname,
                             domain, port,
                             host_domain, port,
                             boxpath, http_prefix, False, None, accept)
    headers['signature'] = signature_header['signature']
    getreq_method = not with_digest
    assert verify_post_headers(http_prefix, public_key_pem, headers,
                               boxpath, getreq_method, None,
                               message_body_json_str, debug,
                               no_recency_check)
    if os.path.isdir(path):
        shutil.rmtree(path, ignore_errors=False)


def _test_sign_and_verify() -> None:
    """Tests that a low-level RSA sign of a prehashed header string can be
    verified with the matching public key (raises on failure).
    """
    print('test_sign_and_verify')
    public_key_pem = \
        '-----BEGIN RSA PUBLIC KEY-----\n' + \
        'MIIBCgKCAQEAhAKYdtoeoy8zcAcR874L8' + \
        'cnZxKzAGwd7v36APp7Pv6Q2jdsPBRrw\n' + \
        'WEBnez6d0UDKDwGbc6nxfEXAy5mbhgajz' + \
        'rw3MOEt8uA5txSKobBpKDeBLOsdJKFq\n' + \
        'MGmXCQvEG7YemcxDTRPxAleIAgYYRjTSd' + \
        '/QBwVW9OwNFhekro3RtlinV0a75jfZg\n' + \
        'kne/YiktSvLG34lw2zqXBDTC5NHROUqGT' + \
        'lML4PlNZS5Ri2U4aCNx2rUPRcKIlE0P\n' + \
        'uKxI4T+HIaFpv8+rdV6eUgOrB2xeI1dSF' + \
        'Fn/nnv5OoZJEIB+VmuKn3DCUcCZSFlQ\n' + \
        'PSXSfBDiUGhwOw76WuSSsf1D4b/vLoJ10wIDAQAB\n' + \
        '-----END RSA PUBLIC KEY-----\n'
    private_key_pem = \
        '-----BEGIN RSA PRIVATE KEY-----\n' + \
        'MIIEqAIBAAKCAQEAhAKYdtoeoy8zcAcR8' + \
        '74L8cnZxKzAGwd7v36APp7Pv6Q2jdsP\n' + \
        'BRrwWEBnez6d0UDKDwGbc6nxfEXAy5mbh' + \
        'gajzrw3MOEt8uA5txSKobBpKDeBLOsd\n' + \
        'JKFqMGmXCQvEG7YemcxDTRPxAleIAgYYR' + \
        'jTSd/QBwVW9OwNFhekro3RtlinV0a75\n' + \
        'jfZgkne/YiktSvLG34lw2zqXBDTC5NHRO' + \
        'UqGTlML4PlNZS5Ri2U4aCNx2rUPRcKI\n' + \
        'lE0PuKxI4T+HIaFpv8+rdV6eUgOrB2xeI' + \
        '1dSFFn/nnv5OoZJEIB+VmuKn3DCUcCZ\n' + \
        'SFlQPSXSfBDiUGhwOw76WuSSsf1D4b/vL' + \
        'oJ10wIDAQABAoIBAG/JZuSWdoVHbi56\n' + \
        'vjgCgkjg3lkO1KrO3nrdm6nrgA9P9qaPj' + \
        'xuKoWaKO1cBQlE1pSWp/cKncYgD5WxE\n' + \
        'CpAnRUXG2pG4zdkzCYzAh1i+c34L6oZoH' + \
        'sirK6oNcEnHveydfzJL5934egm6p8DW\n' + \
        '+m1RQ70yUt4uRc0YSor+q1LGJvGQHReF0' + \
        'WmJBZHrhz5e63Pq7lE0gIwuBqL8SMaA\n' + \
        'yRXtK+JGxZpImTq+NHvEWWCu09SCq0r83' + \
        '8ceQI55SvzmTkwqtC+8AT2zFviMZkKR\n' + \
        'Qo6SPsrqItxZWRty2izawTF0Bf5S2VAx7' + \
        'O+6t3wBsQ1sLptoSgX3QblELY5asI0J\n' + \
        'YFz7LJECgYkAsqeUJmqXE3LP8tYoIjMIA' + \
        'KiTm9o6psPlc8CrLI9CH0UbuaA2JCOM\n' + \
        'cCNq8SyYbTqgnWlB9ZfcAm/cFpA8tYci9' + \
        'm5vYK8HNxQr+8FS3Qo8N9RJ8d0U5Csw\n' + \
        'DzMYfRghAfUGwmlWj5hp1pQzAuhwbOXFt' + \
        'xKHVsMPhz1IBtF9Y8jvgqgYHLbmyiu1\n' + \
        'mwJ5AL0pYF0G7x81prlARURwHo0Yf52kE' + \
        'w1dxpx+JXER7hQRWQki5/NsUEtv+8RT\n' + \
        'qn2m6qte5DXLyn83b1qRscSdnCCwKtKWU' + \
        'ug5q2ZbwVOCJCtmRwmnP131lWRYfj67\n' + \
        'B/xJ1ZA6X3GEf4sNReNAtaucPEelgR2ns' + \
        'N0gKQKBiGoqHWbK1qYvBxX2X3kbPDkv\n' + \
        '9C+celgZd2PW7aGYLCHq7nPbmfDV0yHcW' + \
        'jOhXZ8jRMjmANVR/eLQ2EfsRLdW69bn\n' + \
        'f3ZD7JS1fwGnO3exGmHO3HZG+6AvberKY' + \
        'VYNHahNFEw5TsAcQWDLRpkGybBcxqZo\n' + \
        '81YCqlqidwfeO5YtlO7etx1xLyqa2NsCe' + \
        'G9A86UjG+aeNnXEIDk1PDK+EuiThIUa\n' + \
        '/2IxKzJKWl1BKr2d4xAfR0ZnEYuRrbeDQ' + \
        'YgTImOlfW6/GuYIxKYgEKCFHFqJATAG\n' + \
        'IxHrq1PDOiSwXd2GmVVYyEmhZnbcp8Cxa' + \
        'EMQoevxAta0ssMK3w6UsDtvUvYvF22m\n' + \
        'qQKBiD5GwESzsFPy3Ga0MvZpn3D6EJQLg' + \
        'snrtUPZx+z2Ep2x0xc5orneB5fGyF1P\n' + \
        'WtP+fG5Q6Dpdz3LRfm+KwBCWFKQjg7uTx' + \
        'cjerhBWEYPmEMKYwTJF5PBG9/ddvHLQ\n' + \
        'EQeNC8fHGg4UXU8mhHnSBt3EA10qQJfRD' + \
        's15M38eG2cYwB1PZpDHScDnDA0=\n' + \
        '-----END RSA PRIVATE KEY-----'

    # sign
    signed_header_text = \
        '(request-target): get /actor\n' + \
        'host: octodon.social\n' + \
        'date: Tue, 14 Sep 2021 16:19:00 GMT\n' + \
        'accept: application/json'
    header_digest = get_sha_256(signed_header_text.encode('ascii'))
    key = load_pem_private_key(private_key_pem.encode('utf-8'),
                               None, backend=default_backend())
    raw_signature = key.sign(header_digest,
                             padding.PKCS1v15(),
                             hazutils.Prehashed(hashes.SHA256()))
    signature1 = base64.b64encode(raw_signature).decode('ascii')

    # verify (raises InvalidSignature if the signature does not match)
    padding_str = padding.PKCS1v15()
    alg = hazutils.Prehashed(hashes.SHA256())
    pubkey = load_pem_public_key(public_key_pem.encode('utf-8'),
                                 backend=default_backend())
    signature2 = base64.b64decode(signature1)
    pubkey.verify(signature2, header_digest, padding_str, alg)


def _test_http_sig_new(algorithm: str, digest_algorithm: str):
    """Tests the newer style (RFC9421-like) of HTTP signatures, checking
    that signing produces the expected signature-input and signature
    headers, that they verify, and that a corrupted signature fails.
    """
    print('test_http_sig_new')
    http_prefix = 'https'
    port = 443
    debug = True
    message_body_json = {"hello": "world"}
    message_body_json_str = json.dumps(message_body_json)
    nickname = 'foo'
    path_str = "/" + nickname + "?param=value&pet=dog HTTP/1.1"
    domain = 'example.com'
    date_str = 'Tue, 20 Apr 2021 02:07:55 GMT'
    digest_prefix = get_digest_prefix(digest_algorithm)
    digest_str = \
        digest_prefix + '=X48E9qOokqqrvdts8nOJRJN3OWDUoyWxBf7kbu9DBPE='
    body_digest = \
        message_content_digest(message_body_json_str, digest_algorithm)
    assert body_digest in digest_str
    content_length = 18
    content_type = 'application/activity+json'
    public_key_pem = \
        '-----BEGIN RSA PUBLIC KEY-----\n' + \
        'MIIBCgKCAQEAhAKYdtoeoy8zcAcR874L8' + \
        'cnZxKzAGwd7v36APp7Pv6Q2jdsPBRrw\n' + \
        'WEBnez6d0UDKDwGbc6nxfEXAy5mbhgajz' + \
        'rw3MOEt8uA5txSKobBpKDeBLOsdJKFq\n' + \
        'MGmXCQvEG7YemcxDTRPxAleIAgYYRjTSd' + \
        '/QBwVW9OwNFhekro3RtlinV0a75jfZg\n' + \
        'kne/YiktSvLG34lw2zqXBDTC5NHROUqGT' + \
        'lML4PlNZS5Ri2U4aCNx2rUPRcKIlE0P\n' + \
        'uKxI4T+HIaFpv8+rdV6eUgOrB2xeI1dSF' + \
        'Fn/nnv5OoZJEIB+VmuKn3DCUcCZSFlQ\n' + \
        'PSXSfBDiUGhwOw76WuSSsf1D4b/vLoJ10wIDAQAB\n' + \
        '-----END RSA PUBLIC KEY-----\n'
    private_key_pem = \
        '-----BEGIN RSA PRIVATE KEY-----\n' + \
        'MIIEqAIBAAKCAQEAhAKYdtoeoy8zcAcR8' + \
        '74L8cnZxKzAGwd7v36APp7Pv6Q2jdsP\n' + \
        'BRrwWEBnez6d0UDKDwGbc6nxfEXAy5mbh' + \
        'gajzrw3MOEt8uA5txSKobBpKDeBLOsd\n' + \
        'JKFqMGmXCQvEG7YemcxDTRPxAleIAgYYR' + \
        'jTSd/QBwVW9OwNFhekro3RtlinV0a75\n' + \
        'jfZgkne/YiktSvLG34lw2zqXBDTC5NHRO' + \
        'UqGTlML4PlNZS5Ri2U4aCNx2rUPRcKI\n' + \
        'lE0PuKxI4T+HIaFpv8+rdV6eUgOrB2xeI' + \
        '1dSFFn/nnv5OoZJEIB+VmuKn3DCUcCZ\n' + \
        'SFlQPSXSfBDiUGhwOw76WuSSsf1D4b/vL' + \
        'oJ10wIDAQABAoIBAG/JZuSWdoVHbi56\n' + \
        'vjgCgkjg3lkO1KrO3nrdm6nrgA9P9qaPj' + \
        'xuKoWaKO1cBQlE1pSWp/cKncYgD5WxE\n' + \
        'CpAnRUXG2pG4zdkzCYzAh1i+c34L6oZoH' + \
        'sirK6oNcEnHveydfzJL5934egm6p8DW\n' + \
        '+m1RQ70yUt4uRc0YSor+q1LGJvGQHReF0' + \
        'WmJBZHrhz5e63Pq7lE0gIwuBqL8SMaA\n' + \
        'yRXtK+JGxZpImTq+NHvEWWCu09SCq0r83' + \
        '8ceQI55SvzmTkwqtC+8AT2zFviMZkKR\n' + \
        'Qo6SPsrqItxZWRty2izawTF0Bf5S2VAx7' + \
        'O+6t3wBsQ1sLptoSgX3QblELY5asI0J\n' + \
        'YFz7LJECgYkAsqeUJmqXE3LP8tYoIjMIA' + \
        'KiTm9o6psPlc8CrLI9CH0UbuaA2JCOM\n' + \
        'cCNq8SyYbTqgnWlB9ZfcAm/cFpA8tYci9' + \
        'm5vYK8HNxQr+8FS3Qo8N9RJ8d0U5Csw\n' + \
        'DzMYfRghAfUGwmlWj5hp1pQzAuhwbOXFt' + \
        'xKHVsMPhz1IBtF9Y8jvgqgYHLbmyiu1\n' + \
        'mwJ5AL0pYF0G7x81prlARURwHo0Yf52kE' + \
        'w1dxpx+JXER7hQRWQki5/NsUEtv+8RT\n' + \
        'qn2m6qte5DXLyn83b1qRscSdnCCwKtKWU' + \
        'ug5q2ZbwVOCJCtmRwmnP131lWRYfj67\n' + \
        'B/xJ1ZA6X3GEf4sNReNAtaucPEelgR2ns' + \
        'N0gKQKBiGoqHWbK1qYvBxX2X3kbPDkv\n' + \
        '9C+celgZd2PW7aGYLCHq7nPbmfDV0yHcW' + \
        'jOhXZ8jRMjmANVR/eLQ2EfsRLdW69bn\n' + \
        'f3ZD7JS1fwGnO3exGmHO3HZG+6AvberKY' + \
        'VYNHahNFEw5TsAcQWDLRpkGybBcxqZo\n' + \
        '81YCqlqidwfeO5YtlO7etx1xLyqa2NsCe' + \
        'G9A86UjG+aeNnXEIDk1PDK+EuiThIUa\n' + \
        '/2IxKzJKWl1BKr2d4xAfR0ZnEYuRrbeDQ' + \
        'YgTImOlfW6/GuYIxKYgEKCFHFqJATAG\n' + \
        'IxHrq1PDOiSwXd2GmVVYyEmhZnbcp8Cxa' + \
        'EMQoevxAta0ssMK3w6UsDtvUvYvF22m\n' + \
        'qQKBiD5GwESzsFPy3Ga0MvZpn3D6EJQLg' + \
        'snrtUPZx+z2Ep2x0xc5orneB5fGyF1P\n' + \
        'WtP+fG5Q6Dpdz3LRfm+KwBCWFKQjg7uTx' + \
        'cjerhBWEYPmEMKYwTJF5PBG9/ddvHLQ\n' + \
        'EQeNC8fHGg4UXU8mhHnSBt3EA10qQJfRD' + \
        's15M38eG2cYwB1PZpDHScDnDA0=\n' + \
        '-----END RSA PRIVATE KEY-----'
    headers = {
        "host": domain,
        "date": date_str,
        "digest": f'{digest_prefix}={body_digest}',
        "content-type": content_type,
        "content-length": str(content_length)
    }
    signature_index_header, signature_header = \
        sign_post_headers_new(date_str, private_key_pem,
                              nickname, domain, port,
                              domain, port,
                              path_str, http_prefix,
                              message_body_json_str,
                              algorithm, digest_algorithm, debug)
    print('signature_index_header1: ' + str(signature_index_header))
    print('signature_header1: ' + str(signature_header))
    sig_input = "keyId=\"https://example.com/users/foo#main-key\"; " + \
        "alg=hs2019; created=1618884475; " + \
        "sig1=(@request-target, @created, host, date, digest, " + \
        "content-type, content-length)"
    assert signature_index_header == sig_input
    sig = "sig1=:NXAQ7AtDMR2iwhmH1qCwiZw5PVTjOw5+5kSu0Tsx/3gqz0D" + \
        "py7OQbWqFHrNB7MmS4TukX/vDyQOFdElY5yxnEhbgRwKACq0AP4QH9H" + \
        "CiRyCE8UXDdAkY4VUd6jrWjRHKRoqQN7I+Q5tb2Fu5cDfifw/PQc86Z" + \
        "NmMhPrg3OjUJ9Q2Gj29NhgJ+4el1ECg0cAy4yG1M9AQ3KvQooQFvlg1" + \
        "vp0H2xfbJQjv8FsR/lKiRdaVHqGR2CKrvxvPRPaOsFANp2wzEtiMk3O" + \
        "TrBTYU+Zb53mIspfEeLxsNtcGmBDmQKZ9Pud8f99XGJrP+uDd3zKtnr" + \
        "f3fUnRRqy37yhB7WVwkg==:"
    assert signature_header == sig
    debug = True
    headers['path'] = path_str
    headers['signature'] = sig
    headers['signature-input'] = sig_input
    assert verify_post_headers(http_prefix, public_key_pem, headers,
                               path_str, False, None,
                               message_body_json_str, debug, True)
    # make a deliberate mistake
    debug = False
    headers['signature'] = headers['signature'].replace('V', 'B')
    assert not verify_post_headers(http_prefix, public_key_pem, headers,
                                   path_str, False, None,
                                   message_body_json_str, debug, True)


def _test_httpsig_base(with_digest: bool, base_dir: str):
    """Tests signing and verifying HTTP headers for a locally-created
    account, with or without a message body digest, including negative
    cases (wrong content-length, wrong path, wrong method, fake domain
    and tampered message body).
    """
    print('test_httpsig(' + str(with_digest) + ')')
    path = base_dir + '/.testHttpsigBase'
    if os.path.isdir(path):
        shutil.rmtree(path, ignore_errors=False)
    os.mkdir(path)
    os.chdir(path)
    algorithm = 'rsa-sha256'
    digest_algorithm = 'rsa-sha256'
    content_type = 'application/activity+json'
    nickname = 'socrates'
    host_domain = 'someother.instance'
    domain = 'argumentative.social'
    http_prefix = 'https'
    port = 5576
    password = 'SuperSecretPassword'
    private_key_pem, public_key_pem, person, wf_endpoint = \
        create_person(path, nickname, domain, port, http_prefix,
                      False, False, password)
    assert private_key_pem
    assert public_key_pem
    assert person
    assert wf_endpoint
    if with_digest:
        message_body_json = {
            "a key": "a value",
            "another key": "A string",
            "yet another key": "Another string"
        }
        message_body_json_str = json.dumps(message_body_json)
    else:
        message_body_json_str = ''
    headers_domain = get_full_domain(host_domain, port)

    date_str = strftime("%a, %d %b %Y %H:%M:%S %Z", gmtime())
    boxpath = '/inbox'
    if not with_digest:
        headers = {
            'host': headers_domain,
            'date': date_str,
            'accept': content_type
        }
        signature_header = \
            sign_post_headers(date_str, private_key_pem, nickname,
                              domain, port,
                              host_domain, port,
                              boxpath, http_prefix, None, content_type,
                              algorithm, None)
    else:
        digest_prefix = get_digest_prefix(digest_algorithm)
        body_digest = \
            message_content_digest(message_body_json_str, digest_algorithm)
        content_length = len(message_body_json_str)
        headers = {
            'host': headers_domain,
            'date': date_str,
            'digest': f'{digest_prefix}={body_digest}',
            'content-type': content_type,
            'content-length': str(content_length)
        }
        assert get_digest_algorithm_from_headers(headers) == \
            digest_algorithm
        signature_header = \
            sign_post_headers(date_str, private_key_pem, nickname,
                              domain, port,
                              host_domain, port,
                              boxpath, http_prefix, message_body_json_str,
                              content_type, algorithm, digest_algorithm)
    headers['signature'] = signature_header
    getreq_method = not with_digest
    debug = True
    assert verify_post_headers(http_prefix, public_key_pem, headers,
                               boxpath, getreq_method, None,
                               message_body_json_str, debug)
    if with_digest:
        # everything correct except for content-length
        headers['content-length'] = str(content_length + 2)
        assert verify_post_headers(http_prefix, public_key_pem, headers,
                                   boxpath, getreq_method, None,
                                   message_body_json_str,
                                   False) is False
    assert verify_post_headers(http_prefix, public_key_pem, headers,
                               '/parambulator' + boxpath, getreq_method,
                               None,
                               message_body_json_str, False) is False
    assert verify_post_headers(http_prefix, public_key_pem, headers,
                               boxpath, not getreq_method, None,
                               message_body_json_str, False) is False
    if not with_digest:
        # fake domain
        headers = {
            'host': 'bogon.domain',
            'date': date_str,
            'content-type': content_type
        }
    else:
        # correct domain but fake message
        message_body_json_str = \
            '{"a key": "a value", "another key": "Fake GNUs", ' + \
            '"yet another key": "More Fake GNUs"}'
        content_length = len(message_body_json_str)
        digest_prefix = get_digest_prefix(digest_algorithm)
        body_digest = \
            message_content_digest(message_body_json_str, digest_algorithm)
        headers = {
            'host': domain,
            'date': date_str,
            'digest': f'{digest_prefix}={body_digest}',
            'content-type': content_type,
            'content-length': str(content_length)
        }
        assert get_digest_algorithm_from_headers(headers) == \
            digest_algorithm
    headers['signature'] = signature_header
    assert verify_post_headers(http_prefix, public_key_pem, headers,
                               boxpath, not getreq_method, None,
                               message_body_json_str, False) is False

    os.chdir(base_dir)
    shutil.rmtree(path, ignore_errors=False)


def _test_httpsig(base_dir: str):
    """Runs the HTTP signature tests both with and without a body digest"""
    _test_httpsig_base(True, base_dir)
    _test_httpsig_base(False, base_dir)


def _test_cache():
    """Tests storing a person actor in the cache and retrieving it"""
    print('test_cache')
    person_url = "cat@cardboard.box"
    person_json = {
        "id": 123456,
        "test": "This is a test"
    }
    person_cache = {}
    store_person_in_cache(None, person_url, person_json, person_cache, True)
    result = get_person_from_cache(None, person_url, person_cache)
    assert result['id'] == 123456
    assert result['test'] == 'This is a test'


def _test_threads_function(param1: str, param2: str):
    """A long-running busy function used as a target for the thread
    kill test. The parameters are unused.
    """
    for _ in range(10000):
        time.sleep(2)


def _test_threads():
    """Tests that a traced thread can be started, killed and joined"""
    print('test_threads')
    thr = \
        thread_with_trace(target=_test_threads_function,
                          args=('test', 'test2'),
                          daemon=True)
    thr.start()
    assert thr.is_alive() is True
    time.sleep(1)
    thr.kill()
    thr.join()
    assert thr.is_alive() is False


def create_server_alice(path: str, domain: str, port: int,
                        bob_address: str, federation_list: [],
                        has_follows: bool, has_posts: bool,
                        send_threads: []):
    """Creates a test server daemon for the account 'alice', optionally
    following bob and optionally with some initial outbox posts.
    Sets TEST_SERVER_ALICE_RUNNING just before the daemon starts.
    """
    print('Creating test server: Alice on port ' + str(port))
    if os.path.isdir(path):
        shutil.rmtree(path, ignore_errors=False)
    os.mkdir(path)
    os.chdir(path)
    shared_items_federated_domains = []
    system_language = 'en'
    languages_understood = [system_language]
    nickname = 'alice'
    http_prefix = 'http'
    proxy_type = None
    password = 'alicepass'
    max_replies = 64
    domain_max_posts_per_day = 1000
    account_max_posts_per_day = 1000
    allow_deletion = True
    low_bandwidth = True
    private_key_pem, public_key_pem, person, wf_endpoint = \
        create_person(path, nickname, domain, port, http_prefix,
                      True, False, password)
    assert private_key_pem
    assert public_key_pem
    assert person
    assert wf_endpoint
    delete_all_posts(path, nickname, domain, 'inbox')
    delete_all_posts(path, nickname, domain, 'outbox')
    assert set_skill_level(path, nickname, domain, 'hacking', 90)
    assert set_role(path, nickname, domain, 'guru')
    if has_follows:
        follow_person(path, nickname, domain, 'bob', bob_address,
                      federation_list, False, False, 'following.txt')
        add_follower_of_person(path, nickname, domain, 'bob', bob_address,
                               federation_list, False, False)
    if has_posts:
        test_save_to_file = True
        client_to_server = False
        test_comments_enabled = True
        test_attach_image_filename = None
        test_media_type = None
        test_image_description = None
        test_city = 'London, England'
        test_in_reply_to = None
        test_in_reply_to_atom_uri = None
        test_subject = None
        test_schedule_post = False
        test_event_date = None
        test_event_time = None
        test_event_end_time = None
        test_location = None
        test_is_article = False
        conversation_id = None
        translate = {}
        content_license_url = \
            'https://creativecommons.org/licenses/by-nc/4.0'
        media_license_url = \
            'https://creativecommons.org/licenses/by-nc/4.0'
        media_creator = 'Mr Blobby'
        buy_url = ''
        chat_url = ''
        auto_cw_cache = {}
        test_video_transcript = ''
        create_public_post(path, nickname, domain, port, http_prefix,
                           "No wise fish would go anywhere without " +
                           "a porpoise",
                           test_save_to_file,
                           client_to_server,
                           test_comments_enabled,
                           test_attach_image_filename,
                           test_media_type,
                           test_image_description, test_video_transcript,
                           test_city, test_in_reply_to,
                           test_in_reply_to_atom_uri,
                           test_subject, test_schedule_post,
                           test_event_date, test_event_time,
                           test_event_end_time,
                           test_location, test_is_article,
                           system_language, conversation_id,
                           low_bandwidth, content_license_url,
                           media_license_url, media_creator,
                           languages_understood, translate, buy_url,
                           chat_url, auto_cw_cache)
        create_public_post(path, nickname, domain, port, http_prefix,
                           "Curiouser and curiouser!",
                           test_save_to_file,
                           client_to_server,
                           test_comments_enabled,
                           test_attach_image_filename,
                           test_media_type,
                           test_image_description, test_video_transcript,
                           test_city, test_in_reply_to,
                           test_in_reply_to_atom_uri,
                           test_subject, test_schedule_post,
                           test_event_date, test_event_time,
                           test_event_end_time,
                           test_location, test_is_article,
                           system_language, conversation_id,
                           low_bandwidth, content_license_url,
                           media_license_url, media_creator,
                           languages_understood, translate, buy_url,
                           chat_url, auto_cw_cache)
        create_public_post(path, nickname, domain, port, http_prefix,
                           "In the gardens of memory, in the palace " +
                           "of dreams, that is where you and I shall meet",
                           test_save_to_file,
                           client_to_server,
                           test_comments_enabled,
                           test_attach_image_filename,
                           test_media_type,
                           test_image_description, test_video_transcript,
                           test_city, test_in_reply_to,
                           test_in_reply_to_atom_uri,
                           test_subject, test_schedule_post,
                           test_event_date, test_event_time,
                           test_event_end_time,
                           test_location, test_is_article,
                           system_language, conversation_id,
                           low_bandwidth, content_license_url,
                           media_license_url, media_creator,
                           languages_understood, translate, buy_url,
                           chat_url, auto_cw_cache)
        regenerate_index_for_box(path, nickname, domain, 'outbox')
    global TEST_SERVER_ALICE_RUNNING
    TEST_SERVER_ALICE_RUNNING = True
    max_mentions = 10
    max_emoji = 10
    onion_domain = None
    i2p_domain = None
    allow_local_network_access = True
    max_newswire_posts = 20
    dormant_months = 3
    send_threads_timeout_mins = 30
    max_followers = 10
    verify_all_signatures = True
    broch_mode = False
    show_node_info_accounts = True
    show_node_info_version = True
    city = 'London, England'
    log_login_failures = False
    user_agents_blocked = []
    max_like_count = 10
    default_reply_interval_hrs = 9999999999
    lists_enabled = ''
    content_license_url = 'https://creativecommons.org/licenses/by-nc/4.0'
    dyslexic_font = False
    crawlers_allowed = []
    check_actor_timeout = 2
    preferred_podcast_formats = None
    clacks = None
    map_format = 'gpx'
    max_hashtags = 20
    max_shares_on_profile = 8
    public_replies_unlisted = False
    no_of_books = 10
    accounts_data_dir = None
    print('Server running: Alice')
    run_daemon(accounts_data_dir, no_of_books, public_replies_unlisted,
               max_shares_on_profile, max_hashtags, map_format, clacks,
               preferred_podcast_formats,
               check_actor_timeout,
               crawlers_allowed, dyslexic_font,
               content_license_url,
               lists_enabled, default_reply_interval_hrs,
               low_bandwidth, max_like_count,
               shared_items_federated_domains,
               user_agents_blocked,
               log_login_failures, city,
               show_node_info_accounts,
               show_node_info_version,
               broch_mode,
               verify_all_signatures,
               send_threads_timeout_mins,
               dormant_months, max_newswire_posts,
               allow_local_network_access,
               2048, False, True, False, False, True, max_followers,
               0, 100, 1024, 5, False, 0,
               False, 1, False, False, False,
               5, True, True, 'en', __version__,
               "instance_id", False, path, domain,
               onion_domain, i2p_domain, None, None, port, port,
               http_prefix, federation_list, max_mentions, max_emoji,
               False, proxy_type, max_replies,
               domain_max_posts_per_day, account_max_posts_per_day,
               allow_deletion, True, True, False, send_threads,
               False)


def create_server_bob(path: str, domain: str, port: int,
                      alice_address: str, federation_list: [],
                      has_follows: bool, has_posts: bool,
                      send_threads: []):
    """Creates a test server daemon for the account 'bob', optionally
    following alice and optionally with some initial outbox posts.
    Sets TEST_SERVER_BOB_RUNNING just before the daemon starts.
    """
    print('Creating test server: Bob on port ' + str(port))
    if os.path.isdir(path):
        shutil.rmtree(path, ignore_errors=False)
    os.mkdir(path)
    os.chdir(path)
    shared_items_federated_domains = []
    system_language = 'en'
    languages_understood = [system_language]
    nickname = 'bob'
    http_prefix = 'http'
    proxy_type = None
    client_to_server = False
    password = 'bobpass'
    max_replies = 64
    domain_max_posts_per_day = 1000
    account_max_posts_per_day = 1000
    allow_deletion = True
    low_bandwidth = True
    private_key_pem, public_key_pem, person, wf_endpoint = \
        create_person(path, nickname, domain, port, http_prefix,
                      True, False, password)
    assert private_key_pem
    assert public_key_pem
    assert person
    assert wf_endpoint
    delete_all_posts(path, nickname, domain, 'inbox')
    delete_all_posts(path, nickname, domain, 'outbox')
    if has_follows and alice_address:
        follow_person(path, nickname, domain,
                      'alice', alice_address, federation_list,
                      False, False, 'following.txt')
        add_follower_of_person(path, nickname, domain,
                               'alice', alice_address, federation_list,
                               False, False)
    if has_posts:
        test_save_to_file = True
        test_comments_enabled = True
        test_attach_image_filename = None
        test_image_description = None
        test_media_type = None
        test_city = 'London, England'
        test_in_reply_to = None
        test_in_reply_to_atom_uri = None
        test_subject = None
        test_schedule_post = False
        test_event_date = None
        test_event_time = None
        test_event_end_time = None
        test_location = None
        test_is_article = False
        conversation_id = None
        content_license_url = \
            'https://creativecommons.org/licenses/by-nc/4.0'
        media_license_url = \
            'https://creativecommons.org/licenses/by-nc/4.0'
        media_creator = 'Hamster'
        translate = {}
        buy_url = ''
        chat_url = ''
        auto_cw_cache = {}
        test_video_transcript = ''
        create_public_post(path, nickname, domain, port, http_prefix,
                           "It's your life, live it your way.",
                           test_save_to_file,
                           client_to_server,
                           test_comments_enabled,
                           test_attach_image_filename,
                           test_media_type,
                           test_image_description, test_video_transcript,
                           test_city, test_in_reply_to,
                           test_in_reply_to_atom_uri,
                           test_subject, test_schedule_post,
                           test_event_date, test_event_time,
                           test_event_end_time,
                           test_location, test_is_article,
                           system_language, conversation_id,
                           low_bandwidth, content_license_url,
                           media_license_url, media_creator,
                           languages_understood, translate, buy_url,
                           chat_url, auto_cw_cache)
        create_public_post(path, nickname, domain, port, http_prefix,
                           "One of the things I've realised is that " +
                           "I am very simple",
                           test_save_to_file,
                           client_to_server,
                           test_comments_enabled,
                           test_attach_image_filename,
                           test_media_type,
                           test_image_description, test_video_transcript,
                           test_city, test_in_reply_to,
                           test_in_reply_to_atom_uri,
                           test_subject, test_schedule_post,
                           test_event_date, test_event_time,
                           test_event_end_time,
                           test_location, test_is_article,
                           system_language, conversation_id,
                           low_bandwidth, content_license_url,
                           media_license_url, media_creator,
                           languages_understood, translate, buy_url,
                           chat_url, auto_cw_cache)
        create_public_post(path, nickname, domain, port, http_prefix,
                           "Quantum physics is a bit of a passion of mine",
                           test_save_to_file,
                           client_to_server,
                           test_comments_enabled,
                           test_attach_image_filename,
                           test_media_type,
                           test_image_description, test_video_transcript,
                           test_city, test_in_reply_to,
                           test_in_reply_to_atom_uri,
                           test_subject, test_schedule_post,
                           test_event_date, test_event_time,
                           test_event_end_time,
                           test_location, test_is_article,
                           system_language, conversation_id,
                           low_bandwidth, content_license_url,
                           media_license_url, media_creator,
                           languages_understood, translate, buy_url,
                           chat_url, auto_cw_cache)
        regenerate_index_for_box(path, nickname, domain, 'outbox')
    global TEST_SERVER_BOB_RUNNING
    TEST_SERVER_BOB_RUNNING = True
    max_mentions = 10
    max_emoji = 10
    onion_domain = None
    i2p_domain = None
    allow_local_network_access = True
    max_newswire_posts = 20
    dormant_months = 3
    send_threads_timeout_mins = 30
    max_followers = 10
    verify_all_signatures = True
    broch_mode = False
    show_node_info_accounts = True
    show_node_info_version = True
    city = 'London, England'
    log_login_failures = False
    user_agents_blocked = []
    max_like_count = 10
    default_reply_interval_hrs = 9999999999
    lists_enabled = ''
    content_license_url = 'https://creativecommons.org/licenses/by-nc/4.0'
    dyslexic_font = False
    crawlers_allowed = []
    check_actor_timeout = 2
    preferred_podcast_formats = None
    clacks = None
    map_format = 'gpx'
    max_hashtags = 20
    max_shares_on_profile = 8
    public_replies_unlisted = False
    no_of_books = 10
    accounts_data_dir = None
    print('Server running: Bob')
    run_daemon(accounts_data_dir, no_of_books, public_replies_unlisted,
               max_shares_on_profile, max_hashtags, map_format, clacks,
               preferred_podcast_formats,
               check_actor_timeout,
               crawlers_allowed, dyslexic_font,
               content_license_url,
               lists_enabled, default_reply_interval_hrs,
               low_bandwidth, max_like_count,
               shared_items_federated_domains,
               user_agents_blocked,
               log_login_failures, city,
               show_node_info_accounts,
               show_node_info_version,
               broch_mode,
               verify_all_signatures,
               send_threads_timeout_mins,
               dormant_months, max_newswire_posts,
               allow_local_network_access,
               2048, False, True, False, False, True, max_followers,
               0, 100, 1024, 5, False, 0,
               False, 1, False, False, False,
               5, True, True, 'en', __version__,
               "instance_id", False, path, domain,
               onion_domain, i2p_domain, None, None, port, port,
               http_prefix, federation_list, max_mentions, max_emoji,
               False, proxy_type, max_replies,
               domain_max_posts_per_day, account_max_posts_per_day,
               allow_deletion, True, True, False, send_threads,
               False)


def create_server_eve(path: str, domain: str, port: int,
                      federation_list: [], has_follows: bool,
                      has_posts: bool, send_threads: []):
    """Creates a test server daemon for the account 'eve', with no
    initial follows or posts (has_follows/has_posts are accepted for
    signature consistency with the other create_server_* functions).
    Sets TEST_SERVER_EVE_RUNNING just before the daemon starts.
    """
    print('Creating test server: Eve on port ' + str(port))
    if os.path.isdir(path):
        shutil.rmtree(path, ignore_errors=False)
    os.mkdir(path)
    os.chdir(path)
    shared_items_federated_domains = []
    nickname = 'eve'
    http_prefix = 'http'
    proxy_type = None
    password = 'evepass'
    max_replies = 64
    allow_deletion = True
    private_key_pem, public_key_pem, person, wf_endpoint = \
        create_person(path, nickname, domain, port, http_prefix,
                      True, False, password)
    assert private_key_pem
    assert public_key_pem
    assert person
    assert wf_endpoint
    delete_all_posts(path, nickname, domain, 'inbox')
    delete_all_posts(path, nickname, domain, 'outbox')
    global TEST_SERVER_EVE_RUNNING
    TEST_SERVER_EVE_RUNNING = True
    max_mentions = 10
    max_emoji = 10
    onion_domain = None
    i2p_domain = None
    allow_local_network_access = True
    max_newswire_posts = 20
    dormant_months = 3
    send_threads_timeout_mins = 30
    max_followers = 10
    verify_all_signatures = True
    broch_mode = False
    show_node_info_accounts = True
    show_node_info_version = True
    city = 'London, England'
    log_login_failures = False
    user_agents_blocked = []
    max_like_count = 10
    low_bandwidth = True
    default_reply_interval_hrs = 9999999999
    lists_enabled = ''
    content_license_url = 'https://creativecommons.org/licenses/by-nc/4.0'
    dyslexic_font = False
    crawlers_allowed = []
    check_actor_timeout = 2
    preferred_podcast_formats = None
    clacks = None
    map_format = 'gpx'
    max_hashtags = 20
    max_shares_on_profile = 8
    public_replies_unlisted = False
    no_of_books = 10
    domain_max_posts_per_day = 1000
    account_max_posts_per_day = 1000
    accounts_data_dir = None
    print('Server running: Eve')
    run_daemon(accounts_data_dir, no_of_books, public_replies_unlisted,
               max_shares_on_profile, max_hashtags, map_format, clacks,
               preferred_podcast_formats,
               check_actor_timeout,
               crawlers_allowed, dyslexic_font,
               content_license_url,
               lists_enabled, default_reply_interval_hrs,
               low_bandwidth, max_like_count,
               shared_items_federated_domains,
               user_agents_blocked,
               log_login_failures, city,
               show_node_info_accounts,
               show_node_info_version,
               broch_mode,
               verify_all_signatures,
               send_threads_timeout_mins,
               dormant_months, max_newswire_posts,
               allow_local_network_access,
               2048, False, True, False, False, True, max_followers,
               0, 100, 1024, 5, False, 0,
               False, 1, False, False, False,
               5, True, True, 'en', __version__,
               "instance_id", False, path, domain,
               onion_domain, i2p_domain, None, None, port, port,
               http_prefix, federation_list, max_mentions, max_emoji,
               False, proxy_type, max_replies,
               domain_max_posts_per_day, account_max_posts_per_day,
               allow_deletion, True, True, False, send_threads,
               False)


def create_server_group(path: str, domain: str, port: int,
                        federation_list: [], has_follows: bool,
                        has_posts: bool, send_threads: []):
    """Creates a test server daemon for a group account 'testgroup'
    (created with create_group rather than create_person).
    Sets TEST_SERVER_GROUP_RUNNING just before the daemon starts.
    """
    print('Creating test server: Group on port ' + str(port))
    if os.path.isdir(path):
        shutil.rmtree(path, ignore_errors=False)
    os.mkdir(path)
    os.chdir(path)
    shared_items_federated_domains = []
    # system_language = 'en'
    nickname = 'testgroup'
    http_prefix = 'http'
    proxy_type = None
    password = 'testgrouppass'
    max_replies = 64
    domain_max_posts_per_day = 1000
    account_max_posts_per_day = 1000
    allow_deletion = True
    private_key_pem, public_key_pem, person, wf_endpoint = \
        create_group(path, nickname, domain, port, http_prefix,
                     True, password)
    assert private_key_pem
    assert public_key_pem
    assert person
    assert wf_endpoint
    delete_all_posts(path, nickname, domain, 'inbox')
    delete_all_posts(path, nickname, domain, 'outbox')
    global TEST_SERVER_GROUP_RUNNING
    TEST_SERVER_GROUP_RUNNING = True
    max_mentions = 10
    max_emoji = 10
    onion_domain = None
    i2p_domain = None
    allow_local_network_access = True
    max_newswire_posts = 20
    dormant_months = 3
    send_threads_timeout_mins = 30
    max_followers = 10
    verify_all_signatures = True
    broch_mode = False
    show_node_info_accounts = True
    show_node_info_version = True
    city = 'London, England'
    log_login_failures = False
    user_agents_blocked = []
    max_like_count = 10
    low_bandwidth = True
    default_reply_interval_hrs = 9999999999
    lists_enabled = ''
    content_license_url = 'https://creativecommons.org/licenses/by-nc/4.0'
    dyslexic_font = False
    crawlers_allowed = []
    check_actor_timeout = 2
    preferred_podcast_formats = None
    clacks = None
    map_format = 'gpx'
    max_hashtags = 20
    max_shares_on_profile = 8
    public_replies_unlisted = False
    no_of_books = 10
    accounts_data_dir = None
    print('Server running: Group')
    run_daemon(accounts_data_dir, no_of_books, public_replies_unlisted,
               max_shares_on_profile, max_hashtags, map_format, clacks,
               preferred_podcast_formats,
               check_actor_timeout,
               crawlers_allowed, dyslexic_font,
               content_license_url,
               lists_enabled, default_reply_interval_hrs,
               low_bandwidth, max_like_count,
               shared_items_federated_domains,
               user_agents_blocked,
               log_login_failures, city,
               show_node_info_accounts,
               show_node_info_version,
               broch_mode,
               verify_all_signatures,
               send_threads_timeout_mins,
               dormant_months, max_newswire_posts,
               allow_local_network_access,
               2048, False, True, False, False, True, max_followers,
               0, 100, 1024, 5, False, 0,
               False, 1, False, False, False,
               5, True, True, 'en', __version__,
               "instance_id", False, path, domain,
               onion_domain, i2p_domain, None, None, port, port,
               http_prefix, federation_list, max_mentions, max_emoji,
               False, proxy_type, max_replies,
               domain_max_posts_per_day, account_max_posts_per_day,
               allow_deletion, True, True, False, send_threads,
               False)


def test_post_message_between_servers(base_dir: str) -> None:
    """Tests sending a message from one server to the inbox of another.
    NOTE(review): this function continues beyond the end of this chunk;
    only the visible prefix is reproduced here.
    """
    print('Testing sending message from one server to the inbox of another')
    global TEST_SERVER_ALICE_RUNNING
    global TEST_SERVER_BOB_RUNNING
    TEST_SERVER_ALICE_RUNNING = False
    TEST_SERVER_BOB_RUNNING = False
    system_language = 'en'
    languages_understood = [system_language]
    http_prefix = 'http'
    proxy_type = None
    content_license_url = 'https://creativecommons.org/licenses/by-nc/4.0'
    media_license_url = 'https://creativecommons.org/licenses/by-nc/4.0'
    media_creator = 'Secret Squirrel'
    if os.path.isdir(base_dir + '/.tests'):
        shutil.rmtree(base_dir + '/.tests', ignore_errors=False)
    os.mkdir(base_dir + '/.tests')

    # create the servers
    alice_dir = base_dir + '/.tests/alice'
    alice_domain = '127.0.0.50'
    alice_port = 61935
    alice_address = alice_domain + ':' + str(alice_port)

    bob_dir = base_dir + '/.tests/bob'
    bob_domain = '127.0.0.100'
    bob_port = 61936
    federation_list = [bob_domain, alice_domain]
    alice_send_threads = []
    bob_send_threads = []
    bob_address = bob_domain + ':' + str(bob_port)

    global THR_ALICE
    if THR_ALICE:
        while THR_ALICE.is_alive():
            THR_ALICE.stop()
            time.sleep(1)
        THR_ALICE.kill()

    THR_ALICE = \
        thread_with_trace(target=create_server_alice,
                          args=(alice_dir, alice_domain, alice_port,
                                bob_address,
                                federation_list, False, False,
                                alice_send_threads),
                          daemon=True)

    global THR_BOB
    if THR_BOB:
        while THR_BOB.is_alive():
            THR_BOB.stop()
            time.sleep(1)
        THR_BOB.kill()

    THR_BOB = \
        thread_with_trace(target=create_server_bob,
                          args=(bob_dir, bob_domain, bob_port,
                                alice_address,
                                federation_list, False, False,
                                bob_send_threads),
                          daemon=True)

    THR_ALICE.start()
    THR_BOB.start()
    assert THR_ALICE.is_alive() is True
    assert THR_BOB.is_alive() is True

    # wait for
both servers to be running while not (TEST_SERVER_ALICE_RUNNING and TEST_SERVER_BOB_RUNNING): time.sleep(1) time.sleep(1) print('\n\n*******************************************************') print('Alice sends to Bob') os.chdir(alice_dir) session_alice = create_session(proxy_type) in_reply_to = None in_reply_to_atom_uri = None subject = None alice_post_log = [] save_to_file = True client_to_server = False cc_url = None alice_person_cache = {} alice_cached_webfingers = {} alice_shared_items_federated_domains = [] alice_shared_item_federation_tokens = {} attached_image_filename = base_dir + '/img/logo.png' test_image_width, test_image_height = \ get_image_dimensions(attached_image_filename) assert test_image_width assert test_image_height media_type = get_attachment_media_type(attached_image_filename) attached_image_description = 'Logo' video_transcript = None is_article = False city = 'London, England' # nothing in Alice's outbox last_pub_filename = \ data_dir(alice_dir) + '/alice@' + alice_domain + '/.last_published' assert not os.path.isfile(last_pub_filename) outbox_path = data_dir(alice_dir) + '/alice@' + alice_domain + '/outbox' assert len([name for name in os.listdir(outbox_path) if os.path.isfile(os.path.join(outbox_path, name))]) == 0 low_bandwidth = False signing_priv_key_pem = None translate = {} buy_url = '' chat_url = '' auto_cw_cache = {} send_result = \ send_post(signing_priv_key_pem, __version__, session_alice, alice_dir, 'alice', alice_domain, alice_port, 'bob', bob_domain, bob_port, cc_url, http_prefix, 'Why is a mouse when it spins? 
' + 'यह एक परीक्षण है #sillyquestion', save_to_file, client_to_server, True, attached_image_filename, media_type, attached_image_description, video_transcript, city, federation_list, alice_send_threads, alice_post_log, alice_cached_webfingers, alice_person_cache, is_article, system_language, languages_understood, alice_shared_items_federated_domains, alice_shared_item_federation_tokens, low_bandwidth, content_license_url, media_license_url, media_creator, translate, buy_url, chat_url, auto_cw_cache, True, in_reply_to, in_reply_to_atom_uri, subject) print('send_result: ' + str(send_result)) queue_path = data_dir(bob_dir) + '/bob@' + bob_domain + '/queue' inbox_path = data_dir(bob_dir) + '/bob@' + bob_domain + '/inbox' m_path = get_media_path() media_path = alice_dir + '/' + m_path for _ in range(30): if os.path.isdir(inbox_path): if len([name for name in os.listdir(inbox_path) if os.path.isfile(os.path.join(inbox_path, name))]) > 0: if len([name for name in os.listdir(outbox_path) if os.path.isfile(os.path.join(outbox_path, name))]) == 1: if len([name for name in os.listdir(media_path) if os.path.isfile(os.path.join(media_path, name))]) > 0: if len([name for name in os.listdir(queue_path) if os.path.isfile(os.path.join(queue_path, name))]) == 0: break time.sleep(1) assert os.path.isfile(last_pub_filename) # check that a news account exists news_actor_dir = data_dir(alice_dir) + '/news@' + alice_domain print("news_actor_dir: " + news_actor_dir) assert os.path.isdir(news_actor_dir) news_actor_file = news_actor_dir + '.json' assert os.path.isfile(news_actor_file) news_actor_json = load_json(news_actor_file) assert news_actor_json assert news_actor_json.get("id") # check the id of the news actor print('News actor Id: ' + news_actor_json["id"]) assert (news_actor_json["id"] == http_prefix + '://' + alice_address + '/users/news') # Image attachment created assert len([name for name in os.listdir(media_path) if os.path.isfile(os.path.join(media_path, name))]) > 0 # inbox 
item created assert len([name for name in os.listdir(inbox_path) if os.path.isfile(os.path.join(inbox_path, name))]) == 1 # queue item removed testval = len([name for name in os.listdir(queue_path) if os.path.isfile(os.path.join(queue_path, name))]) print('queue_path: ' + queue_path + ' '+str(testval)) assert testval == 0 assert valid_inbox(bob_dir, 'bob', bob_domain) assert valid_inbox_filenames(bob_dir, 'bob', bob_domain, alice_domain, alice_port) print('Check that message received from Alice contains the expected text') for name in os.listdir(inbox_path): filename = os.path.join(inbox_path, name) assert os.path.isfile(filename) received_json = load_json(filename, 0) if received_json: pprint(received_json['object']['content']) assert received_json assert 'Why is a mouse when it spins?' in \ received_json['object']['content'] assert 'Why is a mouse when it spins?' in \ received_json['object']['contentMap'][system_language] assert 'यह एक परीक्षण है' in received_json['object']['content'] print('Check that message received from Alice contains an attachment') assert received_json['object']['attachment'] if len(received_json['object']['attachment']) != 2: pprint(received_json['object']['attachment']) assert len(received_json['object']['attachment']) == 2 attached = received_json['object']['attachment'][0] pprint(attached) assert attached.get('type') assert attached.get('url') assert attached['mediaType'] == 'image/png' url_str = get_url_from_post(attached['url']) if '/system/media_attachments/files/' not in url_str: print(str(attached['url'])) assert '/system/media_attachments/files/' in url_str assert url_str.endswith('.png') assert attached.get('width') assert attached.get('height') assert attached['width'] > 0 assert attached['height'] > 0 print('\n\n*******************************************************') print("Bob likes Alice's post") alice_domain_str = alice_domain + ':' + str(alice_port) add_follower_of_person(bob_dir, 'bob', bob_domain, 'alice', 
alice_domain_str, federation_list, False, False) bob_domain_str = bob_domain + ':' + str(bob_port) follow_person(alice_dir, 'alice', alice_domain, 'bob', bob_domain_str, federation_list, False, False, 'following.txt') session_bob = create_session(proxy_type) bob_post_log = [] bob_person_cache = {} bob_cached_webfingers = {} sites_unavailable = [] status_number = None outbox_post_filename = None outbox_path = data_dir(alice_dir) + '/alice@' + alice_domain + '/outbox' for name in os.listdir(outbox_path): if '#statuses#' in name: status_number = \ int(name.split('#statuses#')[1].replace('.json', '')) outbox_post_filename = outbox_path + '/' + name assert status_number > 0 assert outbox_post_filename assert like_post({}, session_bob, bob_dir, federation_list, 'bob', bob_domain, bob_port, http_prefix, 'alice', alice_domain, alice_port, [], status_number, False, bob_send_threads, bob_post_log, bob_person_cache, bob_cached_webfingers, True, __version__, signing_priv_key_pem, bob_domain, None, None, sites_unavailable, system_language) for _ in range(20): if text_in_file('likes', outbox_post_filename): break time.sleep(1) alice_post_json = load_json(outbox_post_filename, 0) if alice_post_json: pprint(alice_post_json) assert text_in_file('likes', outbox_post_filename) print('\n\n*******************************************************') print("Bob reacts to Alice's post") sites_unavailable = [] assert reaction_post({}, session_bob, bob_dir, federation_list, 'bob', bob_domain, bob_port, http_prefix, 'alice', alice_domain, alice_port, [], status_number, '😀', False, bob_send_threads, bob_post_log, bob_person_cache, bob_cached_webfingers, True, __version__, signing_priv_key_pem, bob_domain, None, None, sites_unavailable, system_language) for _ in range(20): if text_in_file('reactions', outbox_post_filename): break time.sleep(1) alice_post_json = load_json(outbox_post_filename, 0) if alice_post_json: pprint(alice_post_json) if not text_in_file('reactions', outbox_post_filename): 
pprint(alice_post_json) assert text_in_file('reactions', outbox_post_filename) print('\n\n*******************************************************') print("Bob repeats Alice's post") object_url = \ http_prefix + '://' + alice_domain + ':' + str(alice_port) + \ '/users/alice/statuses/' + str(status_number) inbox_path = data_dir(alice_dir) + '/alice@' + alice_domain + '/inbox' outbox_path = data_dir(bob_dir) + '/bob@' + bob_domain + '/outbox' outbox_before_announce_count = \ len([name for name in os.listdir(outbox_path) if os.path.isfile(os.path.join(outbox_path, name))]) before_announce_count = \ len([name for name in os.listdir(inbox_path) if os.path.isfile(os.path.join(inbox_path, name))]) print('inbox items before announce: ' + str(before_announce_count)) print('outbox items before announce: ' + str(outbox_before_announce_count)) assert outbox_before_announce_count == 0 assert before_announce_count == 0 sites_unavailable = [] announce_public(session_bob, bob_dir, federation_list, 'bob', bob_domain, bob_port, http_prefix, object_url, False, bob_send_threads, bob_post_log, bob_person_cache, bob_cached_webfingers, True, __version__, signing_priv_key_pem, bob_domain, None, None, sites_unavailable, system_language) announce_message_arrived = False outbox_message_arrived = False for _ in range(20): time.sleep(1) if not os.path.isdir(inbox_path): continue if len([name for name in os.listdir(outbox_path) if os.path.isfile(os.path.join(outbox_path, name))]) > 0: outbox_message_arrived = True print('Announce created by Bob') if len([name for name in os.listdir(inbox_path) if os.path.isfile(os.path.join(inbox_path, name))]) > 0: announce_message_arrived = True print('Announce message sent to Alice!') if announce_message_arrived and outbox_message_arrived: break after_announce_count = \ len([name for name in os.listdir(inbox_path) if os.path.isfile(os.path.join(inbox_path, name))]) outbox_after_announce_count = \ len([name for name in os.listdir(outbox_path) if 
def test_follow_between_servers(base_dir: str) -> None:
    """Integration test: follow request between two local instances.

    Boots two daemon instances ("alice" and "bob"), has Alice send a
    follow request to Bob, verifies the followers/following/calendar
    files on both sides, then sends a plain post from Alice to Bob to
    confirm delivery works over the established relationship.

    base_dir: working directory under which a temporary '.tests' tree
    is created and later removed.

    Uses the module-level globals THR_ALICE / THR_BOB (server threads) and
    TEST_SERVER_ALICE_RUNNING / TEST_SERVER_BOB_RUNNING (readiness flags,
    set elsewhere by the server-creation functions).
    """
    print('Testing sending a follow request from one server to another')
    global TEST_SERVER_ALICE_RUNNING
    global TEST_SERVER_BOB_RUNNING
    TEST_SERVER_ALICE_RUNNING = False
    TEST_SERVER_BOB_RUNNING = False
    system_language = 'en'
    languages_understood = [system_language]
    http_prefix = 'http'
    proxy_type = None
    federation_list = []
    content_license_url = 'https://creativecommons.org/licenses/by-nc/4.0'
    media_license_url = 'https://creativecommons.org/licenses/by-nc/4.0'
    media_creator = 'Penfold'
    # start from a clean test directory
    if os.path.isdir(base_dir + '/.tests'):
        shutil.rmtree(base_dir + '/.tests', ignore_errors=False)
    os.mkdir(base_dir + '/.tests')

    # create the servers
    alice_dir = base_dir + '/.tests/alice'
    alice_domain = '127.0.0.47'
    alice_port = 61935
    alice_send_threads = []
    alice_address = alice_domain + ':' + str(alice_port)
    bob_dir = base_dir + '/.tests/bob'
    bob_domain = '127.0.0.79'
    bob_port = 61936
    bob_send_threads = []
    bob_address = bob_domain + ':' + str(bob_port)

    # stop any server thread left over from a previous test run
    global THR_ALICE
    if THR_ALICE:
        while THR_ALICE.is_alive():
            THR_ALICE.stop()
            time.sleep(1)
        THR_ALICE.kill()
    THR_ALICE = \
        thread_with_trace(target=create_server_alice,
                          args=(alice_dir, alice_domain, alice_port,
                                bob_address, federation_list, False, False,
                                alice_send_threads),
                          daemon=True)

    global THR_BOB
    if THR_BOB:
        while THR_BOB.is_alive():
            THR_BOB.stop()
            time.sleep(1)
        THR_BOB.kill()
    THR_BOB = \
        thread_with_trace(target=create_server_bob,
                          args=(bob_dir, bob_domain, bob_port, alice_address,
                                federation_list, False, False,
                                bob_send_threads),
                          daemon=True)

    THR_ALICE.start()
    THR_BOB.start()
    assert THR_ALICE.is_alive() is True
    assert THR_BOB.is_alive() is True

    # wait for all servers to be running, with a 60 second timeout
    ctr = 0
    while not (TEST_SERVER_ALICE_RUNNING and TEST_SERVER_BOB_RUNNING):
        time.sleep(1)
        ctr += 1
        if ctr > 60:
            break
    print('Alice online: ' + str(TEST_SERVER_ALICE_RUNNING))
    print('Bob online: ' + str(TEST_SERVER_BOB_RUNNING))
    assert ctr <= 60
    time.sleep(1)

    # In the beginning all was calm and there were no follows

    print('*********************************************************')
    print('Alice sends a follow request to Bob')
    os.chdir(alice_dir)
    session_alice = create_session(proxy_type)
    in_reply_to = None
    in_reply_to_atom_uri = None
    subject = None
    alice_post_log = []
    save_to_file = True
    client_to_server = False
    cc_url = None
    alice_person_cache = {}
    alice_cached_webfingers = {}
    # (alice_post_log is re-initialised here — harmless duplicate)
    alice_post_log = []
    sites_unavailable = []
    bob_actor = http_prefix + '://' + bob_address + '/users/bob'
    signing_priv_key_pem = None
    send_result = \
        send_follow_request(session_alice, alice_dir,
                            'alice', alice_domain, alice_domain, alice_port,
                            http_prefix, 'bob', bob_domain, bob_actor,
                            bob_port, http_prefix,
                            client_to_server, federation_list,
                            alice_send_threads, alice_post_log,
                            alice_cached_webfingers, alice_person_cache,
                            True, __version__, signing_priv_key_pem,
                            alice_domain, None, None, sites_unavailable,
                            system_language)
    print('send_result: ' + str(send_result))

    # poll (up to 16s) for the follow to be recorded on both instances
    alice_dir_str = data_dir(alice_dir)
    bob_dir_str = data_dir(bob_dir)
    for _ in range(16):
        if os.path.isfile(bob_dir_str + '/bob@' + bob_domain +
                          '/followers.txt'):
            if os.path.isfile(alice_dir_str + '/alice@' + alice_domain +
                              '/following.txt'):
                if os.path.isfile(alice_dir_str + '/alice@' + alice_domain +
                                  '/followingCalendar.txt'):
                    break
        time.sleep(1)

    assert valid_inbox(bob_dir, 'bob', bob_domain)
    assert valid_inbox_filenames(bob_dir, 'bob', bob_domain,
                                 alice_domain, alice_port)
    # each side should now reference the other in its follow files
    assert text_in_file('alice@' + alice_domain,
                        bob_dir_str + '/bob@' + bob_domain +
                        '/followers.txt')
    assert text_in_file('bob@' + bob_domain,
                        alice_dir_str + '/alice@' + alice_domain +
                        '/following.txt')
    assert text_in_file('bob@' + bob_domain,
                        alice_dir_str + '/alice@' + alice_domain +
                        '/followingCalendar.txt')
    assert not is_group_actor(alice_dir, bob_actor, alice_person_cache)
    assert not is_group_account(alice_dir, 'alice', alice_domain)

    print('\n\n*********************************************************')
    print('Alice sends a message to Bob')
    alice_post_log = []
    alice_person_cache = {}
    alice_cached_webfingers = {}
    alice_shared_items_federated_domains = []
    alice_shared_item_federation_tokens = {}
    alice_post_log = []
    is_article = False
    city = 'London, England'
    low_bandwidth = False
    signing_priv_key_pem = None
    translate = {}
    buy_url = ''
    chat_url = ''
    video_transcript = None
    auto_cw_cache = {}
    send_result = \
        send_post(signing_priv_key_pem, __version__,
                  session_alice, alice_dir, 'alice', alice_domain,
                  alice_port, 'bob', bob_domain, bob_port, cc_url,
                  http_prefix, 'Alice message', save_to_file,
                  client_to_server, True,
                  None, None, None, video_transcript, city,
                  federation_list, alice_send_threads, alice_post_log,
                  alice_cached_webfingers, alice_person_cache,
                  is_article, system_language, languages_understood,
                  alice_shared_items_federated_domains,
                  alice_shared_item_federation_tokens, low_bandwidth,
                  content_license_url, media_license_url, media_creator,
                  translate, buy_url, chat_url, auto_cw_cache,
                  True, in_reply_to, in_reply_to_atom_uri, subject)
    print('send_result: ' + str(send_result))

    # poll (up to 20s) for the post to arrive in Bob's inbox
    queue_path = data_dir(bob_dir) + '/bob@' + bob_domain + '/queue'
    inbox_path = data_dir(bob_dir) + '/bob@' + bob_domain + '/inbox'
    alice_message_arrived = False
    for _ in range(20):
        time.sleep(1)
        if os.path.isdir(inbox_path):
            if len([name for name in os.listdir(inbox_path)
                    if os.path.isfile(os.path.join(inbox_path,
                                                   name))]) > 0:
                alice_message_arrived = True
                print('Alice message sent to Bob!')
                break
    assert alice_message_arrived is True
    print('Message from Alice to Bob succeeded')

    # stop the servers
    THR_ALICE.kill()
    THR_ALICE.join()
    assert THR_ALICE.is_alive() is False
    THR_BOB.kill()
    THR_BOB.join()
    assert THR_BOB.is_alive() is False

    # queue item removed
    time.sleep(8)
    assert len([name for name in os.listdir(queue_path)
                if os.path.isfile(os.path.join(queue_path, name))]) == 0

    # leave the test directory before deleting it
    os.chdir(base_dir)
    shutil.rmtree(base_dir + '/.tests', ignore_errors=False)
def test_shared_items_federation(base_dir: str) -> None:
    """Integration test: federated shared-items catalogs.

    Boots two daemon instances ("alice" and "bob"), configures each to
    federate shared items with the other, has Bob publish three shared
    items, then verifies: Bob's own catalog/offers/wanted endpoints, the
    exchange of shared-item federation tokens triggered by a post from
    Alice, and finally that Alice can fetch Bob's catalog using Bob's
    authorization token.

    base_dir: working directory under which a temporary '.tests' tree
    is created and later removed.

    Uses the module-level globals THR_ALICE / THR_BOB (server threads) and
    TEST_SERVER_ALICE_RUNNING / TEST_SERVER_BOB_RUNNING (readiness flags,
    set elsewhere by the server-creation functions).
    """
    print('Testing federation of shared items between Alice and Bob')
    global TEST_SERVER_ALICE_RUNNING
    global TEST_SERVER_BOB_RUNNING
    TEST_SERVER_ALICE_RUNNING = False
    TEST_SERVER_BOB_RUNNING = False
    system_language = 'en'
    languages_understood = [system_language]
    http_prefix = 'http'
    proxy_type = None
    federation_list = []
    content_license_url = 'https://creativecommons.org/licenses/by-nc/4.0'
    media_license_url = 'https://creativecommons.org/licenses/by-nc/4.0'
    media_creator = 'Dr Drokk'
    # start from a clean test directory
    if os.path.isdir(base_dir + '/.tests'):
        shutil.rmtree(base_dir + '/.tests', ignore_errors=False)
    os.mkdir(base_dir + '/.tests')

    # create the servers
    alice_dir = base_dir + '/.tests/alice'
    alice_domain = '127.0.0.74'
    alice_port = 61917
    alice_send_threads = []
    alice_address = alice_domain + ':' + str(alice_port)
    bob_dir = base_dir + '/.tests/bob'
    bob_domain = '127.0.0.81'
    bob_port = 61983
    bob_send_threads = []
    bob_address = bob_domain + ':' + str(bob_port)
    bob_password = 'bobpass'
    bob_cached_webfingers = {}
    bob_person_cache = {}

    # stop any server thread left over from a previous test run
    global THR_ALICE
    if THR_ALICE:
        while THR_ALICE.is_alive():
            THR_ALICE.stop()
            time.sleep(1)
        THR_ALICE.kill()
    THR_ALICE = \
        thread_with_trace(target=create_server_alice,
                          args=(alice_dir, alice_domain, alice_port,
                                bob_address, federation_list, False, False,
                                alice_send_threads),
                          daemon=True)

    global THR_BOB
    if THR_BOB:
        while THR_BOB.is_alive():
            THR_BOB.stop()
            time.sleep(1)
        THR_BOB.kill()
    THR_BOB = \
        thread_with_trace(target=create_server_bob,
                          args=(bob_dir, bob_domain, bob_port, alice_address,
                                federation_list, False, False,
                                bob_send_threads),
                          daemon=True)

    THR_ALICE.start()
    THR_BOB.start()
    assert THR_ALICE.is_alive() is True
    assert THR_BOB.is_alive() is True

    # wait for all servers to be running, with a 60 second timeout
    ctr = 0
    while not (TEST_SERVER_ALICE_RUNNING and TEST_SERVER_BOB_RUNNING):
        time.sleep(1)
        ctr += 1
        if ctr > 60:
            break
    print('Alice online: ' + str(TEST_SERVER_ALICE_RUNNING))
    print('Bob online: ' + str(TEST_SERVER_BOB_RUNNING))
    assert ctr <= 60
    time.sleep(1)

    signing_priv_key_pem = None
    session_client = create_session(proxy_type)

    # Get Bob's instance actor
    print('\n\n*********************************************************')
    print("Test Bob's instance actor")
    profile_str = 'https://www.w3.org/ns/activitystreams'
    test_headers = {
        'host': bob_address,
        'Accept': 'application/ld+json; profile="' + profile_str + '"'
    }
    bob_instance_actor_json = \
        get_json(signing_priv_key_pem, session_client,
                 'http://' + bob_address + '/@actor', test_headers, {},
                 True, __version__, 'http', 'somedomain.or.other', 10,
                 False)
    if not get_json_valid(bob_instance_actor_json):
        print('Unable to get json for ' +
              'http://' + bob_address + '/@actor')
    assert bob_instance_actor_json
    pprint(bob_instance_actor_json)
    assert bob_instance_actor_json['name'] == 'ACTOR'

    # In the beginning all was calm and there were no follows
    print('\n\n*********************************************************')
    print("Alice and Bob agree to share items catalogs")
    assert os.path.isdir(alice_dir)
    assert os.path.isdir(bob_dir)
    # point each instance at the other for shared-items federation
    set_config_param(alice_dir, 'sharedItemsFederatedDomains', bob_address)
    set_config_param(bob_dir, 'sharedItemsFederatedDomains', alice_address)

    print('*********************************************************')
    print('Alice sends a follow request to Bob')
    os.chdir(alice_dir)
    session_alice = create_session(proxy_type)
    in_reply_to = None
    in_reply_to_atom_uri = None
    subject = None
    alice_post_log = []
    save_to_file = True
    client_to_server = False
    cc_url = None
    alice_person_cache = {}
    alice_cached_webfingers = {}
    # (alice_post_log is re-initialised here — harmless duplicate)
    alice_post_log = []
    sites_unavailable = []
    bob_actor = http_prefix + '://' + bob_address + '/users/bob'
    send_result = \
        send_follow_request(session_alice, alice_dir,
                            'alice', alice_domain, alice_domain, alice_port,
                            http_prefix, 'bob', bob_domain, bob_actor,
                            bob_port, http_prefix,
                            client_to_server, federation_list,
                            alice_send_threads, alice_post_log,
                            alice_cached_webfingers, alice_person_cache,
                            True, __version__, signing_priv_key_pem,
                            alice_domain, None, None, sites_unavailable,
                            system_language)
    print('send_result: ' + str(send_result))

    # poll (up to 16s) for the follow to be recorded on both instances
    alice_dir_str = data_dir(alice_dir)
    bob_dir_str = data_dir(bob_dir)
    for _ in range(16):
        if os.path.isfile(bob_dir_str + '/bob@' + bob_domain +
                          '/followers.txt'):
            if os.path.isfile(alice_dir_str + '/alice@' + alice_domain +
                              '/following.txt'):
                if os.path.isfile(alice_dir_str + '/alice@' + alice_domain +
                                  '/followingCalendar.txt'):
                    break
        time.sleep(1)

    assert valid_inbox(bob_dir, 'bob', bob_domain)
    assert valid_inbox_filenames(bob_dir, 'bob', bob_domain,
                                 alice_domain, alice_port)
    assert text_in_file('alice@' + alice_domain,
                        bob_dir_str + '/bob@' + bob_domain +
                        '/followers.txt')
    assert text_in_file('bob@' + bob_domain,
                        alice_dir_str + '/alice@' + alice_domain +
                        '/following.txt')
    assert text_in_file('bob@' + bob_domain,
                        alice_dir_str + '/alice@' + alice_domain +
                        '/followingCalendar.txt')
    assert not is_group_actor(alice_dir, bob_actor, alice_person_cache)
    assert not is_group_account(bob_dir, 'bob', bob_domain)

    print('\n\n*********************************************************')
    print('Bob publishes some shared items')
    # give Bob's instance the ontology files and a logo image that the
    # shared-items uploads below refer to
    if os.path.isdir(bob_dir + '/ontology'):
        shutil.rmtree(bob_dir + '/ontology', ignore_errors=False)
    os.mkdir(bob_dir + '/ontology')
    copyfile(base_dir + '/img/logo.png', bob_dir + '/logo.png')
    copyfile(base_dir + '/ontology/foodTypes.json',
             bob_dir + '/ontology/foodTypes.json')
    copyfile(base_dir + '/ontology/toolTypes.json',
             bob_dir + '/ontology/toolTypes.json')
    copyfile(base_dir + '/ontology/clothesTypes.json',
             bob_dir + '/ontology/clothesTypes.json')
    copyfile(base_dir + '/ontology/medicalTypes.json',
             bob_dir + '/ontology/medicalTypes.json')
    copyfile(base_dir + '/ontology/accommodationTypes.json',
             bob_dir + '/ontology/accommodationTypes.json')
    assert os.path.isfile(bob_dir + '/logo.png')
    assert os.path.isfile(bob_dir + '/ontology/foodTypes.json')
    assert os.path.isfile(bob_dir + '/ontology/toolTypes.json')
    assert os.path.isfile(bob_dir + '/ontology/clothesTypes.json')
    assert os.path.isfile(bob_dir + '/ontology/medicalTypes.json')
    assert os.path.isfile(bob_dir + '/ontology/accommodationTypes.json')

    # first shared item: food
    shared_item_name = 'cheddar'
    shared_item_description = 'Some cheese'
    shared_item_image_filename = 'logo.png'
    shared_item_qty = 1
    shared_item_type = 'Cheese'
    shared_item_category = 'Food'
    shared_item_location = "Bob's location"
    shared_item_duration = "10 days"
    shared_item_price = "1.30"
    shared_item_currency = "EUR"
    signing_priv_key_pem = None
    session_bob = create_session(proxy_type)
    share_json = \
        send_share_via_server(bob_dir, session_bob,
                              'bob', bob_password,
                              bob_domain, bob_port,
                              http_prefix, shared_item_name,
                              shared_item_description,
                              shared_item_image_filename,
                              shared_item_qty, shared_item_type,
                              shared_item_category,
                              shared_item_location, shared_item_duration,
                              bob_cached_webfingers, bob_person_cache,
                              True, __version__,
                              shared_item_price, shared_item_currency,
                              signing_priv_key_pem, system_language)
    assert share_json
    assert isinstance(share_json, dict)

    # second shared item: clothing, zero price
    shared_item_name = 'Epicyon T-shirt'
    shared_item_description = 'A fashionable item'
    shared_item_image_filename = 'logo.png'
    shared_item_qty = 1
    shared_item_type = 'T-Shirt'
    shared_item_category = 'Clothes'
    shared_item_location = "Bob's location"
    shared_item_duration = "5 days"
    shared_item_price = "0"
    shared_item_currency = "EUR"
    share_json = \
        send_share_via_server(bob_dir, session_bob,
                              'bob', bob_password,
                              bob_domain, bob_port,
                              http_prefix, shared_item_name,
                              shared_item_description,
                              shared_item_image_filename,
                              shared_item_qty, shared_item_type,
                              shared_item_category,
                              shared_item_location, shared_item_duration,
                              bob_cached_webfingers, bob_person_cache,
                              True, __version__,
                              shared_item_price, shared_item_currency,
                              signing_priv_key_pem, system_language)
    assert share_json
    assert isinstance(share_json, dict)

    # third shared item: a tool
    shared_item_name = 'Soldering iron'
    shared_item_description = 'A soldering iron'
    shared_item_image_filename = 'logo.png'
    shared_item_qty = 1
    shared_item_type = 'Soldering iron'
    shared_item_category = 'Tools'
    shared_item_location = "Bob's location"
    shared_item_duration = "9 days"
    shared_item_price = "10.00"
    shared_item_currency = "EUR"
    share_json = \
        send_share_via_server(bob_dir, session_bob,
                              'bob', bob_password,
                              bob_domain, bob_port,
                              http_prefix, shared_item_name,
                              shared_item_description,
                              shared_item_image_filename,
                              shared_item_qty, shared_item_type,
                              shared_item_category,
                              shared_item_location, shared_item_duration,
                              bob_cached_webfingers, bob_person_cache,
                              True, __version__,
                              shared_item_price, shared_item_currency,
                              signing_priv_key_pem, system_language)
    assert share_json
    assert isinstance(share_json, dict)
    # allow time for the uploads to be processed
    time.sleep(2)

    print('\n\n*********************************************************')
    print('Bob has a shares.json file containing the uploaded items')
    shares_filename = \
        data_dir(bob_dir) + '/bob@' + bob_domain + '/shares.json'
    assert os.path.isfile(shares_filename)
    shares_json = load_json(shares_filename)
    assert shares_json
    pprint(shares_json)
    assert len(shares_json.items()) == 3
    # every stored share should carry a dfcId field
    for item_id, item in shares_json.items():
        if not item.get('dfcId'):
            pprint(item)
            print(item_id + ' does not have dfcId field')
        assert item.get('dfcId')

    print('\n\n*********************************************************')
    print('Bob can read the shared items catalog on his own instance')
    signing_priv_key_pem = None
    catalog_json = \
        get_shared_items_catalog_via_server(session_bob, 'bob',
                                            bob_password, bob_domain,
                                            bob_port, http_prefix, True,
                                            signing_priv_key_pem)
    assert catalog_json
    pprint(catalog_json)
    assert 'DFC:supplies' in catalog_json
    assert len(catalog_json.get('DFC:supplies')) == 3

    # the offers collection should contain the uploaded (offered) items
    offers_json = \
        get_offers_via_server(session_bob, 'bob', bob_password,
                              bob_domain, bob_port,
                              http_prefix, True,
                              signing_priv_key_pem)
    assert offers_json
    print('Offers collection:')
    pprint(offers_json)
    assert isinstance(offers_json, dict)
    assert len(offers_json['orderedItems']) >= 1

    # nothing was uploaded as "wanted", so that collection stays empty
    wanted_json = \
        get_wanted_via_server(session_bob, 'bob', bob_password,
                              bob_domain, bob_port,
                              http_prefix, True,
                              signing_priv_key_pem)
    print('Wanted collection:')
    pprint(wanted_json)
    assert isinstance(wanted_json, dict)
    assert len(wanted_json['orderedItems']) == 0

    print('\n\n*********************************************************')
    print('Alice sends a message to Bob')
    # Alice already holds shared-item federation tokens on disk
    alice_tokens_filename = \
        data_dir(alice_dir) + '/sharedItemsFederationTokens.json'
    assert os.path.isfile(alice_tokens_filename)
    alice_shared_item_federation_tokens = load_json(alice_tokens_filename)
    assert alice_shared_item_federation_tokens
    print('Alice shared item federation tokens:')
    pprint(alice_shared_item_federation_tokens)
    assert len(alice_shared_item_federation_tokens.items()) > 0
    # token keys are expected to be host:port addresses
    for host_str, token in alice_shared_item_federation_tokens.items():
        assert ':' in host_str
    alice_post_log = []
    alice_person_cache = {}
    alice_cached_webfingers = {}
    alice_shared_items_federated_domains = [bob_address]
    alice_post_log = []
    is_article = False
    city = 'London, England'
    low_bandwidth = False
    signing_priv_key_pem = None
    translate = {}
    buy_url = ''
    chat_url = ''
    video_transcript = None
    auto_cw_cache = {}
    send_result = \
        send_post(signing_priv_key_pem, __version__,
                  session_alice, alice_dir, 'alice', alice_domain,
                  alice_port, 'bob', bob_domain, bob_port, cc_url,
                  http_prefix, 'Alice message', save_to_file,
                  client_to_server, True,
                  None, None, None, video_transcript, city,
                  federation_list, alice_send_threads, alice_post_log,
                  alice_cached_webfingers, alice_person_cache,
                  is_article, system_language, languages_understood,
                  alice_shared_items_federated_domains,
                  alice_shared_item_federation_tokens, low_bandwidth,
                  content_license_url, media_license_url, media_creator,
                  translate, buy_url, chat_url, auto_cw_cache,
                  True, in_reply_to, in_reply_to_atom_uri, subject)
    print('send_result: ' + str(send_result))

    # poll (up to 20s) for the post to arrive in Bob's inbox
    queue_path = data_dir(bob_dir) + '/bob@' + bob_domain + '/queue'
    inbox_path = data_dir(bob_dir) + '/bob@' + bob_domain + '/inbox'
    alice_message_arrived = False
    for _ in range(20):
        time.sleep(1)
        if os.path.isdir(inbox_path):
            if len([name for name in os.listdir(inbox_path)
                    if os.path.isfile(os.path.join(inbox_path,
                                                   name))]) > 0:
                alice_message_arrived = True
                print('Alice message sent to Bob!')
                break
    assert alice_message_arrived is True
    print('Message from Alice to Bob succeeded')

    print('\n\n*********************************************************')
    print('Check that Alice received the shared items authorization')
    print('token from Bob')
    alice_tokens_filename = \
        data_dir(alice_dir) + '/sharedItemsFederationTokens.json'
    bob_tokens_filename = \
        data_dir(bob_dir) + '/sharedItemsFederationTokens.json'
    assert os.path.isfile(alice_tokens_filename)
    assert os.path.isfile(bob_tokens_filename)
    alice_tokens = load_json(alice_tokens_filename)
    assert alice_tokens
    for host_str, token in alice_tokens.items():
        assert ':' in host_str
    assert alice_tokens.get(alice_address)
    print('Alice tokens')
    pprint(alice_tokens)
    bob_tokens = load_json(bob_tokens_filename)
    assert bob_tokens
    for host_str, token in bob_tokens.items():
        assert ':' in host_str
    assert bob_tokens.get(bob_address)
    print("Check that Bob now has Alice's token")
    pprint(bob_tokens)
    # the post exchange should have delivered Alice's token to Bob
    assert bob_tokens.get(alice_address)
    print('Bob tokens')
    pprint(bob_tokens)

    print('\n\n*********************************************************')
    print('Alice can read the federated shared items catalog of Bob')
    # fetch Bob's catalog using Bob's own token as authorization
    headers = {
        'Origin': alice_address,
        'Authorization': bob_tokens[bob_address],
        'host': bob_address,
        'Accept': 'application/json'
    }
    url = http_prefix + '://' + bob_address + '/catalog'
    signing_priv_key_pem = None
    catalog_json = get_json(signing_priv_key_pem, session_alice, url,
                            headers, None, True)
    assert get_json_valid(catalog_json)
    pprint(catalog_json)
    assert 'DFC:supplies' in catalog_json
    assert len(catalog_json.get('DFC:supplies')) == 3

    # queue item removed
    # wait (bounded) for Bob's processing queue to drain
    ctr = 0
    while len([name for name in os.listdir(queue_path)
               if os.path.isfile(os.path.join(queue_path, name))]) > 0:
        ctr += 1
        if ctr > 10:
            break
        time.sleep(1)
    # assert len([name for name in os.listdir(queue_path)
    #             if os.path.isfile(os.path.join(queue_path, name))]) == 0

    # stop the servers
    THR_ALICE.kill()
    THR_ALICE.join()
    assert THR_ALICE.is_alive() is False
    THR_BOB.kill()
    THR_BOB.join()
    assert THR_BOB.is_alive() is False

    # leave the test directory before deleting it
    os.chdir(base_dir)
    shutil.rmtree(base_dir + '/.tests', ignore_errors=False)
    print('Testing federation of shared items between ' +
          'Alice and Bob is complete')
# NOTE(review): physical lines in this region are whitespace-mangled (many
# statements collapsed per line), so only comment lines are added at safe
# points; the code itself is left byte-identical.
# End of the shared-items federation test: build Bob's auth headers, GET his
# /catalog, check the three 'DFC:supplies' entries, wait up to ~10s for the
# send queue to drain, kill both server threads and remove the .tests tree.
# test_group_follow() also begins on this line: it resets the
# TEST_SERVER_*_RUNNING globals and configures Alice (127.0.0.57:61927) and
# Bob (127.0.0.59:61814); the group server is configured on the next line.
bob_tokens[bob_address], 'host': bob_address, 'Accept': 'application/json' } url = http_prefix + '://' + bob_address + '/catalog' signing_priv_key_pem = None catalog_json = get_json(signing_priv_key_pem, session_alice, url, headers, None, True) assert get_json_valid(catalog_json) pprint(catalog_json) assert 'DFC:supplies' in catalog_json assert len(catalog_json.get('DFC:supplies')) == 3 # queue item removed ctr = 0 while len([name for name in os.listdir(queue_path) if os.path.isfile(os.path.join(queue_path, name))]) > 0: ctr += 1 if ctr > 10: break time.sleep(1) # assert len([name for name in os.listdir(queue_path) # if os.path.isfile(os.path.join(queue_path, name))]) == 0 # stop the servers THR_ALICE.kill() THR_ALICE.join() assert THR_ALICE.is_alive() is False THR_BOB.kill() THR_BOB.join() assert THR_BOB.is_alive() is False os.chdir(base_dir) shutil.rmtree(base_dir + '/.tests', ignore_errors=False) print('Testing federation of shared items between ' + 'Alice and Bob is complete') def test_group_follow(base_dir: str) -> None: print('Testing following of a group') global TEST_SERVER_ALICE_RUNNING global TEST_SERVER_BOB_RUNNING global TEST_SERVER_GROUP_RUNNING system_language = 'en' languages_understood = [system_language] TEST_SERVER_ALICE_RUNNING = False TEST_SERVER_BOB_RUNNING = False TEST_SERVER_GROUP_RUNNING = False # system_language = 'en' http_prefix = 'http' proxy_type = None federation_list = [] content_license_url = 'https://creativecommons.org/licenses/by-nc/4.0' media_license_url = 'https://creativecommons.org/licenses/by-nc/4.0' media_creator = 'Bumble' if os.path.isdir(base_dir + '/.tests'): shutil.rmtree(base_dir + '/.tests', ignore_errors=False) os.mkdir(base_dir + '/.tests') # create the servers alice_dir = base_dir + '/.tests/alice' alice_domain = '127.0.0.57' alice_port = 61927 alice_send_threads = [] alice_address = alice_domain + ':' + str(alice_port) bob_dir = base_dir + '/.tests/bob' bob_domain = '127.0.0.59' bob_port = 61814 bob_send_threads = 
[] # bob_address = bob_domain + ':' + str(bob_port) testgroup_dir = base_dir + '/.tests/testgroup' testgroup_domain = '127.0.0.63' testgroupPort = 61925 testgroupSendThreads = [] testgroupAddress = testgroup_domain + ':' + str(testgroupPort) global THR_ALICE if THR_ALICE: while THR_ALICE.is_alive(): THR_ALICE.stop() time.sleep(1) THR_ALICE.kill() THR_ALICE = \ thread_with_trace(target=create_server_alice, args=(alice_dir, alice_domain, alice_port, testgroupAddress, federation_list, False, True, alice_send_threads), daemon=True) global THR_BOB if THR_BOB: while THR_BOB.is_alive(): THR_BOB.stop() time.sleep(1) THR_BOB.kill() THR_BOB = \ thread_with_trace(target=create_server_bob, args=(bob_dir, bob_domain, bob_port, None, federation_list, False, False, bob_send_threads), daemon=True) global THR_GROUP if THR_GROUP: while THR_GROUP.is_alive(): THR_GROUP.stop() time.sleep(1) THR_GROUP.kill() THR_GROUP = \ thread_with_trace(target=create_server_group, args=(testgroup_dir, testgroup_domain, testgroupPort, federation_list, False, False, testgroupSendThreads), daemon=True) THR_ALICE.start() THR_BOB.start() THR_GROUP.start() assert THR_ALICE.is_alive() is True assert THR_BOB.is_alive() is True assert THR_GROUP.is_alive() is True # wait for all servers to be running ctr = 0 while not (TEST_SERVER_ALICE_RUNNING and TEST_SERVER_BOB_RUNNING and TEST_SERVER_GROUP_RUNNING): time.sleep(1) ctr += 1 if ctr > 60: break print('Alice online: ' + str(TEST_SERVER_ALICE_RUNNING)) print('Bob online: ' + str(TEST_SERVER_BOB_RUNNING)) print('Test Group online: ' + str(TEST_SERVER_GROUP_RUNNING)) assert ctr <= 60 time.sleep(1) print('*********************************************************') print('Alice has some outbox posts') alice_outbox = 'http://' + alice_address + '/users/alice/outbox' session = create_session(None) profile_str = 'https://www.w3.org/ns/activitystreams' as_header = { 'Accept': 'application/ld+json; profile="' + profile_str + '"' } signing_priv_key_pem = None outbox_json 
= get_json(signing_priv_key_pem, session, alice_outbox, as_header, None, True, __version__, 'http', None) assert get_json_valid(outbox_json) pprint(outbox_json) assert outbox_json['type'] == 'OrderedCollection' assert 'first' in outbox_json first_page = outbox_json['first'] assert 'totalItems' in outbox_json print('Alice outbox totalItems: ' + str(outbox_json['totalItems'])) assert outbox_json['totalItems'] == 3 outbox_json = get_json(signing_priv_key_pem, session, first_page, as_header, None, True, __version__, 'http', None) assert get_json_valid(outbox_json) pprint(outbox_json) assert 'orderedItems' in outbox_json assert outbox_json['type'] == 'OrderedCollectionPage' print('Alice outbox orderedItems: ' + str(len(outbox_json['orderedItems']))) assert len(outbox_json['orderedItems']) == 3 queue_path = \ data_dir(testgroup_dir) + '/testgroup@' + testgroup_domain + '/queue' # In the beginning the test group had no followers print('*********************************************************') print('Alice sends a follow request to the test group') os.chdir(alice_dir) session_alice = create_session(proxy_type) in_reply_to = None in_reply_to_atom_uri = None subject = None alice_post_log = [] save_to_file = True client_to_server = False cc_url = None alice_person_cache = {} alice_cached_webfingers = {} alice_post_log = [] sites_unavailable = [] # aliceActor = http_prefix + '://' + alice_address + '/users/alice' testgroup_actor = \ http_prefix + '://' + testgroupAddress + '/users/testgroup' signing_priv_key_pem = None send_result = \ send_follow_request(session_alice, alice_dir, 'alice', alice_domain, alice_domain, alice_port, http_prefix, 'testgroup', testgroup_domain, testgroup_actor, testgroupPort, http_prefix, client_to_server, federation_list, alice_send_threads, alice_post_log, alice_cached_webfingers, alice_person_cache, True, __version__, signing_priv_key_pem, alice_domain, None, None, sites_unavailable, system_language) print('send_result: ' + str(send_result)) 
# Wait up to 16s for the group's followers.txt and Alice's following files to
# appear, then verify: the group inbox is valid, 'alice@<domain>' appears in
# the group's followers (without a '!' prefix), the webfinger endpoint file
# exists, and Alice's following.txt records the group with a '!' group marker
# plus a followingCalendar.txt entry.
alice_following_filename = \ data_dir(alice_dir) + '/alice@' + alice_domain + '/following.txt' alice_following_calendar_filename = \ data_dir(alice_dir) + '/alice@' + alice_domain + \ '/followingCalendar.txt' testgroup_followers_filename = \ data_dir(testgroup_dir) + '/testgroup@' + testgroup_domain + \ '/followers.txt' for _ in range(16): if os.path.isfile(testgroup_followers_filename): if os.path.isfile(alice_following_filename): if os.path.isfile(alice_following_calendar_filename): break time.sleep(1) assert valid_inbox(testgroup_dir, 'testgroup', testgroup_domain) assert valid_inbox_filenames(testgroup_dir, 'testgroup', testgroup_domain, alice_domain, alice_port) assert text_in_file('alice@' + alice_domain, testgroup_followers_filename) assert not text_in_file('!alice@' + alice_domain, testgroup_followers_filename) testgroup_webfinger_filename = \ testgroup_dir + '/wfendpoints/testgroup@' + \ testgroup_domain + ':' + str(testgroupPort) + '.json' assert os.path.isfile(testgroup_webfinger_filename) assert text_in_file('acct:testgroup@', testgroup_webfinger_filename) print('acct: exists within the webfinger endpoint for testgroup') testgroup_handle = 'testgroup@' + testgroup_domain following_str = '' with open(alice_following_filename, 'r', encoding='utf-8') as fp_foll: following_str = fp_foll.read() print('Alice following.txt:\n\n' + following_str) if '!testgroup' not in following_str: print('Alice following.txt does not contain !testgroup@' + testgroup_domain + ':' + str(testgroupPort)) assert is_group_actor(alice_dir, testgroup_actor, alice_person_cache) assert not is_group_account(alice_dir, 'alice', alice_domain) assert is_group_account(testgroup_dir, 'testgroup', testgroup_domain) assert '!testgroup' in following_str assert text_in_file(testgroup_handle, alice_following_filename) assert text_in_file(testgroup_handle, alice_following_calendar_filename) print('\n\n*********************************************************') print('Alice follows the test group') 
# Bob sends a follow request to the test group, mirroring Alice's flow above:
# send_follow_request(), then wait up to 16s for the followers/following and
# followingCalendar files before asserting the same invariants for Bob.
print('*********************************************************') print('Bob sends a follow request to the test group') os.chdir(bob_dir) session_bob = create_session(proxy_type) in_reply_to = None in_reply_to_atom_uri = None subject = None bob_post_log = [] save_to_file = True client_to_server = False cc_url = None bob_person_cache = {} bob_cached_webfingers = {} bob_post_log = [] sites_unavailable = [] # bob_actor = http_prefix + '://' + bob_address + '/users/bob' testgroup_actor = \ http_prefix + '://' + testgroupAddress + '/users/testgroup' signing_priv_key_pem = None send_result = \ send_follow_request(session_bob, bob_dir, 'bob', bob_domain, bob_domain, bob_port, http_prefix, 'testgroup', testgroup_domain, testgroup_actor, testgroupPort, http_prefix, client_to_server, federation_list, bob_send_threads, bob_post_log, bob_cached_webfingers, bob_person_cache, True, __version__, signing_priv_key_pem, bob_domain, None, None, sites_unavailable, system_language) print('send_result: ' + str(send_result)) bob_following_filename = \ data_dir(bob_dir) + '/bob@' + bob_domain + '/following.txt' bob_following_calendar_filename = \ data_dir(bob_dir) + '/bob@' + bob_domain + '/followingCalendar.txt' testgroup_followers_filename = \ data_dir(testgroup_dir) + '/testgroup@' + testgroup_domain + \ '/followers.txt' for _ in range(16): if os.path.isfile(testgroup_followers_filename): if os.path.isfile(bob_following_filename): if os.path.isfile(bob_following_calendar_filename): break time.sleep(1) assert valid_inbox(testgroup_dir, 'testgroup', testgroup_domain) assert valid_inbox_filenames(testgroup_dir, 'testgroup', testgroup_domain, bob_domain, bob_port) assert text_in_file('bob@' + bob_domain, testgroup_followers_filename) assert not text_in_file('!bob@' + bob_domain, testgroup_followers_filename) testgroup_webfinger_filename = \ testgroup_dir + '/wfendpoints/testgroup@' + \ testgroup_domain + ':' + str(testgroupPort) + '.json' assert 
os.path.isfile(testgroup_webfinger_filename) assert text_in_file('acct:testgroup@', testgroup_webfinger_filename) print('acct: exists within the webfinger endpoint for testgroup') testgroup_handle = 'testgroup@' + testgroup_domain following_str = '' with open(bob_following_filename, 'r', encoding='utf-8') as fp_foll: following_str = fp_foll.read() print('Bob following.txt:\n\n' + following_str) if '!testgroup' not in following_str: print('Bob following.txt does not contain !testgroup@' + testgroup_domain + ':' + str(testgroupPort)) assert is_group_actor(bob_dir, testgroup_actor, bob_person_cache) assert '!testgroup' in following_str assert text_in_file(testgroup_handle, bob_following_filename) assert text_in_file(testgroup_handle, bob_following_calendar_filename) print('Bob follows the test group') print('\n\n*********************************************************') print('Alice posts to the test group') inbox_path_bob = \ data_dir(bob_dir) + '/bob@' + bob_domain + '/inbox' start_posts_bob = \ len([name for name in os.listdir(inbox_path_bob) if os.path.isfile(os.path.join(inbox_path_bob, name))]) assert start_posts_bob == 0 alice_post_log = [] alice_person_cache = {} alice_cached_webfingers = {} alice_shared_items_federated_domains = [] alice_shared_item_federation_tokens = {} alice_post_log = [] is_article = False city = 'London, England' low_bandwidth = False signing_priv_key_pem = None queue_path = \ data_dir(testgroup_dir) + '/testgroup@' + testgroup_domain + '/queue' inbox_path = \ data_dir(testgroup_dir) + '/testgroup@' + testgroup_domain + '/inbox' outbox_path = \ data_dir(testgroup_dir) + '/testgroup@' + testgroup_domain + '/outbox' alice_message_arrived = False start_posts_inbox = \ len([name for name in os.listdir(inbox_path) if os.path.isfile(os.path.join(inbox_path, name))]) start_posts_outbox = \ len([name for name in os.listdir(outbox_path) if os.path.isfile(os.path.join(outbox_path, name))]) translate = {} buy_url = '' chat_url = '' 
# Alice posts to the group via send_post(); the loop below waits up to 20s for
# the post to land in both the group's inbox and outbox, then up to 20s more
# for the relayed copy to arrive in Bob's inbox. Finally the received post's
# id must point at the group ('/testgroup/statuses/'), not at Alice.
video_transcript = None auto_cw_cache = {} send_result = \ send_post(signing_priv_key_pem, __version__, session_alice, alice_dir, 'alice', alice_domain, alice_port, 'testgroup', testgroup_domain, testgroupPort, cc_url, http_prefix, "Alice group message", save_to_file, client_to_server, True, None, None, None, video_transcript, city, federation_list, alice_send_threads, alice_post_log, alice_cached_webfingers, alice_person_cache, is_article, system_language, languages_understood, alice_shared_items_federated_domains, alice_shared_item_federation_tokens, low_bandwidth, content_license_url, media_license_url, media_creator, translate, buy_url, chat_url, auto_cw_cache, True, in_reply_to, in_reply_to_atom_uri, subject) print('send_result: ' + str(send_result)) for _ in range(20): time.sleep(1) if os.path.isdir(inbox_path): curr_posts_inbox = \ len([name for name in os.listdir(inbox_path) if os.path.isfile(os.path.join(inbox_path, name))]) curr_posts_outbox = \ len([name for name in os.listdir(outbox_path) if os.path.isfile(os.path.join(outbox_path, name))]) if curr_posts_inbox > start_posts_inbox and \ curr_posts_outbox > start_posts_outbox: alice_message_arrived = True print('Alice post sent to test group!') break assert alice_message_arrived is True print('\n\n*********************************************************') print('Post from Alice to test group succeeded') print('\n\n*********************************************************') print('Check that post was relayed from test group to bob') bob_message_arrived = False for _ in range(20): time.sleep(1) if os.path.isdir(inbox_path_bob): curr_posts_bob = \ len([name for name in os.listdir(inbox_path_bob) if os.path.isfile(os.path.join(inbox_path_bob, name))]) if curr_posts_bob > start_posts_bob: bob_message_arrived = True print('Bob received relayed group post!') break assert bob_message_arrived is True # check that the received post has an id from the group, # not from the original sender (alice) group_id_checked = 
False for name in os.listdir(inbox_path_bob): filename = os.path.join(inbox_path_bob, name) if os.path.isfile(filename): received_json = load_json(filename) assert received_json print('Received group post ' + received_json['id']) assert '/testgroup/statuses/' in received_json['id'] group_id_checked = True break assert group_id_checked # stop the servers THR_ALICE.kill() THR_ALICE.join() assert THR_ALICE.is_alive() is False THR_BOB.kill() THR_BOB.join() assert THR_BOB.is_alive() is False THR_GROUP.kill() THR_GROUP.join() assert THR_GROUP.is_alive() is False # queue item removed time.sleep(4) assert len([name for name in os.listdir(queue_path) if os.path.isfile(os.path.join(queue_path, name))]) == 0 os.chdir(base_dir) try: shutil.rmtree(base_dir + '/.tests', ignore_errors=False) except OSError: print('Unable to remove directory ' + base_dir + '/.tests') print('Testing following of a group is complete') def _test_followers_of_person(base_dir: str) -> None: print('test_followers_of_person') curr_dir = base_dir nickname = 'mxpop' domain = 'diva.domain' password = 'birb' port = 80 http_prefix = 'https' federation_list = [] base_dir = curr_dir + '/.tests_followersofperson' if os.path.isdir(base_dir): shutil.rmtree(base_dir, ignore_errors=False) os.mkdir(base_dir) os.chdir(base_dir) create_person(base_dir, nickname, domain, port, http_prefix, True, False, password) create_person(base_dir, 'maxboardroom', domain, port, http_prefix, True, False, password) create_person(base_dir, 'ultrapancake', domain, port, http_prefix, True, False, password) create_person(base_dir, 'drokk', domain, port, http_prefix, True, False, password) create_person(base_dir, 'sausagedog', domain, port, http_prefix, True, False, password) clear_follows(base_dir, nickname, domain, 'following.txt') follow_person(base_dir, nickname, domain, 'maxboardroom', domain, federation_list, False, False, 'following.txt') follow_person(base_dir, 'drokk', domain, 'ultrapancake', domain, federation_list, False, False, 
# _test_followers_of_person (continued): after building the follow graph,
# get_followers_of_person('ultrapancake') must list exactly mxpop, drokk and
# sausagedog despite the deliberate duplicate follow.
# _test_followers_on_domain also starts on this line: it creates accounts on
# two domains, builds follows/followers, and checks the per-domain follower
# count drops from 3 to 2 after unfollower_of_account.
'following.txt') # deliberate duplication follow_person(base_dir, 'drokk', domain, 'ultrapancake', domain, federation_list, False, False, 'following.txt') follow_person(base_dir, 'sausagedog', domain, 'ultrapancake', domain, federation_list, False, False, 'following.txt') follow_person(base_dir, nickname, domain, 'ultrapancake', domain, federation_list, False, False, 'following.txt') follow_person(base_dir, nickname, domain, 'someother', 'randodomain.net', federation_list, False, False, 'following.txt') follow_list = get_followers_of_person(base_dir, 'ultrapancake', domain) assert len(follow_list) == 3 assert 'mxpop@' + domain in follow_list assert 'drokk@' + domain in follow_list assert 'sausagedog@' + domain in follow_list os.chdir(curr_dir) shutil.rmtree(base_dir, ignore_errors=False) def _test_followers_on_domain(base_dir: str) -> None: print('test_followers_on_domain') curr_dir = base_dir nickname = 'mxpop' domain = 'diva.domain' otherdomain = 'soup.dragon' password = 'birb' port = 80 http_prefix = 'https' federation_list = [] base_dir = curr_dir + '/.tests_nooffollowersOndomain' if os.path.isdir(base_dir): shutil.rmtree(base_dir, ignore_errors=False) os.mkdir(base_dir) os.chdir(base_dir) create_person(base_dir, nickname, domain, port, http_prefix, True, False, password) create_person(base_dir, 'maxboardroom', otherdomain, port, http_prefix, True, False, password) create_person(base_dir, 'ultrapancake', otherdomain, port, http_prefix, True, False, password) create_person(base_dir, 'drokk', otherdomain, port, http_prefix, True, False, password) create_person(base_dir, 'sausagedog', otherdomain, port, http_prefix, True, False, password) follow_person(base_dir, 'drokk', otherdomain, nickname, domain, federation_list, False, False, 'following.txt') follow_person(base_dir, 'sausagedog', otherdomain, nickname, domain, federation_list, False, False, 'following.txt') follow_person(base_dir, 'maxboardroom', otherdomain, nickname, domain, federation_list, False, False, 
# Add six followers of mxpop (three of them on otherdomain); expect a count of
# 3 for otherdomain, then 2 after removing sausagedog via unfollower_of_account.
'following.txt') add_follower_of_person(base_dir, nickname, domain, 'cucumber', 'sandwiches.party', federation_list, False, False) add_follower_of_person(base_dir, nickname, domain, 'captainsensible', 'damned.zone', federation_list, False, False) add_follower_of_person(base_dir, nickname, domain, 'pilchard', 'zombies.attack', federation_list, False, False) add_follower_of_person(base_dir, nickname, domain, 'drokk', otherdomain, federation_list, False, False) add_follower_of_person(base_dir, nickname, domain, 'sausagedog', otherdomain, federation_list, False, False) add_follower_of_person(base_dir, nickname, domain, 'maxboardroom', otherdomain, federation_list, False, False) followers_on_other_domain = \ no_of_followers_on_domain(base_dir, nickname + '@' + domain, otherdomain) assert followers_on_other_domain == 3 unfollower_of_account(base_dir, nickname, domain, 'sausagedog', otherdomain, False, False) followers_on_other_domain = \ no_of_followers_on_domain(base_dir, nickname + '@' + domain, otherdomain) assert followers_on_other_domain == 2 os.chdir(curr_dir) shutil.rmtree(base_dir, ignore_errors=False) def _test_group_followers(base_dir: str) -> None: print('test_group_followers') curr_dir = base_dir nickname = 'test735' domain = 'mydomain.com' password = 'somepass' port = 80 http_prefix = 'https' federation_list = [] base_dir = curr_dir + '/.tests_testgroupfollowers' if os.path.isdir(base_dir): shutil.rmtree(base_dir, ignore_errors=False) os.mkdir(base_dir) os.chdir(base_dir) create_person(base_dir, nickname, domain, port, http_prefix, True, False, password) clear_followers(base_dir, nickname, domain) add_follower_of_person(base_dir, nickname, domain, 'badger', 'wild.domain', federation_list, False, False) add_follower_of_person(base_dir, nickname, domain, 'squirrel', 'wild.domain', federation_list, False, False) add_follower_of_person(base_dir, nickname, domain, 'rodent', 'wild.domain', federation_list, False, False) add_follower_of_person(base_dir, nickname, 
# _test_group_followers (continued): six followers across three domains;
# group_followers_by_domain must bucket them as wild.domain=3, zzz.domain=2,
# clutterly.domain=1.
# _test_follows also starts on this line: follow five accounts, then scan
# following.txt asserting every followed domain is in the federation list.
domain, 'utterly', 'clutterly.domain', federation_list, False, False) add_follower_of_person(base_dir, nickname, domain, 'zonked', 'zzz.domain', federation_list, False, False) add_follower_of_person(base_dir, nickname, domain, 'nap', 'zzz.domain', federation_list, False, False) grouped = group_followers_by_domain(base_dir, nickname, domain) assert len(grouped.items()) == 3 assert grouped.get('zzz.domain') assert grouped.get('clutterly.domain') assert grouped.get('wild.domain') assert len(grouped['zzz.domain']) == 2 assert len(grouped['wild.domain']) == 3 assert len(grouped['clutterly.domain']) == 1 os.chdir(curr_dir) shutil.rmtree(base_dir, ignore_errors=False) def _test_follows(base_dir: str) -> None: print('test_follows') curr_dir = base_dir nickname = 'test529' domain = 'testdomain.com' password = 'mypass' port = 80 http_prefix = 'https' federation_list = ['wild.com', 'mesh.com'] base_dir = curr_dir + '/.tests_testfollows' if os.path.isdir(base_dir): shutil.rmtree(base_dir, ignore_errors=False) os.mkdir(base_dir) os.chdir(base_dir) create_person(base_dir, nickname, domain, port, http_prefix, True, False, password) clear_follows(base_dir, nickname, domain, 'following.txt') follow_person(base_dir, nickname, domain, 'badger', 'wild.com', federation_list, False, False, 'following.txt') follow_person(base_dir, nickname, domain, 'squirrel', 'secret.com', federation_list, False, False, 'following.txt') follow_person(base_dir, nickname, domain, 'rodent', 'drainpipe.com', federation_list, False, False, 'following.txt') follow_person(base_dir, nickname, domain, 'batman', 'mesh.com', federation_list, False, False, 'following.txt') follow_person(base_dir, nickname, domain, 'giraffe', 'trees.com', federation_list, False, False, 'following.txt') account_dir = acct_dir(base_dir, nickname, domain) with open(account_dir + '/following.txt', 'r', encoding='utf-8') as fp_foll: domain_found = False for following_domain in fp_foll: test_domain = following_domain.split('@')[1] 
# NOTE(review): the second 'for following_domain in fp_foll' loop below
# re-iterates a file handle already exhausted by the first loop (no seek(0)
# and no reopen after unfollow_account), so 'domain_found is False' passes
# vacuously — it does not actually prove the batman@mesh.com entry was
# removed. Worth fixing by reopening following.txt after the unfollow.
test_domain = remove_eol(test_domain) if test_domain == 'mesh.com': domain_found = True if test_domain not in federation_list: print(test_domain) assert False assert domain_found unfollow_account(base_dir, nickname, domain, 'batman', 'mesh.com', True, False, 'following.txt') domain_found = False for following_domain in fp_foll: test_domain = following_domain.split('@')[1] test_domain = remove_eol(test_domain) if test_domain == 'mesh.com': domain_found = True assert domain_found is False clear_followers(base_dir, nickname, domain) add_follower_of_person(base_dir, nickname, domain, 'badger', 'wild.com', federation_list, False, False) add_follower_of_person(base_dir, nickname, domain, 'squirrel', 'secret.com', federation_list, False, False) add_follower_of_person(base_dir, nickname, domain, 'rodent', 'drainpipe.com', federation_list, False, False) add_follower_of_person(base_dir, nickname, domain, 'batman', 'mesh.com', federation_list, False, False) add_follower_of_person(base_dir, nickname, domain, 'giraffe', 'trees.com', federation_list, False, False) account_dir = acct_dir(base_dir, nickname, domain) with open(account_dir + '/followers.txt', 'r', encoding='utf-8') as fp_foll: for follower_domain in fp_foll: test_domain = follower_domain.split('@')[1] test_domain = remove_eol(test_domain) if test_domain not in federation_list: print(test_domain) assert False os.chdir(curr_dir) shutil.rmtree(base_dir, ignore_errors=False) def _test_create_person_account(base_dir: str): print('test_create_person_account') system_language = 'en' languages_understood = [system_language] curr_dir = base_dir nickname = 'test382' domain = 'badgerdomain.com' password = 'mypass' port = 80 http_prefix = 'https' client_to_server = False base_dir = curr_dir + '/.tests_createperson' if os.path.isdir(base_dir): shutil.rmtree(base_dir, ignore_errors=False) os.mkdir(base_dir) os.chdir(base_dir) private_key_pem, public_key_pem, person, wf_endpoint = \ create_person(base_dir, nickname, domain, port, 
# _test_create_person_account (continued): verify the key material and
# webfinger endpoint returned by create_person, exercise delete_all_posts /
# set_display_nickname / set_bio / archive_posts_for_person, then build two
# public posts with create_public_post and run them through the content
# formatting helpers.
http_prefix, True, False, password) assert private_key_pem assert public_key_pem assert person assert wf_endpoint dir_str = data_dir(base_dir) assert os.path.isfile(dir_str + '/passwords') delete_all_posts(base_dir, nickname, domain, 'inbox') delete_all_posts(base_dir, nickname, domain, 'outbox') set_display_nickname(base_dir, nickname, domain, 'badger') set_bio(base_dir, nickname, domain, 'Randomly roaming in your backyard') archive_posts_for_person(nickname, domain, base_dir, 'inbox', None, {}, 4) archive_posts_for_person(nickname, domain, base_dir, 'outbox', None, {}, 4) test_in_reply_to = None test_in_reply_to_atom_uri = None test_subject = None test_schedule_post = False test_event_date = None test_event_time = None test_event_end_time = None test_location = None test_is_article = False save_to_file = True comments_enabled = True attach_image_filename = None media_type = None conversation_id = None low_bandwidth = True translate = {} content_license_url = 'https://creativecommons.org/licenses/by-nc/4.0' media_license_url = 'https://creativecommons.org/licenses/by-nc/4.0' media_creator = 'Hissing Sid' content = \ "If your \"independent organization\" is government funded...\n\n" + \ "(yawn)\n\n...then it's not really independent.\n\n" + \ "Politicians will threaten to withdraw funding if you do " + \ "anything which challenges middle class sensibilities or incomes." 
# First post: check create_public_post wraps the content with the CW
# 'Not suitable for Vogons' and preserves '(yawn)' in the object content.
# Second post (below): long content with very long words, used to exercise
# remove_long_words / remove_text_formatting / limit_repeated_words and the
# html quote-replacement helpers.
buy_url = '' chat_url = '' auto_cw_cache = {} test_post_json = \ create_public_post(base_dir, nickname, domain, port, http_prefix, content, save_to_file, client_to_server, comments_enabled, attach_image_filename, media_type, 'Not suitable for Vogons', '', 'London, England', test_in_reply_to, test_in_reply_to_atom_uri, test_subject, test_schedule_post, test_event_date, test_event_time, test_event_end_time, test_location, test_is_article, system_language, conversation_id, low_bandwidth, content_license_url, media_license_url, media_creator, languages_understood, translate, buy_url, chat_url, auto_cw_cache) assert test_post_json assert test_post_json.get('object') assert test_post_json['object']['content'] assert '(yawn)' in test_post_json['object']['content'] content = \ 'I would regard fediverse as being things based on ActivityPub ' + \ 'or OStatus. i.e. things whose protocol lineage can be traced ' + \ 'back to identica/statusnet/pumpio.\n' + \ '\nFediverse is a vague term though ' + \ 'and I know some people regard Matrix and Diaspora as being ' + \ 'fediverse. If fediverse just means any federated system ' + \ 'then email would be somequitelongword.\nAnotherlongwordhere sentence.' 
# show_test_boxes() and _test_authentication() are also defined on this line:
# the former prints inbox/outbox post counts for a named account dir, the
# latter round-trips store_basic_credentials / create_basic_auth_header /
# authorize_basic, including rejection of ':' in nicknames or passwords.
test_post_json = \ create_public_post(base_dir, nickname, domain, port, http_prefix, content, save_to_file, client_to_server, comments_enabled, attach_image_filename, media_type, 'Not suitable for Vogons', '', 'London, England', test_in_reply_to, test_in_reply_to_atom_uri, test_subject, test_schedule_post, test_event_date, test_event_time, test_event_end_time, test_location, test_is_article, system_language, conversation_id, low_bandwidth, content_license_url, media_license_url, media_creator, languages_understood, translate, buy_url, chat_url, auto_cw_cache) assert test_post_json assert test_post_json.get('object') assert test_post_json['object']['content'] assert 'Fediverse' in test_post_json['object']['content'] content_str = test_post_json['object']['content'] object_content = remove_long_words(content_str, 40, []) assert 'Fediverse' in object_content bold_reading = False object_content = remove_text_formatting(object_content, bold_reading) assert 'Fediverse' in object_content object_content = limit_repeated_words(object_content, 6) assert 'Fediverse' in object_content object_content = html_replace_email_quote(object_content) assert 'Fediverse' in object_content object_content = html_replace_quote_marks(object_content) assert 'Fediverse' in object_content os.chdir(curr_dir) shutil.rmtree(base_dir, ignore_errors=False) def show_test_boxes(name: str, inbox_path: str, outbox_path: str) -> None: inbox_posts = \ len([name for name in os.listdir(inbox_path) if os.path.isfile(os.path.join(inbox_path, name))]) outbox_posts = \ len([name for name in os.listdir(outbox_path) if os.path.isfile(os.path.join(outbox_path, name))]) print('EVENT: ' + name + ' inbox has ' + str(inbox_posts) + ' posts and ' + str(outbox_posts) + ' outbox posts') def _test_authentication(base_dir: str) -> None: print('test_authentication') curr_dir = base_dir nickname = 'test8743' password = 'SuperSecretPassword12345' base_dir = curr_dir + '/.tests_authentication' if os.path.isdir(base_dir): 
# _test_authentication (continued): a wrong password or another user's path
# must fail authorize_basic; changing the password re-authorizes.
# test_client_to_server() also starts on this line (and continues beyond this
# chunk): spins up Alice (127.0.0.42:61935) and Bob (127.0.0.64:61936)
# servers, then sends a post from Alice via the c2s API
# (send_post_via_server) with an attached image and calendar event fields.
shutil.rmtree(base_dir, ignore_errors=False) os.mkdir(base_dir) os.chdir(base_dir) assert store_basic_credentials(base_dir, 'othernick', 'otherpass') assert store_basic_credentials(base_dir, 'bad:nick', 'otherpass') is False assert store_basic_credentials(base_dir, 'badnick', 'otherpa:ss') is False assert store_basic_credentials(base_dir, nickname, password) auth_header = create_basic_auth_header(nickname, password) assert authorize_basic(base_dir, '/users/' + nickname + '/inbox', auth_header, False) assert authorize_basic(base_dir, '/users/' + nickname, auth_header, False) is False assert authorize_basic(base_dir, '/users/othernick/inbox', auth_header, False) is False auth_header = create_basic_auth_header(nickname, password + '1') assert authorize_basic(base_dir, '/users/' + nickname + '/inbox', auth_header, False) is False password = 'someOtherPassword' assert store_basic_credentials(base_dir, nickname, password) auth_header = create_basic_auth_header(nickname, password) assert authorize_basic(base_dir, '/users/' + nickname + '/inbox', auth_header, False) os.chdir(curr_dir) shutil.rmtree(base_dir, ignore_errors=False) def test_client_to_server(base_dir: str): print('EVENT: Testing sending a post via c2s') global TEST_SERVER_ALICE_RUNNING global TEST_SERVER_BOB_RUNNING content_license_url = 'https://creativecommons.org/licenses/by-nc/4.0' media_license_url = 'https://creativecommons.org/licenses/by-nc/4.0' media_creator = 'King Tut' TEST_SERVER_ALICE_RUNNING = False TEST_SERVER_BOB_RUNNING = False system_language = 'en' languages_understood = [system_language] http_prefix = 'http' proxy_type = None federation_list = [] low_bandwidth = False if os.path.isdir(base_dir + '/.tests'): shutil.rmtree(base_dir + '/.tests', ignore_errors=False) os.mkdir(base_dir + '/.tests') # create the servers alice_dir = base_dir + '/.tests/alice' alice_domain = '127.0.0.42' alice_port = 61935 alice_send_threads = [] alice_address = alice_domain + ':' + str(alice_port) bob_dir = 
base_dir + '/.tests/bob' bob_domain = '127.0.0.64' bob_port = 61936 bob_send_threads = [] bob_address = bob_domain + ':' + str(bob_port) global THR_ALICE if THR_ALICE: while THR_ALICE.is_alive(): THR_ALICE.stop() time.sleep(1) THR_ALICE.kill() THR_ALICE = \ thread_with_trace(target=create_server_alice, args=(alice_dir, alice_domain, alice_port, bob_address, federation_list, False, False, alice_send_threads), daemon=True) global THR_BOB if THR_BOB: while THR_BOB.is_alive(): THR_BOB.stop() time.sleep(1) THR_BOB.kill() THR_BOB = \ thread_with_trace(target=create_server_bob, args=(bob_dir, bob_domain, bob_port, alice_address, federation_list, False, False, bob_send_threads), daemon=True) THR_ALICE.start() THR_BOB.start() assert THR_ALICE.is_alive() is True assert THR_BOB.is_alive() is True # wait for both servers to be running ctr = 0 while not (TEST_SERVER_ALICE_RUNNING and TEST_SERVER_BOB_RUNNING): time.sleep(1) ctr += 1 if ctr > 60: break print('Alice online: ' + str(TEST_SERVER_ALICE_RUNNING)) print('Bob online: ' + str(TEST_SERVER_BOB_RUNNING)) time.sleep(1) # set bob to be following the calendar of alice print('Bob follows the calendar of Alice') following_cal_path = \ data_dir(bob_dir) + '/bob@' + bob_domain + '/followingCalendar.txt' with open(following_cal_path, 'w+', encoding='utf-8') as fp_foll: fp_foll.write('alice@' + alice_domain + '\n') print('\n\n*******************************************************') print('EVENT: Alice sends to Bob via c2s') session_alice = create_session(proxy_type) attached_image_filename = base_dir + '/img/logo.png' media_type = get_attachment_media_type(attached_image_filename) attached_image_description = 'Logo' city = 'London, England' is_article = False cached_webfingers = {} person_cache = {} password = 'alicepass' conversation_id = None alice_inbox_path = \ data_dir(alice_dir) + '/alice@' + alice_domain + '/inbox' alice_outbox_path = \ data_dir(alice_dir) + '/alice@' + alice_domain + '/outbox' bob_inbox_path = 
data_dir(bob_dir) + '/bob@' + bob_domain + '/inbox' bob_outbox_path = data_dir(bob_dir) + '/bob@' + bob_domain + '/outbox' outbox_path = data_dir(alice_dir) + '/alice@' + alice_domain + '/outbox' inbox_path = data_dir(bob_dir) + '/bob@' + bob_domain + '/inbox' show_test_boxes('alice', alice_inbox_path, alice_outbox_path) show_test_boxes('bob', bob_inbox_path, bob_outbox_path) assert len([name for name in os.listdir(alice_inbox_path) if os.path.isfile(os.path.join(alice_inbox_path, name))]) == 0 assert len([name for name in os.listdir(alice_outbox_path) if os.path.isfile(os.path.join(alice_outbox_path, name))]) == 0 assert len([name for name in os.listdir(bob_inbox_path) if os.path.isfile(os.path.join(bob_inbox_path, name))]) == 0 assert len([name for name in os.listdir(bob_outbox_path) if os.path.isfile(os.path.join(bob_outbox_path, name))]) == 0 print('EVENT: all inboxes and outboxes are empty') signing_priv_key_pem = None test_date = datetime.datetime.now() event_date = \ str(test_date.year) + '-' + str(test_date.month) + '-' + \ str(test_date.day) event_time = '11:45' event_end_time = '12:30' location = "Kinshasa" translate = {} buy_url = '' chat_url = '' video_transcript = None auto_cw_cache = {} send_result = \ send_post_via_server(signing_priv_key_pem, __version__, alice_dir, session_alice, 'alice', password, alice_domain, alice_port, 'bob', bob_domain, bob_port, None, http_prefix, 'Sent from my ActivityPub client', True, attached_image_filename, media_type, attached_image_description, video_transcript, city, cached_webfingers, person_cache, is_article, system_language, languages_understood, low_bandwidth, content_license_url, media_license_url, media_creator, event_date, event_time, event_end_time, location, translate, buy_url, chat_url, auto_cw_cache, True, None, None, conversation_id, None) print('send_result: ' + str(send_result)) for _ in range(30): if os.path.isdir(outbox_path): if len([name for name in os.listdir(outbox_path) if 
os.path.isfile(os.path.join(outbox_path, name))]) == 1: break time.sleep(1) show_test_boxes('alice', alice_inbox_path, alice_outbox_path) show_test_boxes('bob', bob_inbox_path, bob_outbox_path) assert len([name for name in os.listdir(alice_inbox_path) if os.path.isfile(os.path.join(alice_inbox_path, name))]) == 0 assert len([name for name in os.listdir(alice_outbox_path) if os.path.isfile(os.path.join(alice_outbox_path, name))]) == 1 print(">>> c2s post arrived in Alice's outbox\n\n\n") for _ in range(30): if os.path.isdir(inbox_path): if len([name for name in os.listdir(bob_inbox_path) if os.path.isfile(os.path.join(bob_inbox_path, name))]) == 1: break time.sleep(1) show_test_boxes('alice', alice_inbox_path, alice_outbox_path) show_test_boxes('bob', bob_inbox_path, bob_outbox_path) assert len([name for name in os.listdir(bob_inbox_path) if os.path.isfile(os.path.join(bob_inbox_path, name))]) == 1 assert len([name for name in os.listdir(bob_outbox_path) if os.path.isfile(os.path.join(bob_outbox_path, name))]) == 0 print(">>> s2s post arrived in Bob's inbox") time.sleep(2) calendar_path = data_dir(bob_dir) + '/bob@' + bob_domain + '/calendar' if not os.path.isdir(calendar_path): print('Missing calendar path: ' + calendar_path) assert os.path.isdir(calendar_path) assert os.path.isdir(calendar_path + '/' + str(test_date.year)) assert os.path.isfile(calendar_path + '/' + str(test_date.year) + '/' + str(test_date.month) + '.txt') print(">>> calendar entry created for s2s post which arrived at " + "Bob's inbox") print("c2s send success\n\n\n") print('\n\nEVENT: Getting message id for the post') status_number = 0 outbox_post_filename = None outbox_post_id = None for name in os.listdir(outbox_path): if '#statuses#' in name: status_number = name.split('#statuses#')[1].replace('.json', '') status_number = int(status_number.replace('#activity', '')) outbox_post_filename = outbox_path + '/' + name post_json_object = load_json(outbox_post_filename, 0) if post_json_object: 
# Extract the outbox post id, then Alice follows Bob via
# send_follow_request_via_server; the loop waits up to 10s for the
# followers/following/petnames files to agree on both sides. The function
# continues beyond this chunk.
outbox_post_id = remove_id_ending(post_json_object['id']) assert outbox_post_id print('message id obtained: ' + outbox_post_id) assert valid_inbox(bob_dir, 'bob', bob_domain) assert valid_inbox_filenames(bob_dir, 'bob', bob_domain, alice_domain, alice_port) print('\n\nAlice follows Bob') signing_priv_key_pem = None send_follow_request_via_server(alice_dir, session_alice, 'alice', password, alice_domain, alice_port, 'bob', bob_domain, bob_port, http_prefix, cached_webfingers, person_cache, True, __version__, signing_priv_key_pem, system_language) alice_petnames_filename = data_dir(alice_dir) + '/' + \ 'alice@' + alice_domain + '/petnames.txt' alice_following_filename = \ data_dir(alice_dir) + '/alice@' + alice_domain + '/following.txt' bob_followers_filename = \ data_dir(bob_dir) + '/bob@' + bob_domain + '/followers.txt' for _ in range(10): if os.path.isfile(bob_followers_filename): test_str = 'alice@' + alice_domain + ':' + str(alice_port) if text_in_file(test_str, bob_followers_filename): if os.path.isfile(alice_following_filename) and \ os.path.isfile(alice_petnames_filename): test_str = 'bob@' + bob_domain + ':' + str(bob_port) if text_in_file(test_str, alice_following_filename): break time.sleep(1) assert os.path.isfile(bob_followers_filename) assert os.path.isfile(alice_following_filename) assert os.path.isfile(alice_petnames_filename) assert text_in_file('bob bob@' + bob_domain, alice_petnames_filename) print('alice@' + alice_domain + ':' + str(alice_port) + ' in ' + bob_followers_filename) test_str = 'alice@' + alice_domain + ':' + str(alice_port) assert text_in_file(test_str, bob_followers_filename) print('bob@' + bob_domain + ':' + str(bob_port) + ' in ' + alice_following_filename) test_str = 'bob@' + bob_domain + ':' + str(bob_port) assert text_in_file(test_str, alice_following_filename) assert valid_inbox(bob_dir, 'bob', bob_domain) assert valid_inbox_filenames(bob_dir, 'bob', bob_domain, alice_domain, alice_port) print('\n\nEVENT: Bob follows Alice') 
send_follow_request_via_server(alice_dir, session_alice, 'bob', 'bobpass', bob_domain, bob_port, 'alice', alice_domain, alice_port, http_prefix, cached_webfingers, person_cache, True, __version__, signing_priv_key_pem, system_language) alice_dir_str = data_dir(alice_dir) bob_dir_str = data_dir(bob_dir) for _ in range(20): if os.path.isfile(alice_dir_str + '/alice@' + alice_domain + '/followers.txt'): test_str = 'bob@' + bob_domain + ':' + str(bob_port) test_filename = \ alice_dir_str + '/alice@' + \ alice_domain + '/followers.txt' if text_in_file(test_str, test_filename): if os.path.isfile(bob_dir_str + '/bob@' + bob_domain + '/following.txt'): alice_handle_str = \ 'alice@' + alice_domain + ':' + str(alice_port) if text_in_file(alice_handle_str, bob_dir_str + '/bob@' + bob_domain + '/following.txt'): if os.path.isfile(bob_dir_str + '/bob@' + bob_domain + '/followingCalendar.txt'): if text_in_file(alice_handle_str, bob_dir_str + '/bob@' + bob_domain + '/followingCalendar.txt'): break time.sleep(1) assert os.path.isfile(alice_dir_str + '/alice@' + alice_domain + '/followers.txt') assert os.path.isfile(bob_dir_str + '/bob@' + bob_domain + '/following.txt') test_str = 'bob@' + bob_domain + ':' + str(bob_port) assert text_in_file(test_str, alice_dir_str + '/alice@' + alice_domain + '/followers.txt') test_str = 'alice@' + alice_domain + ':' + str(alice_port) assert text_in_file(test_str, bob_dir_str + '/bob@' + bob_domain + '/following.txt') session_bob = create_session(proxy_type) password = 'bobpass' outbox_path = bob_dir_str + '/bob@' + bob_domain + '/outbox' inbox_path = alice_dir_str + '/alice@' + alice_domain + '/inbox' print(str(len([name for name in os.listdir(bob_outbox_path) if os.path.isfile(os.path.join(bob_outbox_path, name))]))) show_test_boxes('alice', alice_inbox_path, alice_outbox_path) show_test_boxes('bob', bob_inbox_path, bob_outbox_path) assert len([name for name in os.listdir(bob_outbox_path) if os.path.isfile(os.path.join(bob_outbox_path, name))]) 
== 1 print(str(len([name for name in os.listdir(alice_inbox_path) if os.path.isfile(os.path.join(alice_inbox_path, name))]))) show_test_boxes('alice', alice_inbox_path, alice_outbox_path) show_test_boxes('bob', bob_inbox_path, bob_outbox_path) assert len([name for name in os.listdir(alice_inbox_path) if os.path.isfile(os.path.join(alice_inbox_path, name))]) == 0 print('\n\nEVENT: Bob checks his calendar via caldav') # test caldav result for a month result = \ dav_month_via_server(session_bob, http_prefix, 'bob', bob_domain, bob_port, True, test_date.year, test_date.month, 'bobpass') print('response: ' + str(result)) assert 'VCALENDAR' in str(result) assert 'VEVENT' in str(result) # test caldav result for a day result = \ dav_day_via_server(session_bob, http_prefix, 'bob', bob_domain, bob_port, True, test_date.year, test_date.month, test_date.day, 'bobpass') print('response: ' + str(result)) assert 'VCALENDAR' in str(result) assert 'VEVENT' in str(result) # test for incorrect caldav login result = \ dav_day_via_server(session_bob, http_prefix, 'bob', bob_domain, bob_port, True, test_date.year, test_date.month, test_date.day, 'wrongpass') assert 'VCALENDAR' not in str(result) assert 'VEVENT' not in str(result) print('\n\nEVENT: Bob likes the post') send_like_via_server(bob_dir, session_bob, 'bob', 'bobpass', bob_domain, bob_port, http_prefix, outbox_post_id, cached_webfingers, person_cache, True, __version__, signing_priv_key_pem, system_language) for _ in range(20): if os.path.isdir(outbox_path) and os.path.isdir(inbox_path): if len([name for name in os.listdir(outbox_path) if os.path.isfile(os.path.join(outbox_path, name))]) == 2: test = len([name for name in os.listdir(inbox_path) if os.path.isfile(os.path.join(inbox_path, name))]) if test == 1: break time.sleep(1) show_test_boxes('alice', alice_inbox_path, alice_outbox_path) show_test_boxes('bob', bob_inbox_path, bob_outbox_path) bob_outbox_path_ctr = \ len([name for name in os.listdir(bob_outbox_path) if 
os.path.isfile(os.path.join(bob_outbox_path, name))]) print('bob_outbox_path_ctr: ' + str(bob_outbox_path_ctr)) assert bob_outbox_path_ctr == 2 alice_inbox_path_ctr = \ len([name for name in os.listdir(alice_inbox_path) if os.path.isfile(os.path.join(alice_inbox_path, name))]) print('alice_inbox_path_ctr: ' + str(alice_inbox_path_ctr)) assert alice_inbox_path_ctr == 0 print('EVENT: Post liked') print('\n\nEVENT: Bob reacts to the post') send_reaction_via_server(bob_dir, session_bob, 'bob', 'bobpass', bob_domain, bob_port, http_prefix, outbox_post_id, '😃', cached_webfingers, person_cache, True, __version__, signing_priv_key_pem, system_language) for _ in range(20): if os.path.isdir(outbox_path) and os.path.isdir(inbox_path): if len([name for name in os.listdir(outbox_path) if os.path.isfile(os.path.join(outbox_path, name))]) == 3: test = len([name for name in os.listdir(inbox_path) if os.path.isfile(os.path.join(inbox_path, name))]) if test == 1: break time.sleep(1) show_test_boxes('alice', alice_inbox_path, alice_outbox_path) show_test_boxes('bob', bob_inbox_path, bob_outbox_path) bob_outbox_path_ctr = \ len([name for name in os.listdir(bob_outbox_path) if os.path.isfile(os.path.join(bob_outbox_path, name))]) print('bob_outbox_path_ctr: ' + str(bob_outbox_path_ctr)) assert bob_outbox_path_ctr == 3 alice_inbox_path_ctr = \ len([name for name in os.listdir(alice_inbox_path) if os.path.isfile(os.path.join(alice_inbox_path, name))]) print('alice_inbox_path_ctr: ' + str(alice_inbox_path_ctr)) assert alice_inbox_path_ctr == 0 print('EVENT: Post reacted to') print(str(len([name for name in os.listdir(outbox_path) if os.path.isfile(os.path.join(outbox_path, name))]))) show_test_boxes('alice', alice_inbox_path, alice_outbox_path) show_test_boxes('bob', bob_inbox_path, bob_outbox_path) outbox_path_ctr = \ len([name for name in os.listdir(outbox_path) if os.path.isfile(os.path.join(outbox_path, name))]) print('outbox_path_ctr: ' + str(outbox_path_ctr)) assert outbox_path_ctr 
== 3 inbox_path_ctr = \ len([name for name in os.listdir(inbox_path) if os.path.isfile(os.path.join(inbox_path, name))]) print('inbox_path_ctr: ' + str(inbox_path_ctr)) assert inbox_path_ctr == 0 show_test_boxes('alice', alice_inbox_path, alice_outbox_path) show_test_boxes('bob', bob_inbox_path, bob_outbox_path) print('\n\nEVENT: Bob repeats the post') signing_priv_key_pem = None send_announce_via_server(bob_dir, session_bob, 'bob', password, bob_domain, bob_port, http_prefix, outbox_post_id, cached_webfingers, person_cache, True, __version__, signing_priv_key_pem, system_language) for _ in range(30): if os.path.isdir(outbox_path) and os.path.isdir(inbox_path): if len([name for name in os.listdir(outbox_path) if os.path.isfile(os.path.join(outbox_path, name))]) == 4: if len([name for name in os.listdir(inbox_path) if os.path.isfile(os.path.join(inbox_path, name))]) == 2: break time.sleep(1) show_test_boxes('alice', alice_inbox_path, alice_outbox_path) show_test_boxes('bob', bob_inbox_path, bob_outbox_path) bob_outbox_path_ctr = \ len([name for name in os.listdir(bob_outbox_path) if os.path.isfile(os.path.join(bob_outbox_path, name))]) print('bob_outbox_path_ctr: ' + str(bob_outbox_path_ctr)) assert bob_outbox_path_ctr == 5 alice_inbox_path_ctr = \ len([name for name in os.listdir(alice_inbox_path) if os.path.isfile(os.path.join(alice_inbox_path, name))]) print('alice_inbox_path_ctr: ' + str(alice_inbox_path_ctr)) assert alice_inbox_path_ctr == 1 print('EVENT: Post repeated') inbox_path = data_dir(bob_dir) + '/bob@' + bob_domain + '/inbox' outbox_path = data_dir(alice_dir) + '/alice@' + alice_domain + '/outbox' bob_posts_before = \ len([name for name in os.listdir(inbox_path) if os.path.isfile(os.path.join(inbox_path, name))]) alice_posts_before = \ len([name for name in os.listdir(outbox_path) if os.path.isfile(os.path.join(outbox_path, name))]) print('\n\nEVENT: Alice deletes her post: ' + outbox_post_id + ' ' + str(alice_posts_before)) password = 'alicepass' 
send_delete_via_server(alice_dir, session_alice, 'alice', password, alice_domain, alice_port, http_prefix, outbox_post_id, cached_webfingers, person_cache, True, __version__, signing_priv_key_pem, system_language) for _ in range(30): if os.path.isdir(inbox_path): test = len([name for name in os.listdir(inbox_path) if os.path.isfile(os.path.join(inbox_path, name))]) if test == bob_posts_before-1: break time.sleep(1) test = len([name for name in os.listdir(inbox_path) if os.path.isfile(os.path.join(inbox_path, name))]) assert test == bob_posts_before - 1 print(">>> post was deleted from Bob's inbox") test = len([name for name in os.listdir(outbox_path) if os.path.isfile(os.path.join(outbox_path, name))]) # this should be unchanged because a delete post was added # at the outbox and one was removed assert test == alice_posts_before print(">>> post deleted from Alice's outbox") assert valid_inbox(bob_dir, 'bob', bob_domain) assert valid_inbox_filenames(bob_dir, 'bob', bob_domain, alice_domain, alice_port) print('\n\nEVENT: Alice unfollows Bob') password = 'alicepass' send_unfollow_request_via_server(base_dir, session_alice, 'alice', password, alice_domain, alice_port, 'bob', bob_domain, bob_port, http_prefix, cached_webfingers, person_cache, True, __version__, signing_priv_key_pem, system_language) for _ in range(10): test_str = 'alice@' + alice_domain + ':' + str(alice_port) if not text_in_file(test_str, bob_followers_filename): test_str = 'bob@' + bob_domain + ':' + str(bob_port) if not text_in_file(test_str, alice_following_filename): break time.sleep(1) assert os.path.isfile(bob_followers_filename) assert os.path.isfile(alice_following_filename) test_str = 'alice@' + alice_domain + ':' + str(alice_port) assert not text_in_file(test_str, bob_followers_filename) test_str = 'bob@' + bob_domain + ':' + str(bob_port) assert not text_in_file(test_str, alice_following_filename) assert valid_inbox(bob_dir, 'bob', bob_domain) assert valid_inbox_filenames(bob_dir, 'bob', 
bob_domain, alice_domain, alice_port)
    assert valid_inbox(alice_dir, 'alice', alice_domain)
    assert valid_inbox_filenames(alice_dir, 'alice',
                                 alice_domain, bob_domain, bob_port)
    # stop the servers
    THR_ALICE.kill()
    THR_ALICE.join()
    assert THR_ALICE.is_alive() is False
    THR_BOB.kill()
    THR_BOB.join()
    assert THR_BOB.is_alive() is False
    os.chdir(base_dir)
    # shutil.rmtree(alice_dir, ignore_errors=False)
    # shutil.rmtree(bob_dir, ignore_errors=False)


def _test_actor_parsing():
    """Checks get_domain_from_actor and get_nickname_from_actor
    against several actor URL and handle formats.
    """
    print('test_actor_parsing')
    # https actor with explicit port and a /users/ path
    actor = 'https://mydomain:72/users/mynick'
    domain, port = get_domain_from_actor(actor)
    assert domain == 'mydomain'
    assert port == 72
    nickname = get_nickname_from_actor(actor)
    assert nickname == 'mynick'
    # /accounts/ style path, no port
    actor = 'https://element/accounts/badger'
    domain, port = get_domain_from_actor(actor)
    assert domain == 'element'
    nickname = get_nickname_from_actor(actor)
    assert nickname == 'badger'
    # plain address without scheme
    actor = 'egg@chicken.com'
    domain, port = get_domain_from_actor(actor)
    assert domain == 'chicken.com'
    nickname = get_nickname_from_actor(actor)
    assert nickname == 'egg'
    # fediverse-style handle with leading @
    actor = '@waffle@cardboard'
    domain, port = get_domain_from_actor(actor)
    assert domain == 'cardboard'
    nickname = get_nickname_from_actor(actor)
    assert nickname == 'waffle'
    # /channel/ style path
    actor = 'https://astral/channel/sky'
    domain, port = get_domain_from_actor(actor)
    assert domain == 'astral'
    nickname = get_nickname_from_actor(actor)
    assert nickname == 'sky'
    actor = 'https://randomain/users/rando'
    domain, port = get_domain_from_actor(actor)
    assert domain == 'randomain'
    nickname = get_nickname_from_actor(actor)
    assert nickname == 'rando'
    # /@nickname style path with explicit port
    actor = 'https://otherdomain:49/@othernick'
    domain, port = get_domain_from_actor(actor)
    assert domain == 'otherdomain'
    assert port == 49
    nickname = get_nickname_from_actor(actor)
    assert nickname == 'othernick'


def _test_web_links():
    print('test_web_links')
    example_text = \
        '

Some text!

https://videosite.whatever/video/A3JpZMovL25kci1kZS32' + \ 'MGE0NCg4YB1lMLQwLTRkMGEtYkYxMS5kNmQ1MjJqYWZjKzd

' linked_text = add_web_links(example_text) expected_text = \ '

Some text!

https://videosite.whatever/video/A3JpZM

' assert linked_text == expected_text example_text = \ "

Aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + \ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + \ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + \ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + \ " #turbot #haddock

" result_text = remove_long_words(example_text, 40, []) assert result_text == "

Aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + \ " #turbot " + \ "#haddock

" example_text = \ '

@foo Some ' + \ 'random text.

AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + \ 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + \ 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + \ 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + \ 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + \ 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA

' result_text = remove_long_words(example_text, 40, []) assert result_text == \ '

@foo ' + \ 'Some random text.

' example_text = \ 'This post has a web links https://somesite.net\n\nAnd some other text' linked_text = add_web_links(example_text) expected_text = \ 'somesite.net' if expected_text not in linked_text: print(expected_text + '\n') print(linked_text) assert expected_text in linked_text # NOTE: it is difficult to find academic studies of the fediverse which # do not in some way violate consent or embody an arrogant status # quo attitude. Did all those scraped accounts agree to be part of # an academic study? Did they even consider consent as an issue? # It seems doubtful. We are just like algae under a microscope to them. example_text = \ 'This post has an arxiv link arXiv:2203.15752 some other text' linked_text = add_web_links(example_text) expected_text = \ 'arXiv:2203.15752' if expected_text not in linked_text: print(expected_text + '\n') print(linked_text) assert expected_text in linked_text example_text = \ 'This post has an doi link ' + \ 'doi:10.1109/INFCOMW.2019.8845221 some other text' linked_text = add_web_links(example_text) expected_text = \ '' + \ 'doi:10.1109/INFCOMW.2019.8845221' if expected_text not in linked_text: print(expected_text + '\n') print(linked_text) assert expected_text in linked_text example_text = \ 'This post has a very long web link\n\nhttp://' + \ 'cbwebewuvfuftdiudbqd33dddbbyuef23fyug3bfhcyu2fct2' + \ 'cuyqbcbucuwvckiwyfgewfvqejbchevbhwevuevwbqebqekve' + \ 'qvuvjfkf.onion\n\nAnd some other text' linked_text = add_web_links(example_text) assert 'ellipsis' in linked_text example_text = \ '

1. HAHAHAHAHAHAHAHAHAHAHAHAHAHAHAHAHAHAHAHAHAHAHAHAHAHAHAH' + \ 'AHAHAHHAHAHAHAHAHAHAHAHAHAHAHAHHAHAHAHAHAHAHAHAH

' result_text = remove_long_words(example_text, 40, []) assert result_text == '

1. HAHAHAHAHAHAHAHAHAHAHAHAHAHAHAHAHAHAHAHA

' example_text = \ '

Tox address is 88AB9DED6F9FBEF43E105FB72060A2D89F9B93C74' + \ '4E8C45AB3C5E42C361C837155AFCFD9D448

' result_text = remove_long_words(example_text, 40, []) assert result_text == example_text example_text = \ 'some.incredibly.long.and.annoying.word.which.should.be.removed: ' + \ 'The remaining text' result_text = remove_long_words(example_text, 40, []) assert result_text == \ 'some.incredibly.long.and.annoying.word.w\n' + \ 'hich.should.be.removed: The remaining text' example_text = \ '

Tox address is 88AB9DED6F9FBEF43E105FB72060A2D89F9B93C74' + \ '4E8C45AB3C5E42C361C837155AFCFD9D448

' result_text = remove_long_words(example_text, 40, []) assert result_text == \ '

Tox address is 88AB9DED6F9FBEF43E105FB72060A2D89F9B93C7\n' + \ '44E8C45AB3C5E42C361C837155AFCFD9D448

' example_text = \ '

ABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCA' + \ 'BCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCAB' + \ 'CABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABC' + \ 'ABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCA' + \ 'BCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCAB' + \ 'CABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABC' + \ 'ABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCA' + \ 'BCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCAB' + \ 'CABCABCABCABCABCABCABCABC

' result_text = remove_long_words(example_text, 40, []) assert result_text == r'

ABCABCABCABCABCABCABCABCABCABCABCABCABCA<\p>' example_text = \ '"the nucleus of mutual-support institutions, habits, and customs ' + \ 'remains alive with the millions; it keeps them together; and ' + \ 'they prefer to cling to their customs, beliefs, and traditions ' + \ 'rather than to accept the teachings of a war of each ' + \ 'against all"\n\n--Peter Kropotkin' test_fn_str = add_web_links(example_text) result_text = remove_long_words(test_fn_str, 40, []) assert result_text == example_text assert 'ellipsis' not in result_text example_text = \ '

filepopout=' + \ 'TemplateAttachmentRichPopout<<\\p>' result_text = replace_content_duplicates(example_text) assert result_text == \ '

filepopout=' + \ 'TemplateAttachmentRichPopout' example_text = \ '

Test1 test2 #YetAnotherExcessivelyLongwindedAndBoringHashtag

' test_fn_str = add_web_links(example_text) result_text = remove_long_words(test_fn_str, 40, []) assert (result_text == '

Test1 test2 ' '#YetAnotherExcessivelyLongwindedAndBorin\ngHashtag

') example_text = \ "

Don't remove a p2p link " + \ "rad:git:hwd1yrerc3mcgn8ga9rho3dqi4w33nep7kxmqezss4topyfgmexihp" + \ "33xcw

" test_fn_str = add_web_links(example_text) result_text = remove_long_words(test_fn_str, 40, []) assert result_text == example_text def _test_addemoji(base_dir: str): print('test_addemoji') content = "Emoji :lemon: :strawberry: :banana:" http_prefix = 'http' nickname = 'testuser' domain = 'testdomain.net' port = 3682 recipients = [] translate = {} hashtags = {} base_dir_original = base_dir path = base_dir + '/.tests' if not os.path.isdir(path): os.mkdir(path) path = base_dir + '/.tests/emoji' if os.path.isdir(path): shutil.rmtree(path, ignore_errors=False) os.mkdir(path) base_dir = path path = base_dir + '/emoji' if os.path.isdir(path): shutil.rmtree(path, ignore_errors=False) os.mkdir(path) copytree(base_dir_original + '/emoji', base_dir + '/emoji', False, None) os.chdir(base_dir) private_key_pem, public_key_pem, person, wf_endpoint = \ create_person(base_dir, nickname, domain, port, http_prefix, True, False, 'password') assert private_key_pem assert public_key_pem assert person assert wf_endpoint content_modified = \ add_html_tags(base_dir, http_prefix, nickname, domain, content, recipients, hashtags, translate, True) assert ':lemon:' in content_modified assert content_modified.startswith('

') assert content_modified.endswith('

') tags = [] for _, tag in hashtags.items(): tags.append(tag) content = content_modified content_modified = \ replace_emoji_from_tags(None, base_dir, content, tags, 'content', True, True) expected_content = '

Emoji 🍋 🍓 🍌

' if content_modified != expected_content: print('expected_content: ' + expected_content) print('content_modified: ' + content_modified) assert content_modified == expected_content content_modified = \ replace_emoji_from_tags(None, base_dir, content, tags, 'content', True, False) expected_content = '

Emoji ' + \ ' ' + \ '

'
    if content_modified != expected_content:
        print('expected_content: ' + expected_content)
        print('content_modified: ' + content_modified)
    assert content_modified == expected_content
    # restore the working directory and remove test fixtures
    os.chdir(base_dir_original)
    shutil.rmtree(base_dir_original + '/.tests', ignore_errors=False)


def _test_get_status_number():
    """Checks that consecutive status numbers are 18 digits long
    and strictly increasing.
    """
    print('test_get_status_number')
    prev_status_number = None
    for _ in range(1, 20):
        status_number, _ = get_status_number()
        if prev_status_number:
            assert len(status_number) == 18
            assert int(status_number) > prev_status_number
        prev_status_number = int(status_number)


def _test_json_string() -> None:
    """Checks that save_json/load_json round-trips non-ASCII content
    and that json.dumps with ensure_ascii=False keeps it readable.
    """
    print('test_json_string')
    filename = '.epicyon_tests_test_json_string.json'
    message_str = "Crème brûlée यह एक परीक्षण ह"
    test_json = {
        "content": message_str
    }
    assert save_json(test_json, filename)
    received_json = load_json(filename, 0)
    assert received_json
    assert received_json['content'] == message_str
    encoded_str = json.dumps(test_json, ensure_ascii=False)
    assert message_str in encoded_str
    # best-effort cleanup of the temporary file
    try:
        os.remove(filename)
    except OSError:
        pass


def _test_save_load_json():
    """Checks that a dict saved with save_json is reloaded with
    identical keys and values.
    """
    print('test_save_load_json')
    test_json = {
        "param1": 3,
        "param2": '"Crème brûlée यह एक परीक्षण ह"'
    }
    test_filename = '.epicyon_tests_test_save_load_json.json'
    # remove any leftover file from a previous run
    if os.path.isfile(test_filename):
        try:
            os.remove(test_filename)
        except OSError:
            pass
    assert save_json(test_json, test_filename)
    assert os.path.isfile(test_filename)
    test_load_json = load_json(test_filename)
    assert test_load_json
    assert test_load_json.get('param1')
    assert test_load_json.get('param2')
    assert test_load_json['param1'] == 3
    assert test_load_json['param2'] == '"Crème brûlée यह एक परीक्षण ह"'
    try:
        os.remove(test_filename)
    except OSError:
        pass


def _test_theme():
    """Checks that set_css_param replaces the value of a CSS variable,
    including when the variable occurs more than once or at the start
    of the string.
    """
    print('test_theme')
    css = 'somestring --background-value: 24px; --foreground-value: 24px;'
    result = set_css_param(css, 'background-value', '32px')
    assert result == \
        'somestring --background-value: 32px; --foreground-value: 24px;'
    # every occurrence of the parameter is replaced
    css = \
        'somestring --background-value: 24px; --foreground-value: 24px; ' + \
        '--background-value: 24px;'
    result = set_css_param(css, 'background-value', '32px')
    assert result == \
        'somestring --background-value: 32px; --foreground-value: 24px; ' + \
        '--background-value: 32px;'
    # parameter appearing at the start of the string
    css = '--background-value: 24px; --foreground-value: 24px;'
    result = set_css_param(css, 'background-value', '32px')
    assert result == '--background-value: 32px; --foreground-value: 24px;'


def _test_recent_posts_cache():
    """Checks that update_recent_posts_cache keeps the index, json and
    html caches bounded at max_recent_posts entries.
    """
    print('test_recent_posts_cache')
    recent_posts_cache = {}
    max_recent_posts = 3
    # NOTE(review): this literal appears truncated by text extraction —
    # verify against the upstream source
    html_str = ''
    # insert more posts than the cache limit
    for i in range(5):
        post_json_object = {
            "id": "https://somesite.whatever/users/someuser/statuses/" + str(i)
        }
        update_recent_posts_cache(recent_posts_cache, max_recent_posts,
                                  post_json_object, html_str)
    assert len(recent_posts_cache['index']) == max_recent_posts
    assert len(recent_posts_cache['json'].items()) == max_recent_posts
    assert len(recent_posts_cache['html'].items()) == max_recent_posts


def _test_remove_txt_formatting():
    print('test_remove_txt_formatting')
    test_str = '

Text without formatting

' result_str = remove_text_formatting(test_str, False) assert result_str == test_str test_str = '

Text with

formatting

' result_str = remove_text_formatting(test_str, False) assert result_str == '

Text with formatting

' def _test_jsonld(): print("test_jsonld") jld_document = { "@context": "https://www.w3.org/ns/activitystreams", "actor": "https://somesite.net/users/gerbil", "description": "My json document", "numberField": 83582, "object": { "content": "valid content" } } # private_key_pem, public_key_pem = generate_rsa_key() private_key_pem = '-----BEGIN RSA PRIVATE KEY-----\n' \ 'MIIEowIBAAKCAQEAod9iHfIn4ugY/2byFrFjUprrFLkkH5bCrjiBq2/MdHFg99IQ\n' \ '7li2x2mg5fkBMhU5SJIxlN8kiZMFq7JUXSA97Yo4puhVubqTSHihIh6Xn2mTjTgs\n' \ 'zNo9SBbmN3YiyBPTcr0rF4jGWZAduJ8u6i7Eky2QH+UBKyUNRZrcfoVq+7grHUIA\n' \ '45pE7vAfEEWtgRiw32Nwlx55N3hayHax0y8gMdKEF/vfYKRLcM7rZgEASMtlCpgy\n' \ 'fsyHwFCDzl/BP8AhP9u3dM+SEundeAvF58AiXx1pKvBpxqttDNAsKWCRQ06/WI/W\n' \ '2Rwihl9yCjobqRoFsZ/cTEi6FG9AbDAds5YjTwIDAQABAoIBAERL3rbpy8Bl0t43\n' \ 'jh7a+yAIMvVMZBxb3InrV3KAug/LInGNFQ2rKnsaawN8uu9pmwCuhfLc7yqIeJUH\n' \ 'qaadCuPlNJ/fWQQC309tbfbaV3iv78xejjBkSATZfIqb8nLeQpGflMXaNG3na1LQ\n' \ '/tdZoiDC0ZNTaNnOSTo765oKKqhHUTQkwkGChrwG3Js5jekV4zpPMLhUafXk6ksd\n' \ '8XLlZdCF3RUnuguXAg2xP/duxMYmTCx3eeGPkXBPQl0pahu8/6OtBoYvBrqNdQcx\n' \ 'jnEtYX9PCqDY3hAXW9GWsxNfu02DKhWigFHFNRUQtMI++438+QIfzXPslE2bTQIt\n' \ '0OXUlwECgYEAxTKUZ7lwIBb5XKPJq53RQmX66M3ArxI1RzFSKm1+/CmxvYiN0c+5\n' \ '2Aq62WEIauX6hoZ7yQb4zhdeNRzinLR7rsmBvIcP12FidXG37q9v3Vu70KmHniJE\n' \ 'TPbt5lHQ0bNACFxkar4Ab/JZN4CkMRgJdlcZ5boYNmcGOYCvw9izuM8CgYEA0iQ1\n' \ 'khIFZ6fCiXwVRGvEHmqSnkBmBHz8MY8fczv2Z4Gzfq3Tlh9VxpigK2F2pFt7keWc\n' \ '53HerYFHFpf5otDhEyRwA1LyIcwbj5HopumxsB2WG+/M2as45lLfWa6KO73OtPpU\n' \ 'wGZYW+i/otdk9eFphceYtw19mxI+3lYoeI8EjYECgYBxOtTKJkmCs45lqkp/d3QT\n' \ '2zjSempcXGkpQuG6KPtUUaCUgxdj1RISQj792OCbeQh8PDZRvOYaeIKInthkQKIQ\n' \ 'P/Z1yVvIQUvmwfBqZmQmR6k1bFLJ80UiqFr7+BiegH2RD3Q9cnIP1aly3DPrWLD+\n' \ 'OY9OQKfsfQWu+PxzyTeRMwKBgD8Zjlh5PtQ8RKcB8mTkMzSq7bHFRpzsZtH+1wPE\n' \ 'Kp40DRDp41H9wMTsiZPdJUH/EmDh4LaCs8nHuu/m3JfuPtd/pn7pBjntzwzSVFji\n' \ 'bW+jwrJK1Gk8B87pbZXBWlLMEOi5Dn/je37Fqd2c7f0DHauFHq9AxsmsteIPXwGs\n' \ 
'eEKBAoGBAIzJX/5yFp3ObkPracIfOJ/U/HF1UdP6Y8qmOJBZOg5s9Y+JAdY76raK\n' \ '0SbZPsOpuFUdTiRkSI3w/p1IuM5dPxgCGH9MHqjqogU5QwXr3vLF+a/PFhINkn1x\n' \ 'lozRZjDcF1y6xHfExotPC973UZnKEviq9/FqOsovZpvSQkzAYSZF\n' \ '-----END RSA PRIVATE KEY-----' public_key_pem = '-----BEGIN PUBLIC KEY-----\n' \ 'MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAod9iHfIn4ugY/2byFrFj\n' \ 'UprrFLkkH5bCrjiBq2/MdHFg99IQ7li2x2mg5fkBMhU5SJIxlN8kiZMFq7JUXSA9\n' \ '7Yo4puhVubqTSHihIh6Xn2mTjTgszNo9SBbmN3YiyBPTcr0rF4jGWZAduJ8u6i7E\n' \ 'ky2QH+UBKyUNRZrcfoVq+7grHUIA45pE7vAfEEWtgRiw32Nwlx55N3hayHax0y8g\n' \ 'MdKEF/vfYKRLcM7rZgEASMtlCpgyfsyHwFCDzl/BP8AhP9u3dM+SEundeAvF58Ai\n' \ 'Xx1pKvBpxqttDNAsKWCRQ06/WI/W2Rwihl9yCjobqRoFsZ/cTEi6FG9AbDAds5Yj\n' \ 'TwIDAQAB\n' \ '-----END PUBLIC KEY-----' signed_document = jld_document.copy() generate_json_signature(signed_document, private_key_pem) assert signed_document assert signed_document.get('signature') assert signed_document['signature'].get('signatureValue') assert signed_document['signature'].get('nonce') assert signed_document['signature'].get('type') assert len(signed_document['signature']['signatureValue']) > 50 assert signed_document['signature']['type'] == 'RsaSignature2017' assert verify_json_signature(signed_document, public_key_pem) # alter the signed document signed_document['object']['content'] = 'forged content' assert not verify_json_signature(signed_document, public_key_pem) jld_document2 = { "@context": "https://www.w3.org/ns/activitystreams", "actor": "https://somesite.net/users/gerbil", "description": "Another json document", "numberField": 13353, "object": { "content": "More content" } } signed_document2 = jld_document2.copy() generate_json_signature(signed_document2, private_key_pem) assert signed_document2 assert signed_document2.get('signature') assert signed_document2['signature'].get('signatureValue') # changed signature on different document if signed_document['signature']['signatureValue'] == \ signed_document2['signature']['signatureValue']: 
print('json signature has not changed for different documents')
    # the signature should be plain base64 (no dots) and long enough
    # for a 2048-bit RSA signature
    assert '.' not in str(signed_document['signature']['signatureValue'])
    assert len(str(signed_document['signature']['signatureValue'])) > 340
    # different documents must produce different signatures
    assert (signed_document['signature']['signatureValue'] !=
            signed_document2['signature']['signatureValue'])
    print('json-ld tests passed')


def _test_site_active():
    """Checks site_is_active against known-good and known-bad sites.

    NOTE(review): this test performs real network requests, so it can
    fail without network access.
    """
    print('test_site_is_active')
    timeout = 10
    sites_unavailable = []
    # at least one site should resolve
    if not site_is_active('https://archive.org', timeout, sites_unavailable):
        if not site_is_active('https://wikipedia.org', timeout,
                              sites_unavailable):
            assert site_is_active('https://mastodon.social', timeout,
                                  sites_unavailable)
    # a non-existent domain should not be reported as active
    assert not site_is_active('https://notarealwebsite.a.b.c', timeout,
                              sites_unavailable)


def _test_strip_html():
    """Checks that remove_html strips markup while preserving the
    readable text, newlines for paragraphs/br, and bare urls.
    """
    print('test_remove_html')
    test_str = 'This string has no html.'
    assert remove_html(test_str) == test_str
    # NOTE(review): several literals below appear truncated by text
    # extraction (markup stripped) — verify against the upstream source
    test_str = 'This string has html.'
    assert remove_html(test_str) == 'This string has html.'
    test_str = ''
    assert remove_html(test_str) == 'This string has. Two labels.'
    test_str = '

This string has.

Two paragraphs.

' assert remove_html(test_str) == 'This string has.\n\nTwo paragraphs.' test_str = 'This string has.
A new line.' assert remove_html(test_str) == 'This string has.\nA new line.' test_str = '

This string contains a url http://somesite.or.other

'
    assert remove_html(test_str) == \
        'This string contains a url http://somesite.or.other'


def _test_danger_css(base_dir: str) -> None:
    """Checks that no css file in the top-level directory is flagged
    as dangerous by dangerous_css.
    """
    print('test_dangerous_css')
    for _, _, files in os.walk(base_dir):
        for fname in files:
            if not fname.endswith('.css'):
                continue
            assert not dangerous_css(base_dir + '/' + fname, False)
        # only the top-level directory is checked
        break


def _test_danger_svg(base_dir: str) -> None:
    """Checks dangerous_svg detection, remove_script cleanup, and
    cache_svg_images rewriting of remote svg attachment urls.

    NOTE(review): the svg string literals below appear to have had
    their markup stripped by text extraction — verify against the
    upstream source before relying on their exact content.
    """
    print('test_dangerous_svg')
    svg_content = \
        ' ' + \
        ' ' + \
        ''
    # a clean svg is not flagged, and remove_script leaves it untouched
    assert not dangerous_svg(svg_content, False)
    cleaned_up = remove_script(svg_content, None, None, None)
    assert cleaned_up == svg_content
    svg_content = \
        ' ' + \
        ' ' + \
        '' + \
        ' ' + \
        ''
    # an svg containing script content is flagged
    assert dangerous_svg(svg_content, False)
    svg_clean = \
        ' ' + \
        ' ' + \
        ''
    # remove_script should strip the script and yield the clean form
    cleaned_up = remove_script(svg_content, None, None, None)
    assert '' not in cleaned_up
    if cleaned_up != svg_clean:
        print(cleaned_up)
    assert cleaned_up == svg_clean
    session = None
    http_prefix = 'https'
    domain = 'ratsratsrats.live'
    domain_full = domain
    onion_domain = None
    i2p_domain = None
    federation_list = []
    debug = True
    svg_image_filename = base_dir + '/.unit_test_safe.svg'
    # a post with a remote svg attachment to be cached locally
    post_json_object = {
        "object": {
            "id": "1234",
            "attributedTo": "someactor",
            "attachment": [
                {
                    "mediaType": "svg",
                    "url": "https://somesiteorother.net/media/wibble.svg"
                }
            ]
        }
    }
    with open(svg_image_filename, 'wb+') as fp_svg:
        fp_svg.write(svg_content.encode('utf-8'))
    assert os.path.isfile(svg_image_filename)
    assert svg_content != svg_clean
    assert cache_svg_images(session, base_dir, http_prefix, domain,
                            domain_full, onion_domain, i2p_domain,
                            post_json_object, federation_list, debug,
                            svg_image_filename)
    # the attachment url should now point at the local domain
    url = get_url_from_post(post_json_object['object']['attachment'][0]['url'])
    assert url == 'https://ratsratsrats.live/media/1234_wibble.svg'
    # the cached file content should be the cleaned svg
    with open(svg_image_filename, 'rb') as fp_svg:
        cached_content = fp_svg.read().decode()
    os.remove(svg_image_filename)
    assert cached_content == svg_clean
    assert not scan_themes_for_scripts(base_dir)


def _test_danger_markup():
    print('test_dangerous_markup')
    allow_local_network_access = 
False content = '

This is a valid message

' assert not dangerous_markup(content, allow_local_network_access, []) content = 'This is a valid message without markup' assert not dangerous_markup(content, allow_local_network_access, []) content = '

This is a valid-looking message. But wait... ' + \ '

' assert dangerous_markup(content, allow_local_network_access, []) content = '

This is a valid-looking message. But wait... ' + \ '<script>document.getElementById("concentrated")' + \ '.innerHTML = "evil";</script>

' assert dangerous_markup(content, allow_local_network_access, []) content = '

This html contains more than you expected... ' + \ '

' assert dangerous_markup(content, allow_local_network_access, []) content = '

This html contains more than you expected... ' + \ '

' assert dangerous_markup(content, allow_local_network_access, []) content = '

This is a valid-looking message. But wait... ' + \ '' expected_text = 'Some text with some script' safe_text = safe_web_text(web_text) if expected_text != safe_text: print('Original html: ' + web_text) print('Expected html: ' + expected_text) print('Actual html: ' + safe_text) assert expected_text == safe_text def _test_published_to_local_timezone() -> None: print('published_to_local_timezone') published_str = '2022-02-25T20:15:00Z' timezone = 'Europe/Berlin' published = \ date_from_string_format(published_str, ["%Y-%m-%dT%H:%M:%S%z"]) datetime_object = \ convert_published_to_local_timezone(published, timezone) local_time_str = datetime_object.strftime("%a %b %d, %H:%M") assert local_time_str == 'Fri Feb 25, 21:15' timezone = 'Asia/Seoul' published = \ date_from_string_format(published_str, ["%Y-%m-%dT%H:%M:%S%z"]) datetime_object = \ convert_published_to_local_timezone(published, timezone) local_time_str = datetime_object.strftime("%a %b %d, %H:%M") assert local_time_str == 'Sat Feb 26, 05:15' def _test_bold_reading() -> None: print('bold_reading') text = "This is a test of emboldening." text_bold = bold_reading_string(text) expected = \ "This is a test of " + \ "emboldening." if text_bold != expected: print(text_bold) assert text_bold == expected text = "

This is a test of emboldening with paragraph.

" text_bold = bold_reading_string(text) expected = \ "

This is a test of " + \ "emboldening with paragraph.

" if text_bold != expected: print(text_bold) assert text_bold == expected text = \ "

This is a test of emboldening

" + \ "

With more than one paragraph.

" text_bold = bold_reading_string(text) expected = \ "

This is a test of " + \ "emboldening

With more " + \ "than one paragraph.

" if text_bold != expected: print(text_bold) assert text_bold == expected text = '

This is a test

' text_bold = bold_reading_string(text) expected = \ '

This is a test ' + \ '

' if text_bold != expected: print(text_bold) assert text_bold == expected text = "There's the quoted text here" text_bold = bold_reading_string(text) expected = \ "There's the quoted text here" if text_bold != expected: print(text_bold) assert text_bold == expected text = '

@Someone or other' + \ ' some text

' text_bold = bold_reading_string(text) expected = \ '

' + \ '@Someone or other' + \ ' some text

' if text_bold != expected: print(text_bold) assert text_bold == expected def _test_diff_content() -> None: print('diff_content') prev_content = \ 'Some text before.\n' + \ 'Starting sentence. This is some content.\nThis is another line.' content = \ 'Some text before.\nThis is some more content.\nThis is another line.' result = content_diff(content, prev_content) expected = \ '



' + \ '

' assert result == expected content = \ 'Some text before.\nThis is content.\nThis line.' result = content_diff(content, prev_content) expected = \ '


' + \ '
' + \ '
' + \ '
' + \ '

' assert result == expected system_language = "en" languages_understood = ["en"] translate = { "SHOW EDITS": "SHOW EDITS" } timezone = 'Europe/Berlin' content1 = \ "

This is some content.

" + \ "

Some other content.

" content2 = \ "

This is some previous content.

" + \ "

Some other previous content.

" content3 = \ "

This is some more previous content.

" + \ "

Some other previous content.

" post_json_object = { "object": { "content": content1, "published": "2020-12-14T00:08:06Z" } } edits_json = { "2020-12-14T00:05:19Z": { "object": { "content": content3, "published": "2020-12-14T00:05:19Z" } }, "2020-12-14T00:07:34Z": { "object": { "contentMap": { "en": content2 }, "published": "2020-12-14T00:07:34Z" } } } html_str = \ create_edits_html(edits_json, post_json_object, translate, timezone, system_language, languages_understood) assert html_str expected = \ '
SHOW EDITS' + \ '

Mon Dec 14, 01:07



' + \ '
' + \ '

Mon Dec 14, 01:05

' + \ '

' assert html_str == expected def _test_missing_theme_colors(base_dir: str) -> None: print('test_missing_colors') theme_filename = base_dir + '/theme/default/theme.json' assert os.path.isfile(theme_filename) default_theme_json = load_json(theme_filename) assert default_theme_json themes = get_themes_list(base_dir) for theme_name in themes: if theme_name == 'default': continue theme_filename = \ base_dir + '/theme/' + theme_name.lower() + '/theme.json' if not os.path.isfile(theme_filename): continue theme_json = load_json(theme_filename) if not theme_json: continue updated = False for property, value in default_theme_json.items(): if not theme_json.get(property): theme_json[property] = value updated = True if updated: save_json(theme_json, theme_filename) print(theme_name + ' updated') def _test_color_contrast_value(base_dir: str) -> None: print('test_color_contrast_value') minimum_color_contrast = 4.5 background = 'black' foreground = 'white' contrast = color_contrast(background, foreground) assert contrast assert contrast > 20 assert contrast < 22 foreground = 'grey' contrast = color_contrast(background, foreground) assert contrast assert contrast > 5 assert contrast < 6 themes = get_themes_list(base_dir) for theme_name in themes: theme_filename = \ base_dir + '/theme/' + theme_name.lower() + '/theme.json' if not os.path.isfile(theme_filename): continue theme_json = load_json(theme_filename) if not theme_json: continue if not theme_json.get('main-fg-color'): continue if not theme_json.get('main-bg-color'): continue foreground = theme_json['main-fg-color'] background = theme_json['main-bg-color'] contrast = color_contrast(background, foreground) if contrast is None: continue if contrast < minimum_color_contrast: print('Theme ' + theme_name + ' has not enough color contrast ' + str(contrast) + ' < ' + str(minimum_color_contrast)) assert contrast >= minimum_color_contrast print('Color contrast is ok for all themes') def _test_remove_end_of_line(): 
print('remove_end_of_line') text = 'some text\r\n' expected = 'some text' assert remove_eol(text) == expected text = 'some text' assert remove_eol(text) == expected def _test_dogwhistles(): print('dogwhistles') dogwhistles = { "X-hamstered": "hamsterism", "gerbil": "rodent", "*snake": "slither", "start*end": "something" } content = 'This text does not contain any dogwhistles' assert not detect_dogwhistles(content, dogwhistles) content = 'A gerbil named joe' assert detect_dogwhistles(content, dogwhistles) content = 'A rattlesnake.' assert detect_dogwhistles(content, dogwhistles) content = 'A startthingend.' assert detect_dogwhistles(content, dogwhistles) content = 'This content is unhamstered and yhamstered.' result = detect_dogwhistles(content, dogwhistles) assert result assert result.get('hamstered') assert result['hamstered']['count'] == 2 assert result['hamstered']['category'] == "hamsterism" def _test_text_standardize(): print('text_standardize') expected = 'This is a test' result = standardize_text(expected) if result != expected: print(result) assert result == expected text = '𝔗𝔥𝔦𝔰 𝔦𝔰 𝔞 𝔱𝔢𝔰𝔱' result = standardize_text(text) if result != expected: print(result) assert result == expected text = '𝕿𝖍𝖎𝖘 𝖎𝖘 𝖆 𝖙𝖊𝖘𝖙' result = standardize_text(text) if result != expected: print(result) assert result == expected text = '𝓣𝓱𝓲𝓼 𝓲𝓼 𝓪 𝓽𝓮𝓼𝓽' result = standardize_text(text) if result != expected: print(result) assert result == expected text = '𝒯𝒽𝒾𝓈 𝒾𝓈 𝒶 𝓉𝑒𝓈𝓉' result = standardize_text(text) if result != expected: print(result) assert result == expected text = '𝕋𝕙𝕚𝕤 𝕚𝕤 𝕒 𝕥𝕖𝕤𝕥' result = standardize_text(text) if result != expected: print(result) assert result == expected text = 'This is a test' result = standardize_text(text) if result != expected: print(result) assert result == expected def _test_combine_lines(): print('combine_lines') text = 'This is a test' expected = text result = combine_textarea_lines(text) if result != expected: print('expected: ' + expected) 
print('result: ' + result) assert result == expected text = 'First line.\n\nSecond line.' expected = 'First line.

Second line.' result = combine_textarea_lines(text) if result != expected: print('expected: ' + expected) print('result: ' + result) assert result == expected text = 'First\nline.\n\nSecond\nline.' expected = 'First line.

Second line.' result = combine_textarea_lines(text) if result != expected: print('expected: ' + expected) print('result: ' + result) assert result == expected # with extra space text = 'First\nline.\n\nSecond \nline.' expected = 'First line.

Second line.' result = combine_textarea_lines(text) if result != expected: print('expected: ' + expected) print('result: ' + result) assert result == expected text = 'Introduction blurb.\n\n* List item 1\n' + \ '* List item 2\n* List item 3\n\nFinal blurb.' expected = 'Introduction blurb.

* List item 1\n' + \ '* List item 2\n* List item 3

Final blurb.' result = combine_textarea_lines(text) if result != expected: print('expected: ' + expected) print('result: ' + result) assert result == expected def _test_hashtag_maps(): print('hashtag_maps') content = \ "

This is a test, with a couple of links and a " + \ "#" + \ "Hashtag

" + \ "https://" + \ "" + \ "www.openstreetmap.org/#map=19/52.90860/-3.59917

" + \ "" + \ "https://" + \ "www.google.com/maps/@52.217291,-3.081186" + \ "5,20.04z

" + \ "#" + \ "AnotherHashtag

" map_links = get_map_links_from_post_content(content) link = "www.google.com/maps/@52.217291,-3.0811865,20.04z" assert link in map_links zoom, latitude, longitude = \ geocoords_from_map_link(link, 'openstreetmap.org') assert zoom == 20 assert latitude assert int(latitude * 1000) == 52217 assert longitude assert int(longitude * 1000) == -3081 link = "www.openstreetmap.org/#map=19/52.90860/-3.59917" assert link in map_links zoom, latitude, longitude = \ geocoords_from_map_link(link, 'openstreetmap.org') assert zoom == 19 assert latitude assert int(latitude * 1000) == 52908 assert longitude assert int(longitude * 1000) == -3599 assert len(map_links) == 2 def _test_uninvert(): print('test_uninvert') text = 'ʇsƎʇ ɐ sı sıɥ⊥' expected = "This is a tEst" result = remove_inverted_text(text, 'en') if result != expected: print('text: ' + text) print('expected: ' + expected) print('result: ' + result) assert result == expected text = '🅻🅴🆅🅸🅰🆃🅰🆁 abc' expected = "LEVIATAR abc" result = remove_square_capitals(text, 'en') if result != expected: print('expected: ' + expected) print('result: ' + result) print('text: ' + text) assert result == expected text = '

Some ordinary text

ʇsǝʇ ɐ sı sıɥʇ

' expected = "

Some ordinary text

this is a test

" result = remove_inverted_text(text, 'en') if result != expected: print('text: ' + text) print('expected: ' + expected) print('result: ' + result) assert result == expected def _test_emoji_in_actor_name(base_dir: str) -> None: print('test_emoji_in_actor_name') actor_json = { 'name': 'First Sea Lord Wibbles :verified:', 'tag': [] } http_prefix = 'https' domain = 'fluffysupernova.city' port = 443 add_name_emojis_to_tags(base_dir, http_prefix, domain, port, actor_json) assert len(actor_json['tag']) == 1 assert actor_json['tag'][0].get('updated') assert actor_json['tag'][0]['name'] == ':verified:' def _test_reply_language(base_dir: str) -> None: print('reply_language') post_json_object = { 'object': { 'contentMap': { 'en': 'This is some content' } } } assert get_reply_language(base_dir, post_json_object) == 'en' post_json_object = { 'object': { 'contentMap': { 'xx': 'This is some content', 'de': 'This is some content' } } } assert get_reply_language(base_dir, post_json_object) == 'de' post_json_object = { 'object': { } } assert not get_reply_language(base_dir, post_json_object) def _test_replace_variable(): print('test_replace_variable') link = 'red?firstpost=123' result = replace_link_variable(link, 'firstpost', '456', '?') expected = 'red?firstpost=456' if result != expected: print('expected: ' + expected) print('result: ' + result) assert result == expected link = 'red?firstpost=123?test?firstpost=444?abc' result = replace_link_variable(link, 'firstpost', '356', '?') expected = 'red?firstpost=356?test?firstpost=356?abc' if result != expected: print('expected: ' + expected) print('result: ' + result) assert result == expected def _test_replace_remote_tags() -> None: print('replace_remote_tags') nickname = 'mynick' domain = 'furious.duck' content = 'This is a test' result = replace_remote_hashtags(content, nickname, domain) assert result == content link = "https://something/else/mytag" content = 'This is href="' + link + '" test' result = 
replace_remote_hashtags(content, nickname, domain) assert result == content link = "https://something/tags/mytag" content = 'This is href="' + link + '" test' result = replace_remote_hashtags(content, nickname, domain) expected = \ 'This is href="/users/' + nickname + '?remotetag=' + \ link.replace('/', '--') + '" test' if result != expected: print(expected) print(result) assert result == expected def _test_html_closing_tag() -> None: print('html_closing_tag') content = '

@ooh@wibbly.wobbly.worldLike, ' + \ 'OMG!

Something with
some-widget ' + \ 'and something else ' + \ 'totally' + \ ' on the razzle.

As for it
' + \ 'WHATEVER' + \ ' archaeopteryx.

' assert html_tag_has_closing('code', content) content = '

Some code

' assert not html_tag_has_closing('code', content) content = \ "
a@b1$ c
d
e
f
g

h@i$ j
" + \ " k
l * * *
m * * *
n * * *
×
" + \ "

o

p

" assert html_tag_has_closing('code', content) assert html_tag_has_closing('pre', content) def _test_remove_style() -> None: print('remove_style') html_str = '

this is a test

' result = remove_style_within_html(html_str) assert result == html_str html_str = \ 'something' result = remove_style_within_html(html_str) expected = \ 'something' if result != expected: print(expected + '\n\n' + result) assert result == expected def _test_convert_markdown() -> None: print('convert_markdown') content_str = "

Ooh, it's content!

" expected_content_str = content_str message_json = { "content": content_str, "mediaType": "text/html" } convert_post_content_to_html(message_json) if message_json['content'] != expected_content_str: print("Result: " + message_json['content']) assert message_json['content'] == expected_content_str content_str = "

Ooh, it's **content!**

" expected_content_str = "

Ooh, it's content!

" message_json = { "content": content_str, "mediaType": "text/markdown" } convert_post_content_to_html(message_json) if message_json['content'] != expected_content_str: print("Result: " + message_json['content']) assert message_json['content'] == expected_content_str assert message_json['mediaType'] == 'text/html' content_str = "

Ooh, it's *content!*

" expected_content_str = "

Ooh, it's content!

" message_json = { "content": content_str, "mediaType": "text/markdown" } convert_post_content_to_html(message_json) if message_json['content'] != expected_content_str: print("Result: " + message_json['content']) assert message_json['content'] == expected_content_str assert message_json['mediaType'] == 'text/html' content_str = "Ooh, it's _content!_" expected_content_str = "Ooh, it's content!" message_json = { "content": content_str, "contentMap": { "en": content_str, "bogus": { "decoy": "text" }, "de": content_str }, "mediaType": "text/markdown" } convert_post_content_to_html(message_json) if message_json['content'] != expected_content_str: print("Result: " + message_json['content']) assert message_json['content'] == expected_content_str assert message_json['contentMap']["en"] == expected_content_str assert message_json['contentMap']["de"] == expected_content_str assert message_json['mediaType'] == 'text/html' def _test_xor_hashes(): print('xor_hashes') sync_json = { "orderedItems": [ 'https://somedomain/users/somenick', 'https://anotherdomain/users/anothernick' ] } result = get_followers_sync_hash(sync_json) expected = \ '316f8dfdf471920a9cdc17da48feead398378e927dee3372d938c524aa7d8917' if result != expected: print('expected: ' + expected) print('result: ' + result) assert result == expected def _test_featured_tags() -> None: print('featured_tags') actor_json = { "id": "https://somesite/users/somenick" } featured_tags = '#dog #cat' set_featured_hashtags(actor_json, featured_tags) assert actor_json.get('tag') assert len(actor_json['tag']) == 2 result = get_featured_hashtags(actor_json) if result != featured_tags: pprint(actor_json) print('result: ' + result) print('expected: ' + featured_tags) assert result == featured_tags def _test_remove_tag() -> None: print('remove_tag') test_html = 'This is a test' result = remove_markup_tag(test_html, 'pre') assert result == test_html test_html = '
This is a test
' result = remove_markup_tag(test_html, 'pre') if result != 'This is a test': print('expected: This is a test') print('result: ' + result) assert result == 'This is a test' test_html = 'Previous
this is a test
' result = remove_markup_tag(test_html, 'pre') if result != 'Previous this is a test': print('expected: Previous this is a test') print('result: ' + result) assert result == 'Previous this is a test' test_html = '
This is a test

' + \ 'something
again
' result = remove_markup_tag(test_html, 'pre') if result != 'This is a test
something
again': print('expected: This is a test
something
again') print('result: ' + result) assert result == 'This is a test
something
again' def _test_is_right_to_left() -> None: print('is_right_to_left') text = 'This is a test' assert not is_right_to_left_text(text) # arabic text = 'هذا اختبار' assert is_right_to_left_text(text) text = 'Das ist ein Test' assert not is_right_to_left_text(text) # persian text = 'این یک امتحان است' assert is_right_to_left_text(text) # chinese text = '这是一个测试' assert not is_right_to_left_text(text) # hebrew text = 'זה מבחן' assert is_right_to_left_text(text) # yiddish text = 'דאָס איז אַ פּראָבע' assert is_right_to_left_text(text) def _test_format_mixed_rtl() -> None: print('format_mixed_rtl') content = '

This is some English

' + \ '

هذه عربية

' + \ '

And more English

' result = format_mixed_right_to_left(content, 'en') expected = '

This is some English

' + \ '

هذه عربية

' + \ '

And more English

' if result != expected: print('Expected: ' + expected) print('Result: ' + result) assert result == expected content = '

This is some only English

' result = format_mixed_right_to_left(content, 'en') assert result == content content = 'This is some only English without markup' result = format_mixed_right_to_left(content, 'en') assert result == content content = '

هذا عربي فقط

' result = format_mixed_right_to_left(content, 'en') expected = '

هذا عربي فقط

' assert result == expected result = format_mixed_right_to_left(content, 'ar') assert result == content content = 'This is some English

' + \ 'هذه عربية

' + \ 'And more English' result = format_mixed_right_to_left(content, 'en') expected = 'This is some English

' + \ '
هذه عربية


' + \ 'And more English' if result != expected: print('Expected: ' + expected) print('Result: ' + result) assert result == expected content = 'هذه عربية' result = format_mixed_right_to_left(content, 'en') expected = '
هذه عربية
' assert result == expected def _test_dateformat(): print('dateformat') date_str = 'Mon, 20 Nov 2023 16:51:15 GMT' formats = ("%a, %d %b %Y %H:%M:%S %Z", "%a, %d %b %Y %H:%M:%S %z") dtime = date_from_string_format(date_str, formats) print(str(dtime)) assert dtime.tzinfo def _test_book_link(base_dir: str): print('book_link') system_language = 'en' books_cache = {} max_recent_books = 1000 max_cached_readers = 10 base_dir2 = base_dir + '/.testbookevents' if os.path.isdir(base_dir2): shutil.rmtree(base_dir2, ignore_errors=False) os.mkdir(base_dir2) content = 'Not a link' result = get_book_link_from_content(content) assert result is None book_url = 'https://bookwyrm.instance/book/1234567' content = 'xyz wants to read Title' result = get_book_link_from_content(content) assert result == book_url book_url = 'https://en.wikipedia.org/wiki/The_Arasaka_Brainworm' content = "

wants to read Title

" result = get_book_link_from_content(content) assert result == book_url book_url = 'https://bookwyrm.instance/user/hj/1234567' content = 'xyz wants to read Title' result = get_book_link_from_content(content) assert result == book_url book_url = 'bookwyrm.instance/book/1234567' content = 'xyz wants to read Title' result = get_book_link_from_content(content) assert result is None book_url = 'https://bookwyrm.instance/other/1234567' content = 'xyz wants to read ' + book_url + '">Title' result = get_book_link_from_content(content) assert result is None title = 'Tedious Tome' image_url = 'https://bookwyrm.instance/images/previews/covers/1234.jpg' book_url = 'https://bookwyrm.instance/book/56789' content = '

xyz wants to read ' + title + '

' actor = 'https://bookwyrm.instance/user/xyz' id_str = actor + '/generatednote/63472854' published = '2024-01-01T10:30:00.2+00:00' post_json_object = { '@context': 'https://www.w3.org/ns/activitystreams', 'attachment': [{'@context': 'https://www.w3.org/ns/activitystreams', 'name': title, 'type': 'Document', 'url': image_url}], 'attributedTo': actor, 'cc': [actor + '/followers'], 'content': content, 'id': id_str, 'published': published, 'sensitive': False, 'tag': [{'href': book_url, 'name': title, 'type': 'Edition'}], 'to': ['https://www.w3.org/ns/activitystreams#Public'], 'type': 'Note'} languages_understood = [] translate = {} book_dict = get_book_from_post(post_json_object, True) assert book_dict assert book_dict['name'] == title assert book_dict['href'] == book_url result = get_reading_status(post_json_object, system_language, languages_understood, translate, True) assert result.get('type') assert result['actor'] == actor assert result['published'] == published assert result['type'] == 'want' assert result['href'] == book_url assert result['name'] == title assert result['id'] == id_str assert store_book_events(base_dir2, post_json_object, system_language, languages_understood, translate, True, max_recent_books, books_cache, max_cached_readers) expected_readers = 1 print('reader_list 1: ' + str(books_cache['reader_list'])) actor = "https://some.instance/users/hiw" id_str = actor + "/statuses/6293" book_url = "https://en.wikipedia.org/wiki/The_Arasaka_Brainworm" title = "The Arasaka Brainworm" content = "

wants to read " + title + "

" published = "2024-01-04T19:14:26Z" post_json_object = { "@context": [ "https://www.w3.org/ns/activitystreams", { "ostatus": "http://ostatus.org#", "atomUri": "ostatus:atomUri", "inReplyToAtomUri": "ostatus:inReplyToAtomUri", "conversation": "ostatus:conversation", "sensitive": "as:sensitive", "toot": "http://joinmastodon.org/ns#", "votersCount": "toot:votersCount", "blurhash": "toot:blurhash" } ], "id": id_str + "/activity", "type": "Create", "actor": actor, "published": published, "to": [ "https://www.w3.org/ns/activitystreams#Public" ], "cc": [ actor + "/followers" ], "object": { "id": id_str, "conversation": actor + "/statuses/6293", "context": actor + "/statuses/6293", "type": "Note", "summary": None, "inReplyTo": None, "published": published, "url": "https://some.instance/@hiw/6293", "attributedTo": actor + "", "to": [ "https://www.w3.org/ns/activitystreams#Public" ], "cc": [ actor + "/followers" ], "sensitive": False, "atomUri": actor + "/statuses/6293", "inReplyToAtomUri": None, "commentsEnabled": False, "rejectReplies": True, "mediaType": "text/html", "content": content, "contentMap": { "en": content }, "attachment": [ { "mediaType": "image/jpeg", "name": "Book cover test", "type": "Document", "url": "https://some.instance/808b.jpg", "@context": [ "https://www.w3.org/ns/activitystreams", { "schema": "https://schema.org#" } ], "blurhash": "UcHU%#4n_ND%?bxatRWBIU%MazxtNaRjs:of", "width": 174, "height": 225 }, { "type": "PropertyValue", "name": "license", "value": "https://creativecommons.org/licenses/by-nc/4.0" } ], "tag": [ { "href": book_url, "name": title, "type": "Edition" } ], "crawlable": False } } book_dict = get_book_from_post(post_json_object['object'], True) assert book_dict assert book_dict['name'] == title assert book_dict['href'] == book_url result = get_reading_status(post_json_object, system_language, languages_understood, translate, True) assert result.get('type') assert result['actor'] == actor assert result['published'] == published assert 
result['type'] == 'want' assert result['href'] == book_url assert result['name'] == title assert result['id'] == id_str assert store_book_events(base_dir2, post_json_object, system_language, languages_understood, translate, True, max_recent_books, books_cache, max_cached_readers) expected_readers += 1 print('reader_list 2: ' + str(books_cache['reader_list'])) title = 'The Rise of the Meritocracy' image_url = 'https://bookwyrm.instance/images/previews/covers/6735.jpg' book_url = 'https://bookwyrm.instance/book/7235' content = 'abc finished reading ' + title + '' actor = 'https://bookwyrm.instance/user/abc' id_str = actor + '/generatednote/366458384' published = '2024-01-02T11:30:00.2+00:00' post_json_object = { '@context': 'https://www.w3.org/ns/activitystreams', 'attachment': [{'@context': 'https://www.w3.org/ns/activitystreams', 'name': title, 'type': 'Document', 'url': image_url}], 'attributedTo': actor, 'cc': [actor + '/followers'], 'content': content, 'id': id_str, 'published': published, 'sensitive': False, 'tag': [{'href': book_url, 'name': title, 'type': 'Edition'}], 'to': ['https://www.w3.org/ns/activitystreams#Public'], 'type': 'Note'} book_dict = get_book_from_post(post_json_object, True) assert book_dict assert book_dict['name'] == title assert book_dict['href'] == book_url result = get_reading_status(post_json_object, system_language, languages_understood, translate, True) assert result.get('type') assert result['actor'] == actor assert result['published'] == published assert result['type'] == 'finished' assert result['href'] == book_url assert result['name'] == title assert result['id'] == id_str assert store_book_events(base_dir2, post_json_object, system_language, languages_understood, translate, True, max_recent_books, books_cache, max_cached_readers) expected_readers += 1 print('reader_list 3: ' + str(books_cache['reader_list'])) title = 'Pirate Enlightenment, or the Real Libertalia' image_url = 
'https://bookwyrm.instance/images/previews/covers/5283.jpg' book_url = 'https://bookwyrm.instance/book/78252' content = 'rated ' + title + '' actor = 'https://bookwyrm.instance/user/ghi' rating = 3.5 id_str = actor + '/generatednote/73467834576' published = '2024-01-03T12:30:00.2+00:00' post_json_object = { '@context': 'https://www.w3.org/ns/activitystreams', 'attachment': [{'@context': 'https://www.w3.org/ns/activitystreams', 'name': title, 'type': 'Document', 'url': image_url}], 'attributedTo': actor, 'cc': [actor + '/followers'], 'content': content, 'rating': rating, 'id': id_str, 'published': published, 'sensitive': False, 'to': ['https://www.w3.org/ns/activitystreams#Public'], 'type': 'Note'} book_dict = get_book_from_post(post_json_object, True) assert not book_dict result = get_reading_status(post_json_object, system_language, languages_understood, translate, True) assert result.get('type') assert result['actor'] == actor assert result['published'] == published assert result['type'] == 'rated' assert result['href'] == book_url assert result['rating'] == rating assert result['id'] == id_str assert store_book_events(base_dir2, post_json_object, system_language, languages_understood, translate, True, max_recent_books, books_cache, max_cached_readers) expected_readers += 1 print('reader_list 4: ' + str(books_cache['reader_list'])) assert books_cache assert 'reader_list' in books_cache if len(books_cache['reader_list']) != expected_readers: pprint(books_cache) print('reader_list: ' + str(books_cache['reader_list'])) assert len(books_cache['reader_list']) == expected_readers assert books_cache['reader_list'][expected_readers - 1] == actor assert books_cache['readers'].get(actor) if os.path.isdir(base_dir2): shutil.rmtree(base_dir2, ignore_errors=False) def _test_uninvert2(): print('uninvert2') inverted_text = 'abcdefghijklmnopqrstuvwxyz' uninverted_text = uninvert_text(inverted_text) if uninverted_text != inverted_text: print('inverted: ' + inverted_text) 
print('uninverted: ' + uninverted_text) assert uninverted_text == inverted_text inverted_text = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' uninverted_text = uninvert_text(inverted_text) if uninverted_text != inverted_text: print('inverted: ' + inverted_text) print('uninverted: ' + uninverted_text) assert uninverted_text == inverted_text inverted_text = '[ʍǝɹpuɐ]' uninverted_text = uninvert_text(inverted_text) if uninverted_text != '[andrew]': print('inverted: ' + inverted_text) print('uninverted: ' + uninverted_text) assert uninverted_text == '[andrew]' inverted_text = '˙ʇsǝʇ ɐ sı sıɥ⊥' uninverted_text = uninvert_text(inverted_text) if uninverted_text != 'This is a test.': print('inverted: ' + inverted_text) print('uninverted: ' + uninverted_text) assert uninverted_text == 'This is a test.' inverted_text = 'uspol' uninverted_text = uninvert_text(inverted_text) if uninverted_text != 'uspol': print('inverted: ' + inverted_text) print('uninverted: ' + uninverted_text) assert uninverted_text == 'uspol' def _test_check_individual_post_content(): print('check_individual_post_content') content = "

Unenshitification?

" + \ "Counter-enshitification?

" + \ "

Anti-enshitification?

Nonshitification?

" content2 = remove_style_within_html(content) if content2 != content: print(content) print(content2) assert content2 == content content3 = remove_long_words(content, 40, []) if content3 != content: print(content) print(content3) assert content3 == content content4 = remove_text_formatting(content, False) if content4 != content: print(content) print(content4) assert content4 == content content5 = limit_repeated_words(content, 6) if content5 != content: print(content) print(content5) assert content5 == content content = "

D-A-N-G-E-R-O-U-S
A-N-I-M-A-L

" + \ "

D-A-N-G-E-R-O-U-S
A-N-I-M-A-L

" content2 = remove_style_within_html(content) if content2 != content: print(content) print(content2) assert content2 == content content3 = remove_long_words(content, 40, []) if content3 != content: print(content) print(content3) assert content3 == content content4 = remove_text_formatting(content, False) if content4 != content: print(content) print(content4) assert content4 == content content5 = limit_repeated_words(content, 6) if content5 != content: print(content) print(content5) assert content5 == content def _test_remove_tags() -> None: print('remove_tags') content = 'This is some content' result = remove_incomplete_code_tags(content) assert result == content content = 'This is some content' result = remove_incomplete_code_tags(content) assert result == 'This is some content' content = 'This is some content' result = remove_incomplete_code_tags(content) assert result == 'This is some content' content = 'This is some content. Some other content' result = remove_incomplete_code_tags(content) assert result == 'This is some content. Some other content' content = \ 'This is some content. Some other content' result = remove_incomplete_code_tags(content) assert result == 'This is some content. 
Some other content' def _test_link_tracking() -> None: print('link tracking') url = 'someweblink.net/some/path' expected = url assert remove_link_tracking(url) == expected url = \ 'https://somenauseating.com/we-want-to-track-your-web-browsing-' + \ 'habits-and-then-sell-that-to-letter-agencies?utm_medium=email&' + \ 'utm_campaign=Latest%20from%20SomeNauseating%20DotCom' + \ '%20for%20April%2024%202024%20-%503948479461&utm_content=' + \ 'Latest%20from%20SomeNauseating%20DotCom%20for%20April%2024%' + \ '202024%20-%34567123+CID_34678246&utm_source=campaign_monitor_uk' + \ '&utm_term=wibble' expected = \ 'https://somenauseating.com/we-want-to-track-your-web-browsing-' + \ 'habits-and-then-sell-that-to-letter-agencies' assert remove_link_tracking(url) == expected content = 'Some content' expected = content assert remove_link_trackers_from_content(content) == expected content = \ 'Some content' expected = \ 'Some content' assert remove_link_trackers_from_content(content) == expected content = \ 'Some content ' + \ 'scurrilous' expected = \ 'Some content ' + \ 'scurrilous' assert remove_link_trackers_from_content(content) == expected def run_all_tests(): base_dir = os.getcwd() data_dir_testing(base_dir) print('Running tests...') update_default_themes_list(os.getcwd()) _test_source_contains_no_tabs() _translate_ontology(base_dir) _test_get_price_from_string() _test_post_variable_names() _test_config_param_names() _test_post_field_names('daemon.py', ['fields', 'actor_json']) _test_post_field_names('theme.py', ['config_json']) _test_post_field_names('inbox.py', ['queue_json', 'post_json_object', 'message_json', 'liked_post_json']) _test_checkbox_names() _test_thread_functions() _test_functions() _test_link_tracking() _test_remove_tags() _test_check_individual_post_content() _test_uninvert2() _test_book_link(base_dir) _test_dateformat() _test_is_right_to_left() _test_format_mixed_rtl() _test_remove_tag() _test_featured_tags() _test_xor_hashes() _test_convert_markdown() 
# NOTE(review): the line above is mangled in transit — it contains the
# closing fragment of a string literal opened on the previous line, all
# of _test_link_tracking(), and the opening of run_all_tests(), jammed
# onto one physical line. _test_link_tracking() verifies that
# remove_link_tracking() strips utm_* query parameters from a URL while
# leaving a plain path untouched; its remove_link_trackers_from_content()
# cases presumably contained <a href=…> markup originally (stripped here
# — TODO: confirm against upstream tests.py before editing).
# run_all_tests() is the sequential test driver: it resolves the working
# directory, points the data dir at a test location via
# data_dir_testing(), refreshes the default theme list, then invokes
# every _test_* function in a fixed order. Any failing assert inside a
# test aborts the whole run. Calls continue below.
_test_remove_style() _test_html_closing_tag() _test_replace_remote_tags() _test_replace_variable() _test_missing_theme_colors(base_dir) _test_reply_language(base_dir) _test_emoji_in_actor_name(base_dir) _test_uninvert() _test_hashtag_maps() _test_combine_lines() _test_text_standardize() _test_dogwhistles() _test_remove_end_of_line() _test_translation_labels() _test_color_contrast_value(base_dir) _test_diff_content() _test_bold_reading() _test_published_to_local_timezone() _test_safe_webtext() _test_link_from_rss_item() _test_xml_podcast_dict(base_dir) _test_get_actor_from_in_reply_to() _test_valid_emoji_content() _test_add_cw_lists(base_dir) _test_word_similarity() _test_seconds_between_publish() _test_sign_and_verify() _test_danger_svg(base_dir) _test_can_replyto(base_dir) _test_date_conversions() _test_authorized_shared_items() _test_valid_password2() _test_get_links_from_content() _test_set_actor_language() _test_limit_repeted_words() _test_word_lengths_limit() _test_switch_word(base_dir) _test_useragent_domain() _test_roles() _test_skills() _test_spoofed_geolocation() _test_remove_interactions() _test_extract_pgp_public_key() _test_emoji_images() _test_camel_case_split() _test_speaker_replace_link() _test_extract_text_fields_from_post() _test_markdown_to_html() _test_valid_hash_tag() _test_prepare_html_post_nick() _test_domain_handling() _test_mastoapi() _test_links_within_post(base_dir) _test_reply_to_public_post(base_dir) _test_mentioned_people(base_dir) _test_guess_tag_category() _test_valid_nick() _test_parse_newswire_feed_date() _test_first_paragraph_from_string() _test_newswire_tags() _test_hashtag_rules() _test_strip_html_tag() _test_replace_email_quote() _test_constant_time_string() _test_translations(base_dir) _test_valid_content_warning() _test_remove_id_ending() _test_json_post_allows_comment() _run_html_replace_quote_marks() _test_danger_css(base_dir) _test_danger_markup() _test_strip_html() _test_site_active() _test_jsonld() 
# NOTE(review): still inside run_all_tests() — the remaining test calls
# follow, then a 2-second sleep (presumably to let daemon/thread tests
# wind down — confirm intent) before the success message is printed.
_test_remove_txt_formatting() _test_web_links() _test_recent_posts_cache() _test_theme() _test_save_load_json() _test_json_string() _test_get_status_number() _test_addemoji(base_dir) _test_actor_parsing() _test_httpsig(base_dir) _test_http_signed_get(base_dir) _test_http_sig_new('rsa-sha256', 'rsa-sha256') _test_httpsig_base_new(True, base_dir, 'rsa-sha256', 'rsa-sha256') _test_httpsig_base_new(False, base_dir, 'rsa-sha256', 'rsa-sha256') _test_cache() _test_threads() _test_create_person_account(base_dir) _test_authentication(base_dir) _test_followers_of_person(base_dir) _test_followers_on_domain(base_dir) _test_follows(base_dir) _test_group_followers(base_dir) time.sleep(2) print('Tests succeeded\n')