2020-04-04 11:27:51 +00:00
|
|
|
|
# Module metadata.
__filename__ = "shares.py"
__author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.5.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Timeline"
|
2020-04-04 11:27:51 +00:00
|
|
|
|
|
2019-07-23 12:33:09 +00:00
|
|
|
|
import os
|
2021-07-25 10:17:39 +00:00
|
|
|
|
import re
|
2021-07-26 09:40:51 +00:00
|
|
|
|
import secrets
|
2019-07-23 12:33:09 +00:00
|
|
|
|
import time
|
2021-07-24 22:08:11 +00:00
|
|
|
|
import datetime
|
2021-08-07 10:29:40 +00:00
|
|
|
|
from random import randint
|
2021-08-04 12:04:35 +00:00
|
|
|
|
from pprint import pprint
|
2021-12-29 21:55:09 +00:00
|
|
|
|
from session import get_json
|
|
|
|
|
from webfinger import webfinger_handle
|
2021-12-28 21:36:27 +00:00
|
|
|
|
from auth import create_basic_auth_header
|
2021-12-29 21:55:09 +00:00
|
|
|
|
from auth import constant_time_string_check
|
|
|
|
|
from posts import get_person_box
|
|
|
|
|
from session import post_json
|
|
|
|
|
from session import post_image
|
2021-12-28 16:56:57 +00:00
|
|
|
|
from session import create_session
|
2023-08-13 09:58:02 +00:00
|
|
|
|
from session import get_json_valid
|
2024-05-12 12:35:26 +00:00
|
|
|
|
from utils import data_dir
|
2024-01-27 17:04:21 +00:00
|
|
|
|
from utils import resembles_url
|
2023-11-20 22:27:58 +00:00
|
|
|
|
from utils import date_utcnow
|
2023-08-24 13:58:06 +00:00
|
|
|
|
from utils import dangerous_markup
|
2023-08-22 17:13:35 +00:00
|
|
|
|
from utils import remove_html
|
|
|
|
|
from utils import get_media_extensions
|
2022-12-18 15:29:54 +00:00
|
|
|
|
from utils import acct_handle_dir
|
2022-06-21 11:58:50 +00:00
|
|
|
|
from utils import remove_eol
|
2022-04-09 15:11:22 +00:00
|
|
|
|
from utils import has_object_string_type
|
2021-12-26 18:01:02 +00:00
|
|
|
|
from utils import date_string_to_seconds
|
2021-12-26 17:55:38 +00:00
|
|
|
|
from utils import date_seconds_to_string
|
2021-12-26 14:08:58 +00:00
|
|
|
|
from utils import get_config_param
|
2021-12-26 12:45:03 +00:00
|
|
|
|
from utils import get_full_domain
|
2021-12-28 14:41:10 +00:00
|
|
|
|
from utils import valid_nickname
|
2021-12-26 15:13:34 +00:00
|
|
|
|
from utils import load_json
|
2021-12-26 14:47:21 +00:00
|
|
|
|
from utils import save_json
|
2021-12-26 14:26:16 +00:00
|
|
|
|
from utils import get_image_extensions
|
2021-12-26 18:17:37 +00:00
|
|
|
|
from utils import remove_domain_port
|
2021-12-26 18:46:43 +00:00
|
|
|
|
from utils import is_account_dir
|
2021-12-26 12:02:29 +00:00
|
|
|
|
from utils import acct_dir
|
2021-12-26 18:03:39 +00:00
|
|
|
|
from utils import is_float
|
2021-12-26 17:18:34 +00:00
|
|
|
|
from utils import get_category_types
|
2021-12-26 17:24:00 +00:00
|
|
|
|
from utils import get_shares_files_list
|
2021-12-26 10:19:59 +00:00
|
|
|
|
from utils import local_actor_url
|
2024-01-09 16:59:23 +00:00
|
|
|
|
from utils import get_actor_from_post
|
2021-12-28 21:36:27 +00:00
|
|
|
|
from media import process_meta_data
|
|
|
|
|
from media import convert_image_to_low_bandwidth
|
2021-12-29 21:55:09 +00:00
|
|
|
|
from filters import is_filtered_globally
|
|
|
|
|
from siteactive import site_is_active
|
|
|
|
|
from content import get_price_from_string
|
|
|
|
|
from blocking import is_blocked
|
2022-07-28 09:59:18 +00:00
|
|
|
|
from threads import begin_thread
|
2023-08-23 16:29:55 +00:00
|
|
|
|
from cache import remove_person_from_cache
|
|
|
|
|
from cache import store_person_in_cache
|
2019-07-23 12:33:09 +00:00
|
|
|
|
|
2020-04-04 11:27:51 +00:00
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def _load_dfc_ids(base_dir: str, system_language: str,
                  product_type: str,
                  http_prefix: str, domain_full: str) -> {}:
    """Loads the product types ontology
    This is used to add an id to shared items

    Returns a dictionary mapping lowercased label text to item id for
    the given product_type in the given system_language, or None if the
    ontology cannot be loaded or does not support the language.
    """
    # prefer a site-customized ontology file if present,
    # e.g. ontology/customFoodTypes.json, otherwise fall back to the
    # default file, e.g. ontology/foodTypes.json
    product_types_filename = \
        base_dir + '/ontology/custom' + product_type.title() + 'Types.json'
    if not os.path.isfile(product_types_filename):
        product_types_filename = \
            base_dir + '/ontology/' + product_type + 'Types.json'
    product_types = load_json(product_types_filename)
    if not product_types:
        print('Unable to load ontology: ' + product_types_filename)
        return None
    # the ontology is JSON-LD, with all entries under '@graph'
    if not product_types.get('@graph'):
        print('No @graph list within ontology')
        return None
    if len(product_types['@graph']) == 0:
        print('@graph list has no contents')
        return None
    if not product_types['@graph'][0].get('rdfs:label'):
        print('@graph list entry has no rdfs:label')
        return None
    # check that the ontology has labels in the system language,
    # using the first graph entry as a representative sample
    language_exists = False
    for label in product_types['@graph'][0]['rdfs:label']:
        if not label.get('@language'):
            continue
        if label['@language'] == system_language:
            language_exists = True
            break
    if not language_exists:
        print('product_types ontology does not contain the language ' +
              system_language)
        return None
    # build the label -> id lookup, rewriting upstream DFC ids so that
    # they point at this instance's own domain
    dfc_ids = {}
    for item in product_types['@graph']:
        if not item.get('@id'):
            continue
        if not item.get('rdfs:label'):
            continue
        for label in item['rdfs:label']:
            if not label.get('@language'):
                continue
            if not label.get('@value'):
                continue
            if label['@language'] == system_language:
                item_id = \
                    item['@id'].replace('http://static.datafoodconsortium.org',
                                        http_prefix + '://' + domain_full)
                dfc_ids[label['@value'].lower()] = item_id
                # only the first label in the system language is used
                break
    return dfc_ids
|
2021-07-24 14:38:43 +00:00
|
|
|
|
|
|
|
|
|
|
2022-01-03 16:58:50 +00:00
|
|
|
|
def _get_valid_shared_item_id(actor: str, display_name: str) -> str:
|
2019-11-02 10:24:25 +00:00
|
|
|
|
"""Removes any invalid characters from the display name to
|
|
|
|
|
produce an item ID
|
|
|
|
|
"""
|
2022-01-03 16:58:50 +00:00
|
|
|
|
remove_chars = (' ', '\n', '\r', '#')
|
|
|
|
|
for char in remove_chars:
|
|
|
|
|
display_name = display_name.replace(char, '')
|
|
|
|
|
remove_chars2 = ('+', '/', '\\', '?', '&')
|
|
|
|
|
for char in remove_chars2:
|
|
|
|
|
display_name = display_name.replace(char, '-')
|
|
|
|
|
display_name = display_name.replace('.', '_')
|
|
|
|
|
display_name = display_name.replace("’", "'")
|
2021-07-27 20:14:13 +00:00
|
|
|
|
actor = actor.replace('://', '___')
|
|
|
|
|
actor = actor.replace('/', '--')
|
2022-01-03 16:58:50 +00:00
|
|
|
|
return actor + '--shareditems--' + display_name
|
2019-11-02 10:24:25 +00:00
|
|
|
|
|
2020-04-04 11:27:51 +00:00
|
|
|
|
|
2024-03-01 17:10:04 +00:00
|
|
|
|
def remove_shared_item2(base_dir: str, nickname: str, domain: str,
                        item_id: str, shares_file_type: str) -> None:
    """Removes a share for a person

    shares_file_type selects which collection is edited
    ('shares' or 'wanted' — the corresponding json file in the
    account directory). Any uploaded image for the item is also
    deleted. Errors are reported to stdout; nothing is raised.
    """
    shares_filename = \
        acct_dir(base_dir, nickname, domain) + '/' + shares_file_type + '.json'
    if not os.path.isfile(shares_filename):
        print('ERROR: remove shared item, missing ' +
              shares_file_type + '.json ' + shares_filename)
        return

    shares_json = load_json(shares_filename)
    if not shares_json:
        print('ERROR: remove shared item, ' +
              shares_file_type + '.json could not be loaded from ' +
              shares_filename)
        return

    if shares_json.get(item_id):
        # remove any image for the item
        item_idfile = base_dir + '/sharefiles/' + nickname + '/' + item_id
        if shares_json[item_id]['imageUrl']:
            # the stored file extension matches the url's extension
            formats = get_image_extensions()
            for ext in formats:
                if shares_json[item_id]['imageUrl'].endswith('.' + ext):
                    if os.path.isfile(item_idfile + '.' + ext):
                        try:
                            os.remove(item_idfile + '.' + ext)
                        except OSError:
                            print('EX: remove_shared_item unable to delete ' +
                                  item_idfile + '.' + ext)
        # remove the item itself
        del shares_json[item_id]
        save_json(shares_json, shares_filename)
    else:
        print('ERROR: share index "' + item_id +
              '" does not exist in ' + shares_filename)
|
2020-04-04 11:27:51 +00:00
|
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def _add_share_duration_sec(duration: str, published: int) -> int:
|
2021-06-26 19:01:48 +00:00
|
|
|
|
"""Returns the duration for the shared item in seconds
|
|
|
|
|
"""
|
|
|
|
|
if ' ' not in duration:
|
|
|
|
|
return 0
|
2022-01-03 16:58:50 +00:00
|
|
|
|
duration_list = duration.split(' ')
|
|
|
|
|
if not duration_list[0].isdigit():
|
2021-06-26 19:01:48 +00:00
|
|
|
|
return 0
|
2022-01-03 16:58:50 +00:00
|
|
|
|
if 'hour' in duration_list[1]:
|
|
|
|
|
return published + (int(duration_list[0]) * 60 * 60)
|
|
|
|
|
if 'day' in duration_list[1]:
|
|
|
|
|
return published + (int(duration_list[0]) * 60 * 60 * 24)
|
|
|
|
|
if 'week' in duration_list[1]:
|
|
|
|
|
return published + (int(duration_list[0]) * 60 * 60 * 24 * 7)
|
|
|
|
|
if 'month' in duration_list[1]:
|
|
|
|
|
return published + (int(duration_list[0]) * 60 * 60 * 24 * 30)
|
|
|
|
|
if 'year' in duration_list[1]:
|
|
|
|
|
return published + (int(duration_list[0]) * 60 * 60 * 24 * 365)
|
2021-06-26 19:01:48 +00:00
|
|
|
|
return 0
|
|
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def _dfc_product_type_from_category(base_dir: str,
                                    item_category: str, translate: {}) -> str:
    """Does the shared item category match a DFC product type?
    If so then return the product type.
    This will be used to select an appropriate ontology file
    such as ontology/foodTypes.json
    """
    category_lower = item_category.lower()
    for known_type in get_category_types(base_dir):
        # prefer the translated name for the product type when one exists
        search_term = known_type
        if translate.get(known_type):
            search_term = translate[known_type]
        if search_term in category_lower:
            return known_type
    return None
|
|
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def _getshare_dfc_id(base_dir: str, system_language: str,
                     item_type: str, item_category: str,
                     translate: {},
                     http_prefix: str, domain_full: str,
                     dfc_ids: {} = None) -> str:
    """Attempts to obtain a DFC Id for the shared item,
    based upon product_types ontology.
    See https://github.com/datafoodconsortium/ontology

    Returns the matched ontology id, an 'epicyon#' fallback id when the
    category matches no product type, or '' when the ontology for the
    matched product type cannot be loaded.
    """
    # does the category field match any product type ontology
    # files in the ontology subdirectory?
    matched_product_type = \
        _dfc_product_type_from_category(base_dir, item_category, translate)
    if not matched_product_type:
        # no ontology applies, so synthesize a local id from the type
        item_type = item_type.replace(' ', '_')
        item_type = item_type.replace('.', '')
        return 'epicyon#' + item_type
    if not dfc_ids:
        # caller may pass a pre-loaded lookup to avoid re-reading files
        dfc_ids = _load_dfc_ids(base_dir, system_language,
                                matched_product_type,
                                http_prefix, domain_full)
        if not dfc_ids:
            return ''
    # first pass: longest ontology label appearing verbatim
    # as a substring of the item type
    item_type_lower = item_type.lower()
    match_name = ''
    match_id = ''
    for name, uri in dfc_ids.items():
        if name not in item_type_lower:
            continue
        if len(name) > len(match_name):
            match_name = name
            match_id = uri
    if not match_id:
        # bag of words match: the label whose individual words
        # most often occur within the item type wins
        max_matched_words = 0
        for name, uri in dfc_ids.items():
            name = name.replace('-', ' ')
            words = name.split(' ')
            score = 0
            for wrd in words:
                if wrd in item_type_lower:
                    score += 1
            if score > max_matched_words:
                max_matched_words = score
                match_id = uri
    return match_id
|
2021-07-24 14:38:43 +00:00
|
|
|
|
|
|
|
|
|
|
2022-01-03 16:58:50 +00:00
|
|
|
|
def _getshare_type_from_dfc_id(dfc_uri: str, dfc_ids: {}) -> str:
|
2021-07-27 12:55:44 +00:00
|
|
|
|
"""Attempts to obtain a share item type from its DFC Id,
|
2022-01-03 16:58:50 +00:00
|
|
|
|
based upon product_types ontology.
|
2021-07-27 12:55:44 +00:00
|
|
|
|
See https://github.com/datafoodconsortium/ontology
|
|
|
|
|
"""
|
2022-01-03 16:58:50 +00:00
|
|
|
|
if dfc_uri.startswith('epicyon#'):
|
|
|
|
|
item_type = dfc_uri.split('#')[1]
|
|
|
|
|
item_type = item_type.replace('_', ' ')
|
|
|
|
|
return item_type
|
2021-07-27 21:59:49 +00:00
|
|
|
|
|
2022-01-03 16:58:50 +00:00
|
|
|
|
for name, uri in dfc_ids.items():
|
|
|
|
|
if uri.endswith('#' + dfc_uri):
|
2021-07-27 12:55:44 +00:00
|
|
|
|
return name
|
2022-01-03 16:58:50 +00:00
|
|
|
|
if uri == dfc_uri:
|
2021-07-27 12:55:44 +00:00
|
|
|
|
return name
|
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def _indicate_new_share_available(base_dir: str, http_prefix: str,
                                  nickname: str, domain: str,
                                  domain_full: str,
                                  shares_file_type: str,
                                  block_federated: []) -> None:
    """Indicate to each account that a new share is available

    Writes a .newShare (or .newWanted) marker file into every local
    account directory, containing the url of the sharing account's
    shares timeline. Accounts which already have a marker, or which
    block the sharing account, are skipped.
    """
    dir_str = data_dir(base_dir)
    for _, dirs, _ in os.walk(dir_str):
        for handle in dirs:
            if not is_account_dir(handle):
                continue
            account_dir = acct_handle_dir(base_dir, handle)
            if shares_file_type == 'shares':
                new_share_file = account_dir + '/.newShare'
            else:
                new_share_file = account_dir + '/.newWanted'
            # a pending notification already exists for this account
            if os.path.isfile(new_share_file):
                continue
            account_nickname = handle.split('@')[0]
            # does this account block you?
            if account_nickname != nickname:
                if is_blocked(base_dir, account_nickname, domain,
                              nickname, domain, None, block_federated):
                    continue
            local_actor = \
                local_actor_url(http_prefix, account_nickname, domain_full)
            try:
                with open(new_share_file, 'w+', encoding='utf-8') as fp_new:
                    if shares_file_type == 'shares':
                        fp_new.write(local_actor + '/tlshares')
                    else:
                        fp_new.write(local_actor + '/tlwanted')
            except OSError:
                print('EX: _indicate_new_share_available unable to write ' +
                      str(new_share_file))
        # only the top level of the accounts directory is walked
        break
|
|
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def add_share(base_dir: str,
              http_prefix: str, nickname: str, domain: str, port: int,
              display_name: str, summary: str, image_filename: str,
              item_qty: float, item_type: str, item_category: str,
              location: str, duration: str, debug: bool, city: str,
              price: str, currency: str,
              system_language: str, translate: {},
              shares_file_type: str, low_bandwidth: bool,
              content_license_url: str, share_on_profile: bool,
              block_federated: []) -> None:
    """Adds a new share

    Appends an entry to the account's shares (or wanted) json file,
    processes any uploaded image for the item, and notifies other
    local accounts that a new share exists.
    """
    # apply the instance-wide content filter to all the text fields
    if is_filtered_globally(base_dir,
                            display_name + ' ' + summary + ' ' +
                            item_type + ' ' + item_category,
                            system_language):
        print('Shared item was filtered due to content')
        return
    shares_filename = \
        acct_dir(base_dir, nickname, domain) + '/' + shares_file_type + '.json'
    shares_json = {}
    if os.path.isfile(shares_filename):
        shares_json = load_json(shares_filename)

    # convert the human readable duration into an expiry time
    duration = duration.lower()
    published = int(time.time())
    duration_sec = _add_share_duration_sec(duration, published)

    domain_full = get_full_domain(domain, port)
    actor = local_actor_url(http_prefix, nickname, domain_full)
    item_id = _get_valid_shared_item_id(actor, display_name)
    dfc_id = _getshare_dfc_id(base_dir, system_language,
                              item_type, item_category, translate,
                              http_prefix, domain_full)

    # has an image for this share been uploaded?
    image_url = None
    move_image = False
    if not image_filename:
        # no filename given, so look for a pending upload file
        # in the account directory
        shares_image_filename = \
            acct_dir(base_dir, nickname, domain) + '/upload'
        formats = get_image_extensions()
        for ext in formats:
            if os.path.isfile(shares_image_filename + '.' + ext):
                image_filename = shares_image_filename + '.' + ext
                move_image = True

    # NOTE(review): domain_full was already computed above;
    # this repeat assignment is redundant but harmless
    domain_full = get_full_domain(domain, port)

    # copy or move the image for the shared item to its destination
    if image_filename:
        if os.path.isfile(image_filename):
            if not os.path.isdir(base_dir + '/sharefiles'):
                os.mkdir(base_dir + '/sharefiles')
            if not os.path.isdir(base_dir + '/sharefiles/' + nickname):
                os.mkdir(base_dir + '/sharefiles/' + nickname)
            item_idfile = base_dir + '/sharefiles/' + nickname + '/' + item_id
            formats = get_image_extensions()
            for ext in formats:
                if not image_filename.endswith('.' + ext):
                    continue
                if low_bandwidth:
                    convert_image_to_low_bandwidth(image_filename)
                # strips exif metadata while copying to the destination
                process_meta_data(base_dir, nickname, domain,
                                  image_filename, item_idfile + '.' + ext,
                                  city, content_license_url)
                if move_image:
                    try:
                        os.remove(image_filename)
                    except OSError:
                        print('EX: add_share unable to delete ' +
                              str(image_filename))
                image_url = \
                    http_prefix + '://' + domain_full + \
                    '/sharefiles/' + nickname + '/' + item_id + '.' + ext

    # add or replace the item in the shares collection
    shares_json[item_id] = {
        "displayName": display_name,
        "summary": summary,
        "imageUrl": image_url,
        "itemQty": float(item_qty),
        "dfcId": dfc_id,
        "itemType": item_type,
        "category": item_category,
        "location": location,
        "published": published,
        "expire": duration_sec,
        "itemPrice": price,
        "itemCurrency": currency,
        "shareOnProfile": share_on_profile
    }

    save_json(shares_json, shares_filename)

    _indicate_new_share_available(base_dir, http_prefix,
                                  nickname, domain, domain_full,
                                  shares_file_type,
                                  block_federated)
|
2019-11-02 11:15:13 +00:00
|
|
|
|
|
2020-04-04 11:27:51 +00:00
|
|
|
|
|
2023-08-23 16:29:55 +00:00
|
|
|
|
def expire_shares(base_dir: str, max_shares_on_profile: int,
                  person_cache: {}) -> None:
    """Removes expired items from shares

    Walks each local account, expires items in every shares file type,
    and for accounts whose 'shares' collection changed regenerates the
    shared items attachment on the actor and refreshes the person cache.
    """
    dir_str = data_dir(base_dir)
    for _, dirs, _ in os.walk(dir_str):
        for account in dirs:
            if not is_account_dir(account):
                continue
            nickname = account.split('@')[0]
            domain = account.split('@')[1]
            shares_list = get_shares_files_list()
            expired_ctr = 0
            for shares_file_type in shares_list:
                ctr = \
                    _expire_shares_for_account(base_dir, nickname, domain,
                                               shares_file_type)
                if shares_file_type == 'shares':
                    expired_ctr = ctr
            # have shared items been expired?
            # BUGFIX: the condition was previously inverted
            # (expired_ctr > 0), which skipped regeneration for
            # exactly the accounts whose shares had just changed
            if expired_ctr == 0:
                continue
            # regenerate shared items within actor attachment
            actor_filename = acct_dir(base_dir, nickname, domain) + '.json'
            if not os.path.isfile(actor_filename):
                continue
            actor_json = load_json(actor_filename)
            if not actor_json:
                continue
            if add_shares_to_actor(base_dir,
                                   nickname, domain,
                                   actor_json,
                                   max_shares_on_profile):
                actor = actor_json['id']
                remove_person_from_cache(base_dir, actor,
                                         person_cache)
                store_person_in_cache(base_dir, actor,
                                      actor_json,
                                      person_cache, True)
                save_json(actor_json, actor_filename)
        # only the top level of the accounts directory is walked
        break
|
2020-04-04 11:27:51 +00:00
|
|
|
|
|
2019-10-17 09:58:30 +00:00
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def _expire_shares_for_account(base_dir: str, nickname: str, domain: str,
                               shares_file_type: str) -> int:
    """Removes expired items from shares for a particular account
    Returns the number of items removed
    """
    handle_domain = remove_domain_port(domain)
    handle = nickname + '@' + handle_domain
    shares_filename = \
        acct_handle_dir(base_dir, handle) + '/' + shares_file_type + '.json'
    if not os.path.isfile(shares_filename):
        return 0
    shares_json = load_json(shares_filename)
    if not shares_json:
        return 0
    curr_time = int(time.time())
    # collect expired ids first, so the dict is not
    # mutated while being iterated
    delete_item_id = []
    for item_id, item in shares_json.items():
        if curr_time > item['expire']:
            delete_item_id.append(item_id)
    if not delete_item_id:
        return 0
    removed_ctr = len(delete_item_id)
    for item_id in delete_item_id:
        del shares_json[item_id]
        # remove any associated images
        item_idfile = base_dir + '/sharefiles/' + nickname + '/' + item_id
        formats = get_image_extensions()
        for ext in formats:
            if os.path.isfile(item_idfile + '.' + ext):
                try:
                    os.remove(item_idfile + '.' + ext)
                except OSError:
                    print('EX: _expire_shares_for_account unable to delete ' +
                          item_idfile + '.' + ext)
    save_json(shares_json, shares_filename)
    return removed_ctr
|
2020-04-04 11:27:51 +00:00
|
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def get_shares_feed_for_person(base_dir: str,
                               domain: str, port: int,
                               path: str, http_prefix: str,
                               shares_file_type: str,
                               shares_per_page: int) -> {}:
    """Returns the shares for an account from GET requests

    Returns an ActivityStreams OrderedCollection header when no page
    number is given, an OrderedCollectionPage for a specific page, or
    None if the path does not refer to a valid shares feed.
    """
    if '/' + shares_file_type not in path:
        return None
    # handle page numbers
    header_only = True
    page_number = None
    if '?page=' in path:
        page_number = path.split('?page=')[1]
        # guard against absurdly long page strings
        if len(page_number) > 5:
            page_number = 1
        if page_number == 'true':
            page_number = 1
        else:
            try:
                page_number = int(page_number)
            except BaseException:
                # leave page_number as the unparsed value;
                # it is defaulted to 1 further below
                print('EX: get_shares_feed_for_person ' +
                      'unable to convert to int ' + str(page_number))
        path = path.split('?page=')[0]
        header_only = False

    if not path.endswith('/' + shares_file_type):
        return None
    # extract the nickname from either /users/nick or /@nick paths
    nickname = None
    if path.startswith('/users/'):
        nickname = \
            path.replace('/users/', '', 1).replace('/' + shares_file_type, '')
    if path.startswith('/@'):
        if '/@/' not in path:
            nickname = \
                path.replace('/@', '', 1).replace('/' + shares_file_type, '')
    if not nickname:
        return None
    if not valid_nickname(domain, nickname):
        return None

    domain = get_full_domain(domain, port)

    handle_domain = remove_domain_port(domain)
    shares_filename = \
        acct_dir(base_dir, nickname, handle_domain) + '/' + \
        shares_file_type + '.json'

    if header_only:
        # no page given: return the OrderedCollection header
        no_of_shares = 0
        if os.path.isfile(shares_filename):
            shares_json = load_json(shares_filename)
            if shares_json:
                no_of_shares = len(shares_json.items())
        id_str = local_actor_url(http_prefix, nickname, domain)
        # NOTE(review): totalItems is a string here but an int in the
        # page variant below — confirm whether this is intended
        shares = {
            '@context': 'https://www.w3.org/ns/activitystreams',
            'first': id_str + '/' + shares_file_type + '?page=1',
            'id': id_str + '/' + shares_file_type,
            'totalItems': str(no_of_shares),
            'type': 'OrderedCollection'
        }
        return shares

    if not page_number:
        page_number = 1

    next_page_number = int(page_number + 1)
    id_str = local_actor_url(http_prefix, nickname, domain)
    shares = {
        '@context': 'https://www.w3.org/ns/activitystreams',
        'id': id_str + '/' + shares_file_type + '?page=' + str(page_number),
        'orderedItems': [],
        'partOf': id_str + '/' + shares_file_type,
        'totalItems': 0,
        'type': 'OrderedCollectionPage'
    }

    if not os.path.isfile(shares_filename):
        return shares
    curr_page = 1
    page_ctr = 0
    total_ctr = 0

    # walk all items, collecting only those on the requested page
    # while counting the total
    shares_json = load_json(shares_filename)
    if shares_json:
        for item_id, item in shares_json.items():
            page_ctr += 1
            total_ctr += 1
            if curr_page == page_number:
                # expose the item's id to the consumer of the feed
                item['shareId'] = item_id
                shares['orderedItems'].append(item)
            if page_ctr >= shares_per_page:
                page_ctr = 0
                curr_page += 1
    shares['totalItems'] = total_ctr
    last_page = int(total_ctr / shares_per_page)
    last_page = max(last_page, 1)
    # only advertise a 'next' page when past the final page
    if next_page_number > last_page:
        shares['next'] = \
            local_actor_url(http_prefix, nickname, domain) + \
            '/' + shares_file_type + '?page=' + str(last_page)
    return shares
|
2019-07-23 19:02:26 +00:00
|
|
|
|
|
2020-04-04 11:27:51 +00:00
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def send_share_via_server(base_dir, session,
                          from_nickname: str, password: str,
                          from_domain: str, from_port: int,
                          http_prefix: str, display_name: str,
                          summary: str, image_filename: str,
                          item_qty: float, item_type: str, item_category: str,
                          location: str, duration: str,
                          cached_webfingers: {}, person_cache: {},
                          debug: bool, project_version: str,
                          item_price: str, item_currency: str,
                          signing_priv_key_pem: str,
                          system_language: str) -> {}:
    """Creates an item share via c2s.
    Returns the Add activity dict on success, otherwise an
    integer error code.
    """
    if not session:
        print('WARN: No session for send_share_via_server')
        return 6

    # convert $4.23 to 4.23 USD
    price_str, currency_str = get_price_from_string(item_price)
    if price_str != item_price:
        item_price = price_str
        if not item_currency:
            if currency_str != item_currency:
                item_currency = currency_str

    from_domain_full = get_full_domain(from_domain, from_port)

    actor = local_actor_url(http_prefix, from_nickname, from_domain_full)
    to_url = 'https://www.w3.org/ns/activitystreams#Public'
    cc_url = actor + '/followers'

    # the Offer object describing the shared item
    share_offer = {
        "type": "Offer",
        "displayName": display_name,
        "summary": summary,
        "itemQty": float(item_qty),
        "itemType": item_type,
        "category": item_category,
        "location": location,
        "duration": duration,
        "itemPrice": item_price,
        "itemCurrency": item_currency,
        'to': [to_url],
        'cc': [cc_url]
    }
    # Add activity wrapping the Offer, targeting the actor's shares
    new_share_json = {
        "@context": "https://www.w3.org/ns/activitystreams",
        'type': 'Add',
        'actor': actor,
        'target': actor + '/shares',
        'object': share_offer,
        'to': [to_url],
        'cc': [cc_url]
    }

    handle = http_prefix + '://' + from_domain_full + '/@' + from_nickname

    # lookup the inbox for the To handle
    wf_request = webfinger_handle(session, handle, http_prefix,
                                  cached_webfingers,
                                  from_domain, project_version, debug,
                                  False, signing_priv_key_pem)
    if not wf_request:
        if debug:
            print('DEBUG: share webfinger failed for ' + handle)
        return 1
    if not isinstance(wf_request, dict):
        print('WARN: share webfinger for ' + handle +
              ' did not return a dict. ' + str(wf_request))
        return 1

    post_to_box = 'outbox'

    # get the actor inbox for the To handle
    origin_domain = from_domain
    (inbox_url, _, _, from_person_id, _, _,
     display_name, _) = get_person_box(signing_priv_key_pem,
                                       origin_domain,
                                       base_dir, session, wf_request,
                                       person_cache, project_version,
                                       http_prefix, from_nickname,
                                       from_domain, post_to_box,
                                       83653, system_language)

    if not inbox_url:
        if debug:
            print('DEBUG: share no ' + post_to_box +
                  ' was found for ' + handle)
        return 3
    if not from_person_id:
        if debug:
            print('DEBUG: share no actor was found for ' + handle)
        return 4

    auth_header = create_basic_auth_header(from_nickname, password)

    if image_filename:
        # upload the item image first, to the shares endpoint
        headers = {
            'host': from_domain,
            'Authorization': auth_header
        }
        shares_url = inbox_url.replace('/' + post_to_box, '/shares')
        post_result = post_image(session, image_filename, [], shares_url,
                                 headers, http_prefix, from_domain_full)

    headers = {
        'host': from_domain,
        'Content-type': 'application/json',
        'Authorization': auth_header
    }
    post_result = post_json(http_prefix, from_domain_full,
                            session, new_share_json, [], inbox_url,
                            headers, 30, True)
    if not post_result:
        if debug:
            print('DEBUG: POST share failed for c2s to ' + inbox_url)
        # return 5

    if debug:
        print('DEBUG: c2s POST share item success')

    return new_share_json
|
2019-07-23 20:00:17 +00:00
|
|
|
|
|
2020-04-04 11:27:51 +00:00
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def send_undo_share_via_server(base_dir: str, session,
                               from_nickname: str, password: str,
                               from_domain: str, from_port: int,
                               http_prefix: str, display_name: str,
                               cached_webfingers: {}, person_cache: {},
                               debug: bool, project_version: str,
                               signing_priv_key_pem: str,
                               system_language: str) -> {}:
    """Undoes a share via c2s.
    Returns the Remove activity dict on success, otherwise an
    integer error code.
    """
    if not session:
        print('WARN: No session for send_undo_share_via_server')
        return 6

    from_domain_full = get_full_domain(from_domain, from_port)

    actor = local_actor_url(http_prefix, from_nickname, from_domain_full)
    to_url = 'https://www.w3.org/ns/activitystreams#Public'
    cc_url = actor + '/followers'

    # Remove activity identifying the shared item by its displayName
    undo_share_json = {
        "@context": "https://www.w3.org/ns/activitystreams",
        'type': 'Remove',
        'actor': actor,
        'target': actor + '/shares',
        'object': {
            "type": "Offer",
            "displayName": display_name,
            'to': [to_url],
            'cc': [cc_url]
        },
        'to': [to_url],
        'cc': [cc_url]
    }

    handle = http_prefix + '://' + from_domain_full + '/@' + from_nickname

    # lookup the inbox for the To handle
    wf_request = webfinger_handle(session, handle, http_prefix,
                                  cached_webfingers,
                                  from_domain, project_version, debug,
                                  False, signing_priv_key_pem)
    if not wf_request:
        if debug:
            print('DEBUG: unshare webfinger failed for ' + handle)
        return 1
    if not isinstance(wf_request, dict):
        print('WARN: unshare webfinger for ' + handle +
              ' did not return a dict. ' + str(wf_request))
        return 1

    post_to_box = 'outbox'

    # get the actor inbox for the To handle
    origin_domain = from_domain
    (inbox_url, _, _, from_person_id, _, _,
     display_name, _) = get_person_box(signing_priv_key_pem,
                                       origin_domain,
                                       base_dir, session, wf_request,
                                       person_cache, project_version,
                                       http_prefix, from_nickname,
                                       from_domain, post_to_box,
                                       12663, system_language)

    if not inbox_url:
        if debug:
            print('DEBUG: unshare no ' + post_to_box +
                  ' was found for ' + handle)
        return 3
    if not from_person_id:
        if debug:
            print('DEBUG: unshare no actor was found for ' + handle)
        return 4

    auth_header = create_basic_auth_header(from_nickname, password)

    headers = {
        'host': from_domain,
        'Content-type': 'application/json',
        'Authorization': auth_header
    }
    post_result = post_json(http_prefix, from_domain_full,
                            session, undo_share_json, [], inbox_url,
                            headers, 30, True)
    if not post_result:
        if debug:
            print('DEBUG: POST unshare failed for c2s to ' + inbox_url)
        # return 5

    if debug:
        print('DEBUG: c2s POST unshare success')

    return undo_share_json
|
2019-07-23 21:14:16 +00:00
|
|
|
|
|
2020-04-04 11:27:51 +00:00
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def send_wanted_via_server(base_dir, session,
                           from_nickname: str, password: str,
                           from_domain: str, from_port: int,
                           http_prefix: str, display_name: str,
                           summary: str, image_filename: str,
                           item_qty: float, item_type: str, item_category: str,
                           location: str, duration: str,
                           cached_webfingers: {}, person_cache: {},
                           debug: bool, project_version: str,
                           item_max_price: str, item_currency: str,
                           signing_priv_key_pem: str,
                           system_language: str) -> {}:
    """Creates a wanted item via c2s.
    Returns the Add activity dict on success, otherwise an
    integer error code.
    """
    if not session:
        print('WARN: No session for send_wanted_via_server')
        return 6

    # convert $4.23 to 4.23 USD
    price_str, currency_str = get_price_from_string(item_max_price)
    if price_str != item_max_price:
        item_max_price = price_str
        if not item_currency:
            if currency_str != item_currency:
                item_currency = currency_str

    from_domain_full = get_full_domain(from_domain, from_port)

    actor = local_actor_url(http_prefix, from_nickname, from_domain_full)
    to_url = 'https://www.w3.org/ns/activitystreams#Public'
    cc_url = actor + '/followers'

    # the Offer object describing the wanted item
    wanted_offer = {
        "type": "Offer",
        "displayName": display_name,
        "summary": summary,
        "itemQty": float(item_qty),
        "itemType": item_type,
        "category": item_category,
        "location": location,
        "duration": duration,
        "itemPrice": item_max_price,
        "itemCurrency": item_currency,
        'to': [to_url],
        'cc': [cc_url]
    }
    # Add activity wrapping the Offer, targeting the actor's wanted list
    new_share_json = {
        "@context": "https://www.w3.org/ns/activitystreams",
        'type': 'Add',
        'actor': actor,
        'target': actor + '/wanted',
        'object': wanted_offer,
        'to': [to_url],
        'cc': [cc_url]
    }

    handle = http_prefix + '://' + from_domain_full + '/@' + from_nickname

    # lookup the inbox for the To handle
    wf_request = webfinger_handle(session, handle, http_prefix,
                                  cached_webfingers,
                                  from_domain, project_version, debug,
                                  False, signing_priv_key_pem)
    if not wf_request:
        if debug:
            print('DEBUG: share webfinger failed for ' + handle)
        return 1
    if not isinstance(wf_request, dict):
        print('WARN: wanted webfinger for ' + handle +
              ' did not return a dict. ' + str(wf_request))
        return 1

    post_to_box = 'outbox'

    # get the actor inbox for the To handle
    origin_domain = from_domain
    (inbox_url, _, _, from_person_id, _, _,
     display_name, _) = get_person_box(signing_priv_key_pem,
                                       origin_domain,
                                       base_dir, session, wf_request,
                                       person_cache, project_version,
                                       http_prefix, from_nickname,
                                       from_domain, post_to_box,
                                       23653, system_language)

    if not inbox_url:
        if debug:
            print('DEBUG: wanted no ' + post_to_box +
                  ' was found for ' + handle)
        return 3
    if not from_person_id:
        if debug:
            print('DEBUG: wanted no actor was found for ' + handle)
        return 4

    auth_header = create_basic_auth_header(from_nickname, password)

    if image_filename:
        # upload the item image first, to the wanted endpoint
        headers = {
            'host': from_domain,
            'Authorization': auth_header
        }
        wanted_url = inbox_url.replace('/' + post_to_box, '/wanted')
        post_result = post_image(session, image_filename, [], wanted_url,
                                 headers, http_prefix, from_domain_full)

    headers = {
        'host': from_domain,
        'Content-type': 'application/json',
        'Authorization': auth_header
    }
    post_result = post_json(http_prefix, from_domain_full,
                            session, new_share_json, [], inbox_url,
                            headers, 30, True)
    if not post_result:
        if debug:
            print('DEBUG: POST wanted failed for c2s to ' + inbox_url)
        # return 5

    if debug:
        print('DEBUG: c2s POST wanted item success')

    return new_share_json
|
2021-08-09 19:37:18 +00:00
|
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def send_undo_wanted_via_server(base_dir: str, session,
                                from_nickname: str, password: str,
                                from_domain: str, from_port: int,
                                http_prefix: str, display_name: str,
                                cached_webfingers: {}, person_cache: {},
                                debug: bool, project_version: str,
                                signing_priv_key_pem: str,
                                system_language: str) -> {}:
    """Undoes a wanted item via c2s.
    Returns the Remove activity dict on success, otherwise an
    integer error code.
    """
    if not session:
        print('WARN: No session for send_undo_wanted_via_server')
        return 6

    from_domain_full = get_full_domain(from_domain, from_port)

    actor = local_actor_url(http_prefix, from_nickname, from_domain_full)
    to_url = 'https://www.w3.org/ns/activitystreams#Public'
    cc_url = actor + '/followers'

    # Remove activity identifying the wanted item by its displayName
    undo_share_json = {
        "@context": "https://www.w3.org/ns/activitystreams",
        'type': 'Remove',
        'actor': actor,
        'target': actor + '/wanted',
        'object': {
            "type": "Offer",
            "displayName": display_name,
            'to': [to_url],
            'cc': [cc_url]
        },
        'to': [to_url],
        'cc': [cc_url]
    }

    handle = http_prefix + '://' + from_domain_full + '/@' + from_nickname

    # lookup the inbox for the To handle
    wf_request = webfinger_handle(session, handle, http_prefix,
                                  cached_webfingers,
                                  from_domain, project_version, debug,
                                  False, signing_priv_key_pem)
    if not wf_request:
        if debug:
            print('DEBUG: unwant webfinger failed for ' + handle)
        return 1
    if not isinstance(wf_request, dict):
        print('WARN: unwant webfinger for ' + handle +
              ' did not return a dict. ' + str(wf_request))
        return 1

    post_to_box = 'outbox'

    # get the actor inbox for the To handle
    origin_domain = from_domain
    (inbox_url, _, _, from_person_id, _, _,
     display_name, _) = get_person_box(signing_priv_key_pem,
                                       origin_domain,
                                       base_dir, session, wf_request,
                                       person_cache, project_version,
                                       http_prefix, from_nickname,
                                       from_domain, post_to_box,
                                       12693, system_language)

    if not inbox_url:
        if debug:
            print('DEBUG: unwant no ' + post_to_box +
                  ' was found for ' + handle)
        return 3
    if not from_person_id:
        if debug:
            print('DEBUG: unwant no actor was found for ' + handle)
        return 4

    auth_header = create_basic_auth_header(from_nickname, password)

    headers = {
        'host': from_domain,
        'Content-type': 'application/json',
        'Authorization': auth_header
    }
    post_result = post_json(http_prefix, from_domain_full,
                            session, undo_share_json, [], inbox_url,
                            headers, 30, True)
    if not post_result:
        if debug:
            print('DEBUG: POST unwant failed for c2s to ' + inbox_url)
        # return 5

    if debug:
        print('DEBUG: c2s POST unwant success')

    return undo_share_json
|
2021-08-09 19:37:18 +00:00
|
|
|
|
|
|
|
|
|
|
2022-06-12 22:14:47 +00:00
|
|
|
|
def get_shared_items_catalog_via_server(session, nickname: str, password: str,
                                        domain: str, port: int,
                                        http_prefix: str, debug: bool,
                                        signing_priv_key_pem: str) -> {}:
    """Returns the shared items catalog via c2s.
    On failure an integer error code is returned instead of a dict.
    """
    if not session:
        print('WARN: No session for get_shared_items_catalog_via_server')
        return 6

    basic_auth = create_basic_auth_header(nickname, password)

    request_headers = {
        'host': domain,
        'Content-type': 'application/json',
        'Authorization': basic_auth,
        'Accept': 'application/json'
    }
    actor = local_actor_url(http_prefix, nickname,
                            get_full_domain(domain, port))
    url = actor + '/catalog'
    if debug:
        print('Shared items catalog request to: ' + url)
    catalog_json = get_json(signing_priv_key_pem, session, url,
                            request_headers, None,
                            debug, __version__, http_prefix, None)
    if not get_json_valid(catalog_json):
        if debug:
            print('DEBUG: GET shared items catalog failed for c2s to ' + url)
        # return 5

    if debug:
        print('DEBUG: c2s GET shared items catalog success')

    return catalog_json
|
2021-08-04 12:04:35 +00:00
|
|
|
|
|
|
|
|
|
|
2023-06-27 16:41:33 +00:00
|
|
|
|
def get_offers_via_server(session, nickname: str, password: str,
                          domain: str, port: int,
                          http_prefix: str, debug: bool,
                          signing_priv_key_pem: str) -> {}:
    """Returns the offers collection for shared items via c2s.
    On failure an integer error code is returned instead of a dict.
    """
    if not session:
        print('WARN: No session for get_offers_via_server')
        return 6

    basic_auth = create_basic_auth_header(nickname, password)

    request_headers = {
        'host': domain,
        'Content-type': 'application/json',
        'Authorization': basic_auth,
        'Accept': 'application/json'
    }
    actor = local_actor_url(http_prefix, nickname,
                            get_full_domain(domain, port))
    url = actor + '/offers'
    if debug:
        print('Offers collection request to: ' + url)
    offers_json = get_json(signing_priv_key_pem, session, url,
                           request_headers, None,
                           debug, __version__, http_prefix, None)
    if not get_json_valid(offers_json):
        if debug:
            print('DEBUG: GET offers collection failed for c2s to ' + url)
        # return 5

    if debug:
        print('DEBUG: c2s GET offers collection success')

    return offers_json
|
|
|
|
|
|
|
|
|
|
|
2023-06-27 16:58:15 +00:00
|
|
|
|
def get_wanted_via_server(session, nickname: str, password: str,
                          domain: str, port: int,
                          http_prefix: str, debug: bool,
                          signing_priv_key_pem: str) -> {}:
    """Returns the wanted collection for shared items via c2s.
    On failure an integer error code is returned instead of a dict.
    """
    if not session:
        print('WARN: No session for get_wanted_via_server')
        return 6

    basic_auth = create_basic_auth_header(nickname, password)

    request_headers = {
        'host': domain,
        'Content-type': 'application/json',
        'Authorization': basic_auth,
        'Accept': 'application/json'
    }
    actor = local_actor_url(http_prefix, nickname,
                            get_full_domain(domain, port))
    url = actor + '/wanted'
    if debug:
        print('Wanted collection request to: ' + url)
    wanted_json = get_json(signing_priv_key_pem, session, url,
                           request_headers, None,
                           debug, __version__, http_prefix, None)
    if not get_json_valid(wanted_json):
        if debug:
            print('DEBUG: GET wanted collection failed for c2s to ' + url)
        # return 5

    if debug:
        print('DEBUG: c2s GET wanted collection success')

    return wanted_json
|
|
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def outbox_share_upload(base_dir: str, http_prefix: str,
                        nickname: str, domain: str, port: int,
                        message_json: {}, debug: bool, city: str,
                        system_language: str, translate: {},
                        low_bandwidth: bool,
                        content_license_url: str,
                        block_federated: []) -> None:
    """ When a shared item is received by the outbox from c2s
    """
    if not message_json.get('type'):
        return
    if message_json['type'] != 'Add':
        return
    if not has_object_string_type(message_json, debug):
        return
    if message_json['object']['type'] != 'Offer':
        if debug:
            print('DEBUG: not an Offer activity')
        return
    # required Offer fields, checked in order
    for required_field in ('displayName', 'summary', 'itemQty',
                           'itemType', 'category', 'duration'):
        if not message_json['object'].get(required_field):
            if debug:
                print('DEBUG: ' + required_field + ' missing from Offer')
            return
    item_qty = float(message_json['object']['itemQty'])
    location = ''
    if message_json['object'].get('location'):
        location = message_json['object']['location']
    # an image may optionally have been uploaded beforehand
    image_filename = None
    if message_json['object'].get('image_filename'):
        image_filename = message_json['object']['image_filename']
    if debug:
        print('Adding shared item')
        pprint(message_json)

    add_share(base_dir,
              http_prefix, nickname, domain, port,
              message_json['object']['displayName'],
              message_json['object']['summary'],
              image_filename,
              item_qty,
              message_json['object']['itemType'],
              message_json['object']['category'],
              location,
              message_json['object']['duration'],
              debug, city,
              message_json['object']['itemPrice'],
              message_json['object']['itemCurrency'],
              system_language, translate, 'shares',
              low_bandwidth, content_license_url,
              False, block_federated)
    if debug:
        print('DEBUG: shared item received via c2s')
|
2019-07-23 21:14:16 +00:00
|
|
|
|
|
2020-04-04 11:27:51 +00:00
|
|
|
|
|
2022-06-12 22:14:47 +00:00
|
|
|
|
def outbox_undo_share_upload(base_dir: str, nickname: str, domain: str,
                             message_json: {}, debug: bool) -> None:
    """ When a shared item is removed via c2s
    """
    # guard clauses: only handle a "Remove" activity whose object
    # is an Offer carrying a displayName
    if not message_json.get('type'):
        return
    if message_json['type'] != 'Remove':
        return
    if not has_object_string_type(message_json, debug):
        return
    if message_json['object']['type'] != 'Offer':
        if debug:
            print('DEBUG: not an Offer activity')
        return
    offer_name = message_json['object'].get('displayName')
    if not offer_name:
        if debug:
            print('DEBUG: displayName missing from Offer')
        return
    # delete the named share from this account's shares collection
    remove_shared_item2(base_dir, nickname, domain, offer_name, 'shares')
    if debug:
        print('DEBUG: shared item removed via c2s')
|
2021-07-24 22:08:11 +00:00
|
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def _shares_catalog_params(path: str) -> (bool, float, float, str):
|
2021-07-25 13:09:39 +00:00
|
|
|
|
"""Returns parameters when accessing the shares catalog
|
|
|
|
|
"""
|
|
|
|
|
today = False
|
2022-01-03 16:58:50 +00:00
|
|
|
|
min_price = 0
|
|
|
|
|
max_price = 9999999
|
|
|
|
|
match_pattern = None
|
2021-07-25 13:09:39 +00:00
|
|
|
|
if '?' not in path:
|
2022-01-03 16:58:50 +00:00
|
|
|
|
return today, min_price, max_price, match_pattern
|
2021-07-25 13:09:39 +00:00
|
|
|
|
args = path.split('?', 1)[1]
|
2022-01-03 16:58:50 +00:00
|
|
|
|
arg_list = args.split(';')
|
|
|
|
|
for arg in arg_list:
|
2021-07-25 13:09:39 +00:00
|
|
|
|
if '=' not in arg:
|
|
|
|
|
continue
|
|
|
|
|
key = arg.split('=')[0].lower()
|
|
|
|
|
value = arg.split('=')[1]
|
|
|
|
|
if key == 'today':
|
|
|
|
|
value = value.lower()
|
2021-07-25 13:50:03 +00:00
|
|
|
|
if 't' in value or 'y' in value or '1' in value:
|
2021-07-25 13:09:39 +00:00
|
|
|
|
today = True
|
|
|
|
|
elif key.startswith('min'):
|
2021-12-26 18:03:39 +00:00
|
|
|
|
if is_float(value):
|
2022-01-03 16:58:50 +00:00
|
|
|
|
min_price = float(value)
|
2021-07-25 13:09:39 +00:00
|
|
|
|
elif key.startswith('max'):
|
2021-12-26 18:03:39 +00:00
|
|
|
|
if is_float(value):
|
2022-01-03 16:58:50 +00:00
|
|
|
|
max_price = float(value)
|
2021-07-25 13:09:39 +00:00
|
|
|
|
elif key.startswith('match'):
|
2022-01-03 16:58:50 +00:00
|
|
|
|
match_pattern = value
|
|
|
|
|
return today, min_price, max_price, match_pattern
|
2021-07-25 13:09:39 +00:00
|
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def shares_catalog_account_endpoint(base_dir: str, http_prefix: str,
                                    nickname: str, domain: str,
                                    domain_full: str,
                                    path: str, debug: bool,
                                    shares_file_type: str) -> {}:
    """Returns the endpoint for the shares catalog of a particular account
    See https://github.com/datafoodconsortium/ontology
    Also the subdirectory ontology/DFC

    shares_file_type is either 'shares' or 'wanted'; the query string of
    path may filter by date, price range or a regex match pattern.
    Returns a DFC "Entreprise" document whose DFC:supplies list holds
    one entry per matching shared item.
    """
    # filters extracted from the query string of the request path
    today, min_price, max_price, match_pattern = _shares_catalog_params(path)
    dfc_url = \
        http_prefix + '://' + domain_full + '/ontologies/DFC_FullModel.owl#'
    dfc_pt_url = \
        http_prefix + '://' + domain_full + \
        '/ontologies/DFC_ProductGlossary.rdf#'
    owner = local_actor_url(http_prefix, nickname, domain_full)
    if shares_file_type == 'shares':
        dfc_instance_id = owner + '/catalog'
    else:
        dfc_instance_id = owner + '/wantedItems'
    # skeleton returned even when there are no items
    endpoint = {
        "@context": {
            "DFC": dfc_url,
            "dfc-pt": dfc_pt_url,
            "@base": "http://maPlateformeNationale"
        },
        "@id": dfc_instance_id,
        "@type": "DFC:Entreprise",
        "DFC:supplies": []
    }

    # used for the "today" filter, compared against the item's
    # published date prefix
    curr_date = date_utcnow()
    curr_date_str = curr_date.strftime("%Y-%m-%d")

    shares_filename = \
        acct_dir(base_dir, nickname, domain) + '/' + shares_file_type + '.json'
    if not os.path.isfile(shares_filename):
        if debug:
            print(shares_file_type + '.json file not found: ' +
                  shares_filename)
        return endpoint
    shares_json = load_json(shares_filename)
    if not shares_json:
        if debug:
            print('Unable to load json for ' + shares_filename)
        return endpoint

    for item_id, item in shares_json.items():
        # every catalog entry needs a DFC ontology identifier
        if not item.get('dfcId'):
            if debug:
                print('Item does not have dfcId: ' + item_id)
            continue
        if '#' not in item['dfcId']:
            continue
        if today:
            if not item['published'].startswith(curr_date_str):
                continue
        # NOTE(review): min_price/max_price default to numbers, not None,
        # so these guards are always taken — presumably defensive
        if min_price is not None:
            if float(item['itemPrice']) < min_price:
                continue
        if max_price is not None:
            if float(item['itemPrice']) > max_price:
                continue
        description = item['displayName'] + ': ' + item['summary']
        if match_pattern:
            if not re.match(match_pattern, description):
                continue

        # 'expire' is a unix timestamp; render it as an ISO-style UTC string
        expire_date = datetime.datetime.fromtimestamp(item['expire'],
                                                      datetime.timezone.utc)
        expire_date_str = expire_date.strftime("%Y-%m-%dT%H:%M:%SZ")

        share_id = _get_valid_shared_item_id(owner, item['displayName'])
        # dfcId is either from the local epicyon ontology or from the
        # DFC product glossary
        if item['dfcId'].startswith('epicyon#'):
            dfc_id = "epicyon:" + item['dfcId'].split('#')[1]
        else:
            dfc_id = "dfc-pt:" + item['dfcId'].split('#')[1]
        price_str = item['itemPrice'] + ' ' + item['itemCurrency']
        catalog_item = {
            "@id": share_id,
            "@type": "DFC:SuppliedProduct",
            "DFC:hasType": dfc_id,
            "DFC:startDate": item['published'],
            "DFC:expiryDate": expire_date_str,
            "DFC:quantity": float(item['itemQty']),
            "DFC:price": price_str,
            "DFC:Image": item['imageUrl'],
            "DFC:description": description
        }
        endpoint['DFC:supplies'].append(catalog_item)

    return endpoint
|
|
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def shares_catalog_endpoint(base_dir: str, http_prefix: str,
                            domain_full: str,
                            path: str, shares_file_type: str) -> {}:
    """Returns the endpoint for the shares catalog for the instance
    See https://github.com/datafoodconsortium/ontology
    Also the subdirectory ontology/DFC

    Aggregates the shares (or wanted items) of every account on this
    instance into a single DFC "Entreprise" document. The query string
    of path may filter by date, price range or a regex match pattern.
    """
    today, min_price, max_price, match_pattern = _shares_catalog_params(path)
    dfc_url = \
        http_prefix + '://' + domain_full + '/ontologies/DFC_FullModel.owl#'
    dfc_pt_url = \
        http_prefix + '://' + domain_full + \
        '/ontologies/DFC_ProductGlossary.rdf#'
    dfc_instance_id = http_prefix + '://' + domain_full + '/catalog'
    # skeleton returned even when there are no items
    endpoint = {
        "@context": {
            "DFC": dfc_url,
            "dfc-pt": dfc_pt_url,
            "@base": "http://maPlateformeNationale"
        },
        "@id": dfc_instance_id,
        "@type": "DFC:Entreprise",
        "DFC:supplies": []
    }

    # used for the "today" filter
    curr_date = date_utcnow()
    curr_date_str = curr_date.strftime("%Y-%m-%d")

    dir_str = data_dir(base_dir)
    for _, dirs, _ in os.walk(dir_str):
        for acct in dirs:
            if not is_account_dir(acct):
                continue
            nickname = acct.split('@')[0]
            domain = acct.split('@')[1]
            owner = local_actor_url(http_prefix, nickname, domain_full)

            shares_filename = \
                acct_dir(base_dir, nickname, domain) + '/' + \
                shares_file_type + '.json'
            if not os.path.isfile(shares_filename):
                continue
            shares_json = load_json(shares_filename)
            if not shares_json:
                continue

            for _, item in shares_json.items():
                if not item.get('dfcId'):
                    continue
                if '#' not in item['dfcId']:
                    continue
                if today:
                    if not item['published'].startswith(curr_date_str):
                        continue
                if min_price is not None:
                    if float(item['itemPrice']) < min_price:
                        continue
                if max_price is not None:
                    if float(item['itemPrice']) > max_price:
                        continue
                description = item['displayName'] + ': ' + item['summary']
                if match_pattern:
                    if not re.match(match_pattern, description):
                        continue

                start_date_str = date_seconds_to_string(item['published'])
                expire_date_str = date_seconds_to_string(item['expire'])
                share_id = \
                    _get_valid_shared_item_id(owner, item['displayName'])
                # dfcId is either from the local epicyon ontology or
                # from the DFC product glossary
                if item['dfcId'].startswith('epicyon#'):
                    dfc_id = "epicyon:" + item['dfcId'].split('#')[1]
                else:
                    dfc_id = "dfc-pt:" + item['dfcId'].split('#')[1]
                price_str = item['itemPrice'] + ' ' + item['itemCurrency']
                catalog_item = {
                    "@id": share_id,
                    "@type": "DFC:SuppliedProduct",
                    "DFC:hasType": dfc_id,
                    "DFC:startDate": start_date_str,
                    "DFC:expiryDate": expire_date_str,
                    "DFC:quantity": float(item['itemQty']),
                    "DFC:price": price_str,
                    "DFC:Image": item['imageUrl'],
                    "DFC:description": description
                }
                endpoint['DFC:supplies'].append(catalog_item)
        # only walk the top level of the accounts directory
        break

    return endpoint
|
2021-07-25 13:30:42 +00:00
|
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def shares_catalog_csv_endpoint(base_dir: str, http_prefix: str,
                                domain_full: str,
                                path: str, shares_file_type: str) -> str:
    """Returns a CSV version of the shares catalog

    One row per supplied product, with the same column order as the
    header line. Returns an empty string when there is no catalog or
    it contains no supplies.
    """
    catalog_json = \
        shares_catalog_endpoint(base_dir, http_prefix, domain_full, path,
                                shares_file_type)
    if not catalog_json:
        return ''
    if not catalog_json.get('DFC:supplies'):
        return ''
    csv_str = \
        'id,type,hasType,startDate,expiryDate,' + \
        'quantity,price,currency,Image,description,\n'
    for item in catalog_json['DFC:supplies']:
        csv_str += '"' + item['@id'] + '",'
        csv_str += '"' + item['@type'] + '",'
        csv_str += '"' + item['DFC:hasType'] + '",'
        csv_str += '"' + item['DFC:startDate'] + '",'
        csv_str += '"' + item['DFC:expiryDate'] + '",'
        csv_str += str(item['DFC:quantity']) + ','
        # DFC:price is "<amount> <currency>"
        csv_str += item['DFC:price'].split(' ')[0] + ','
        csv_str += '"' + item['DFC:price'].split(' ')[1] + '",'
        if item.get('DFC:Image'):
            csv_str += '"' + item['DFC:Image'] + '",'
        else:
            # emit an empty field so rows without an image keep the
            # same column count as the header
            csv_str += '"",'
        # double quotes would break the quoting, so downgrade them
        description = item['DFC:description'].replace('"', "'")
        csv_str += '"' + description + '",\n'
    return csv_str
|
2021-07-26 09:40:51 +00:00
|
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def generate_shared_item_federation_tokens(shared_items_federated_domains: [],
                                           base_dir: str) -> {}:
    """Generates tokens for shared item federated domains

    Ensures that each federated domain has an entry in the tokens dict,
    initially empty. Persists the dict when base_dir is given and a
    new entry was added.
    """
    if not shared_items_federated_domains:
        # shared item federation is not enabled
        return {}

    tokens_json = {}
    if base_dir:
        # start from any tokens already stored on disk
        tokens_filename = \
            data_dir(base_dir) + '/sharedItemsFederationTokens.json'
        if os.path.isfile(tokens_filename):
            tokens_json = load_json(tokens_filename)
            if tokens_json is None:
                tokens_json = {}

    # make sure that every federated domain has an entry
    tokens_added = False
    for domain_full in shared_items_federated_domains:
        if tokens_json.get(domain_full):
            continue
        tokens_json[domain_full] = ''
        tokens_added = True

    if tokens_added and base_dir:
        save_json(tokens_json, tokens_filename)
    return tokens_json
|
2021-07-26 10:00:54 +00:00
|
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def update_shared_item_federation_token(base_dir: str,
                                        token_domain_full: str, new_token: str,
                                        debug: bool,
                                        tokens_json: {} = None) -> {}:
    """Updates an individual token for shared item federation

    Stores new_token against token_domain_full, persisting the tokens
    dict when base_dir is given and the token actually changed.
    """
    if debug:
        print('Updating shared items token for ' + token_domain_full)
    if not tokens_json:
        tokens_json = {}
    if base_dir:
        # tokens stored on disk take precedence over the supplied dict
        tokens_filename = \
            data_dir(base_dir) + '/sharedItemsFederationTokens.json'
        if os.path.isfile(tokens_filename):
            if debug:
                print('Update loading tokens for ' + token_domain_full)
            tokens_json = load_json(tokens_filename)
            if tokens_json is None:
                tokens_json = {}
    # only write when the token is new or has changed
    existing_token = tokens_json.get(token_domain_full)
    if not existing_token or existing_token != new_token:
        tokens_json[token_domain_full] = new_token
        if base_dir:
            save_json(tokens_json, tokens_filename)
    return tokens_json
|
2021-07-26 12:20:07 +00:00
|
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def merge_shared_item_tokens(base_dir: str, domain_full: str,
                             new_shared_items_federated_domains: [],
                             tokens_json: {}) -> {}:
    """When the shared item federation domains list has changed, update
    the tokens dict accordingly

    Entries for domains no longer federated are dropped (except this
    instance's own entry) and empty entries are created for any newly
    federated domains. Persists the dict when base_dir is given and
    something changed.
    """
    changed = False
    # remove domains no longer in the federation list,
    # but never this instance's own entry
    stale_domains = []
    for token_domain_full in tokens_json:
        if domain_full and token_domain_full.startswith(domain_full):
            continue
        if token_domain_full not in new_shared_items_federated_domains:
            stale_domains.append(token_domain_full)
    for token_domain_full in stale_domains:
        del tokens_json[token_domain_full]
        changed = True
    # add new domains from the federation list
    for token_domain_full in new_shared_items_federated_domains:
        if token_domain_full not in tokens_json:
            tokens_json[token_domain_full] = ''
            changed = True
    if base_dir and changed:
        tokens_filename = \
            data_dir(base_dir) + '/sharedItemsFederationTokens.json'
        save_json(tokens_json, tokens_filename)
    return tokens_json
|
2021-07-26 21:01:48 +00:00
|
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def create_shared_item_federation_token(base_dir: str,
                                        token_domain_full: str,
                                        force: bool,
                                        tokens_json: {} = None) -> {}:
    """Updates an individual token for shared item federation

    Generates a fresh random token for token_domain_full when forced
    or when no token exists yet, persisting the dict when base_dir
    is given.
    """
    if not tokens_json:
        tokens_json = {}
    if base_dir:
        # start from any tokens already stored on disk
        tokens_filename = \
            data_dir(base_dir) + '/sharedItemsFederationTokens.json'
        if os.path.isfile(tokens_filename):
            tokens_json = load_json(tokens_filename)
            if tokens_json is None:
                tokens_json = {}
    if force or not tokens_json.get(token_domain_full):
        # cryptographically strong random token
        tokens_json[token_domain_full] = secrets.token_urlsafe(64)
        if base_dir:
            save_json(tokens_json, tokens_filename)
    return tokens_json
|
2021-07-26 12:20:07 +00:00
|
|
|
|
|
|
|
|
|
|
2021-12-28 21:36:27 +00:00
|
|
|
|
def authorize_shared_items(shared_items_federated_domains: [],
                           base_dir: str,
                           origin_domain_full: str,
                           calling_domain_full: str,
                           auth_header: str,
                           debug: bool,
                           tokens_json: {} = None) -> bool:
    """HTTP simple token check for shared item federation

    Returns True when the token in auth_header matches the one stored
    for calling_domain_full and the origin domain is federated.
    """
    if not shared_items_federated_domains:
        # no shared item federation
        return False
    if origin_domain_full not in shared_items_federated_domains:
        if debug:
            print(origin_domain_full +
                  ' is not in the shared items federation list ' +
                  str(shared_items_federated_domains))
        return False
    if 'Basic ' in auth_header:
        # federation tokens are bearer-style, never basic auth
        if debug:
            print('DEBUG: shared item federation should not use basic auth')
        return False
    provided_token = remove_eol(auth_header).strip()
    if not provided_token:
        if debug:
            print('DEBUG: shared item federation token is empty')
        return False
    if len(provided_token) < 60:
        if debug:
            print('DEBUG: shared item federation token is too small ' +
                  provided_token)
        return False
    if not tokens_json:
        # fall back to the tokens stored on disk
        tokens_filename = \
            data_dir(base_dir) + '/sharedItemsFederationTokens.json'
        if not os.path.isfile(tokens_filename):
            if debug:
                print('DEBUG: shared item federation tokens file missing ' +
                      tokens_filename)
            return False
        tokens_json = load_json(tokens_filename)
    if not tokens_json:
        return False
    expected_token = tokens_json.get(calling_domain_full)
    if not expected_token:
        if debug:
            print('DEBUG: shared item federation token ' +
                  'check failed for ' + calling_domain_full)
        return False
    # constant time comparison avoids leaking token content via timing
    if not constant_time_string_check(expected_token, provided_token):
        if debug:
            print('DEBUG: shared item federation token ' +
                  'mismatch for ' + calling_domain_full)
        return False
    return True
|
2021-07-26 17:54:13 +00:00
|
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def _update_federated_shares_cache(session, shared_items_federated_domains: [],
                                   base_dir: str, domain_full: str,
                                   http_prefix: str,
                                   tokens_json: {}, debug: bool,
                                   system_language: str,
                                   shares_file_type: str,
                                   sites_unavailable: []) -> None:
    """Updates the cache of federated shares for the instance.
    This enables shared items to be available even when other instances
    might not be online

    Downloads the /catalog (or /wantedItems) endpoint of each federated
    domain for which a token is held, stores the raw DFC catalog under
    base_dir/cache, and also stores a converted shares-format copy.
    """
    # create directories where catalogs will be stored
    cache_dir = base_dir + '/cache'
    if not os.path.isdir(cache_dir):
        os.mkdir(cache_dir)
    if shares_file_type == 'shares':
        catalogs_dir = cache_dir + '/catalogs'
    else:
        catalogs_dir = cache_dir + '/wantedItems'
    if not os.path.isdir(catalogs_dir):
        os.mkdir(catalogs_dir)

    # headers sent with every catalog request; the Origin header is how
    # the remote instance knows who is asking
    as_header = {
        "Accept": "application/ld+json",
        "Origin": domain_full
    }
    for federated_domain_full in shared_items_federated_domains:
        # NOTE: federatedDomain does not have a port extension,
        # so may not work in some situations
        if federated_domain_full.startswith(domain_full):
            # only download from instances other than this one
            continue
        if not tokens_json.get(federated_domain_full):
            # no token has been obtained for the other domain yet
            continue
        # skip instances which are known to be offline
        if not site_is_active(http_prefix + '://' + federated_domain_full, 10,
                              sites_unavailable):
            continue
        if shares_file_type == 'shares':
            url = http_prefix + '://' + federated_domain_full + '/catalog'
        else:
            url = http_prefix + '://' + federated_domain_full + '/wantedItems'
        as_header['Authorization'] = tokens_json[federated_domain_full]
        catalog_json = get_json(session, url, as_header, None,
                                debug, __version__, http_prefix, None)
        if not get_json_valid(catalog_json):
            print('WARN: failed to download shared items catalog for ' +
                  federated_domain_full)
            continue
        # store the raw DFC catalog
        catalog_filename = catalogs_dir + '/' + federated_domain_full + '.json'
        if save_json(catalog_json, catalog_filename):
            print('Downloaded shared items catalog for ' +
                  federated_domain_full)
            # also store a copy converted to the internal shares format
            shares_json = _dfc_to_shares_format(catalog_json,
                                                base_dir, system_language,
                                                http_prefix, domain_full)
            if shares_json:
                shares_filename = \
                    catalogs_dir + '/' + federated_domain_full + '.' + \
                    shares_file_type + '.json'
                save_json(shares_json, shares_filename)
                print('Converted shares catalog for ' + federated_domain_full)
        else:
            # save failed; back off briefly before the next domain
            time.sleep(2)
|
|
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def run_federated_shares_watchdog(project_version: str, httpd) -> None:
    """This tries to keep the federated shares update thread
    running even if it dies

    Polls every 55 seconds; when the daemon thread is no longer alive
    it is killed and restarted from a cloned thread template.
    """
    print('THREAD: Starting federated shares watchdog')
    # NOTE(review): the template is cloned from thrPostSchedule rather
    # than thrFederatedSharesDaemon — confirm this is intentional
    federated_shares_original = \
        httpd.thrPostSchedule.clone(run_federated_shares_daemon)
    begin_thread(httpd.thrFederatedSharesDaemon,
                 'run_federated_shares_watchdog')
    while True:
        time.sleep(55)
        if httpd.thrFederatedSharesDaemon.is_alive():
            continue
        # the daemon died: replace it with a fresh clone
        httpd.thrFederatedSharesDaemon.kill()
        print('THREAD: restarting federated shares watchdog')
        httpd.thrFederatedSharesDaemon = \
            federated_shares_original.clone(run_federated_shares_daemon)
        begin_thread(httpd.thrFederatedSharesDaemon,
                     'run_federated_shares_watchdog 2')
        print('Restarting federated shares daemon...')
|
|
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def _generate_next_shares_token_update(base_dir: str,
                                       min_days: int, max_days: int) -> None:
    """Creates a file containing the next date when the shared items token
    for this instance will be updated

    The date is stored as unix seconds in .tokenUpdate within the data
    directory. If a date already exists and has passed, it is advanced
    by a random interval of min_days..max_days; otherwise a fresh date
    that far in the future is written.
    """
    token_update_dir = data_dir(base_dir)
    if not os.path.isdir(base_dir):
        os.mkdir(base_dir)
    if not os.path.isdir(token_update_dir):
        os.mkdir(token_update_dir)
    token_update_filename = token_update_dir + '/.tokenUpdate'
    # read any previously scheduled update time (unix seconds)
    next_update_sec = None
    if os.path.isfile(token_update_filename):
        try:
            with open(token_update_filename, 'r', encoding='utf-8') as fp_tok:
                next_update_str = fp_tok.read()
                if next_update_str:
                    if next_update_str.isdigit():
                        next_update_sec = int(next_update_str)
        except OSError:
            print('EX: _generate_next_shares_token_update unable to read ' +
                  token_update_filename)
    curr_time = int(time.time())
    updated = False
    if next_update_sec:
        # only reschedule once the previous update time has passed
        if curr_time > next_update_sec:
            next_update_days = randint(min_days, max_days)
            next_update_interval = int(60 * 60 * 24 * next_update_days)
            next_update_sec += next_update_interval
            updated = True
    else:
        # no schedule yet: pick a random point in the future
        next_update_days = randint(min_days, max_days)
        next_update_interval = int(60 * 60 * 24 * next_update_days)
        next_update_sec = curr_time + next_update_interval
        updated = True
    if updated:
        try:
            with open(token_update_filename, 'w+', encoding='utf-8') as fp_tok:
                fp_tok.write(str(next_update_sec))
        except OSError:
            # fixed missing space between message and filename
            print('EX: _generate_next_shares_token_update unable to write ' +
                  token_update_filename)
|
2021-08-07 10:29:40 +00:00
|
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def _regenerate_shares_token(base_dir: str, domain_full: str,
                             min_days: int, max_days: int, httpd) -> None:
    """Occasionally rotates the shared items token for this instance.

    Scenario:
      - You share items with $FriendlyInstance
      - Some time later under new management
        $FriendlyInstance becomes $HostileInstance
      - You block $HostileInstance and remove them from your
        federated shares domains list
      - $HostileInstance still knows your shared items token,
        and can still have access to your shared items if it presents a
        spoofed Origin header together with the token

    By rotating the token occasionally $HostileInstance will eventually
    lose access to your federated shares. Instances remaining on the
    federated shares domains list receive the new token automatically.
    """
    update_filename = data_dir(base_dir) + '/.tokenUpdate'
    if not os.path.isfile(update_filename):
        return

    # read the time (seconds since epoch) when the next rotation is due
    due_sec = None
    try:
        with open(update_filename, 'r', encoding='utf-8') as fp_tok:
            due_str = fp_tok.read()
            if due_str and due_str.isdigit():
                due_sec = int(due_str)
    except OSError:
        print('EX: _regenerate_shares_token unable to read ' +
              update_filename)
    if not due_sec:
        return

    # rotation not yet due
    if int(time.time()) <= due_sec:
        return

    # create a new token and schedule the next rotation
    create_shared_item_federation_token(base_dir, domain_full, True, None)
    _generate_next_shares_token_update(base_dir, min_days, max_days)

    # update the tokens used within the daemon
    fed_domains = httpd.shared_items_federated_domains
    httpd.shared_item_federation_tokens = \
        generate_shared_item_federation_tokens(fed_domains, base_dir)
2021-12-29 21:55:09 +00:00
|
|
|
|
def run_federated_shares_daemon(base_dir: str, httpd, http_prefix: str,
                                domain_full: str, proxy_type: str, debug: bool,
                                system_language: str) -> None:
    """Runs the daemon used to update federated shared items

    Loops forever: reads the federated shares domain list from the
    instance config, occasionally rotates this instance's shared items
    token, then refreshes the local cache of shared items from each
    federated domain roughly every six hours.
    """
    seconds_per_hour = 60 * 60
    # retry interval used whenever config or token files are missing
    file_check_interval_sec = 120
    # initial delay before the first pass, presumably to let the
    # daemon finish starting up — TODO confirm
    time.sleep(60)
    # the token for this instance will be changed every 7-14 days
    min_days = 7
    max_days = 14
    _generate_next_shares_token_update(base_dir, min_days, max_days)
    # accumulates unreachable domains; passed through to
    # _update_federated_shares_cache on every pass
    sites_unavailable = []
    while True:
        # comma separated list of domains in the shares federation
        shared_items_federated_domains_str = \
            get_config_param(base_dir, 'sharedItemsFederatedDomains')
        if not shared_items_federated_domains_str:
            time.sleep(file_check_interval_sec)
            continue

        # occasionally change the federated shared items token
        # for this instance
        _regenerate_shares_token(base_dir, domain_full,
                                 min_days, max_days, httpd)

        # get a list of the domains within the shared items federation
        shared_items_federated_domains = []
        fed_domains_list = \
            shared_items_federated_domains_str.split(',')
        for shared_fed_domain in fed_domains_list:
            shared_items_federated_domains.append(shared_fed_domain.strip())
        if not shared_items_federated_domains:
            time.sleep(file_check_interval_sec)
            continue

        # load the tokens
        tokens_filename = \
            data_dir(base_dir) + '/sharedItemsFederationTokens.json'
        if not os.path.isfile(tokens_filename):
            time.sleep(file_check_interval_sec)
            continue
        tokens_json = load_json(tokens_filename)
        if not tokens_json:
            time.sleep(file_check_interval_sec)
            continue

        # refresh the cache for each shares file type
        # (e.g. shares/wanted — see get_shares_files_list)
        session = create_session(proxy_type)
        for shares_file_type in get_shares_files_list():
            _update_federated_shares_cache(session,
                                           shared_items_federated_domains,
                                           base_dir, domain_full, http_prefix,
                                           tokens_json, debug, system_language,
                                           shares_file_type, sites_unavailable)
        # wait six hours before the next refresh
        time.sleep(seconds_per_hour * 6)
|
|
|
|
|
2022-01-03 16:58:50 +00:00
|
|
|
|
def _dfc_to_shares_format(catalog_json: {},
                          base_dir: str, system_language: str,
                          http_prefix: str, domain_full: str) -> {}:
    """Converts DFC format into the internal format used to store shared items.
    This simplifies subsequent search and display

    catalog_json is a Data Food Consortium catalog, typically received
    from another instance, so every field is treated as untrusted and
    validated before use. Returns a dict of shared items indexed by
    their '@id'; malformed, expired or globally-filtered entries are
    skipped.
    """
    if not catalog_json.get('DFC:supplies'):
        return {}
    shares_json = {}

    # lookup tables of share type -> DFC id, one per product category
    dfc_ids = {}
    product_types_list = get_category_types(base_dir)
    for product_type in product_types_list:
        dfc_ids[product_type] = \
            _load_dfc_ids(base_dir, system_language, product_type,
                          http_prefix, domain_full)

    curr_time = int(time.time())
    for item in catalog_json['DFC:supplies']:
        # all of these fields must be present and non-empty
        if not item.get('@id') or \
           not item.get('@type') or \
           not item.get('DFC:hasType') or \
           not item.get('DFC:startDate') or \
           not item.get('DFC:expiryDate') or \
           not item.get('DFC:quantity') or \
           not item.get('DFC:price') or \
           not item.get('DFC:description'):
            continue

        # price is expected in "amount currency" form
        if ' ' not in item['DFC:price']:
            continue
        # description is expected in "name: details" form
        if ':' not in item['DFC:description']:
            continue
        # hasType is expected in "prefix:type" form
        if ':' not in item['DFC:hasType']:
            continue

        # quantity comes from a remote, untrusted catalog, so a
        # malformed value must not crash the federation daemon
        try:
            item_qty = float(item['DFC:quantity'])
        except (ValueError, TypeError):
            continue

        start_time_sec = date_string_to_seconds(item['DFC:startDate'])
        if not start_time_sec:
            continue
        expiry_time_sec = date_string_to_seconds(item['DFC:expiryDate'])
        if not expiry_time_sec:
            continue
        if expiry_time_sec < curr_time:
            # has expired
            continue

        if item['DFC:hasType'].startswith('epicyon:'):
            # epicyon's own non-food share types
            item_type = item['DFC:hasType'].split(':')[1]
            item_type = item_type.replace('_', ' ')
            item_category = 'non-food'
            product_type = None
        else:
            # DFC food types: reverse-lookup the share type from the
            # DFC id within each product category
            has_type = item['DFC:hasType'].split(':')[1]
            item_type = None
            product_type = None
            for prod_type in product_types_list:
                item_type = \
                    _getshare_type_from_dfc_id(has_type, dfc_ids[prod_type])
                if item_type:
                    product_type = prod_type
                    break
            item_category = 'food'
            if not item_type:
                continue

        # apply the global content filter to the item's text
        all_text = \
            item['DFC:description'] + ' ' + item_type + ' ' + item_category
        if is_filtered_globally(base_dir, all_text, system_language):
            continue

        dfc_id = None
        if product_type:
            dfc_id = dfc_ids[product_type][item_type]
        item_id = item['@id']
        description = item['DFC:description'].split(':', 1)[1].strip()

        image_url = ''
        if item.get('DFC:Image'):
            image_url = item['DFC:Image']
        shares_json[item_id] = {
            "displayName": item['DFC:description'].split(':')[0],
            "summary": description,
            "imageUrl": image_url,
            "itemQty": item_qty,
            "dfcId": dfc_id,
            "itemType": item_type,
            "category": item_category,
            "location": "",
            "published": start_time_sec,
            "expire": expiry_time_sec,
            "itemPrice": item['DFC:price'].split(' ')[0],
            "itemCurrency": item['DFC:price'].split(' ')[1],
            "shareOnProfile": False
        }
    return shares_json
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
|
def share_category_icon(category: str) -> str:
    """Returns unicode icon for the given category
    """
    icons = {
        'accommodation': '🏠',
        'clothes': '👚',
        'tools': '🔧',
        'food': '🍏'
    }
    # unknown categories have no icon
    return icons.get(category, '')
2023-08-22 17:13:35 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _currency_to_wikidata(currency_type: str) -> str:
|
|
|
|
|
"""Converts a currency type, such as USD, into a wikidata reference
|
|
|
|
|
"""
|
|
|
|
|
currencies = {
|
|
|
|
|
"GBP": "https://www.wikidata.org/wiki/Q25224",
|
|
|
|
|
"EUR": "https://www.wikidata.org/wiki/Q4916",
|
|
|
|
|
"CAD": "https://www.wikidata.org/wiki/Q1104069",
|
|
|
|
|
"USD": "https://www.wikidata.org/wiki/Q4917",
|
|
|
|
|
"AUD": "https://www.wikidata.org/wiki/Q259502",
|
|
|
|
|
"PKR": "https://www.wikidata.org/wiki/Q188289",
|
|
|
|
|
"PEN": "https://www.wikidata.org/wiki/Q204656",
|
|
|
|
|
"PAB": "https://www.wikidata.org/wiki/Q210472",
|
|
|
|
|
"PHP": "https://www.wikidata.org/wiki/Q17193",
|
|
|
|
|
"RWF": "https://www.wikidata.org/wiki/Q4741",
|
|
|
|
|
"NZD": "https://www.wikidata.org/wiki/Q1472704",
|
|
|
|
|
"MXN": "https://www.wikidata.org/wiki/Q4730",
|
|
|
|
|
"JMD": "https://www.wikidata.org/wiki/Q209792",
|
|
|
|
|
"ISK": "https://www.wikidata.org/wiki/Q131473",
|
|
|
|
|
"EGP": "https://www.wikidata.org/wiki/Q199462",
|
|
|
|
|
"CNY": "https://www.wikidata.org/wiki/Q39099",
|
|
|
|
|
"AFN": "https://www.wikidata.org/wiki/Q199471",
|
|
|
|
|
"AWG": "https://www.wikidata.org/wiki/Q232270",
|
|
|
|
|
"AZN": "https://www.wikidata.org/wiki/Q483725",
|
|
|
|
|
"BYN": "https://www.wikidata.org/wiki/Q21531507",
|
|
|
|
|
"BZD": "https://www.wikidata.org/wiki/Q275112",
|
|
|
|
|
"BOB": "https://www.wikidata.org/wiki/Q200737",
|
|
|
|
|
"BAM": "https://www.wikidata.org/wiki/Q179620",
|
|
|
|
|
"BWP": "https://www.wikidata.org/wiki/Q186794",
|
|
|
|
|
"BGN": "https://www.wikidata.org/wiki/Q172540",
|
|
|
|
|
"BRL": "https://www.wikidata.org/wiki/Q173117",
|
|
|
|
|
"KHR": "https://www.wikidata.org/wiki/Q204737",
|
|
|
|
|
"UYU": "https://www.wikidata.org/wiki/Q209272",
|
|
|
|
|
"DOP": "https://www.wikidata.org/wiki/Q242922",
|
|
|
|
|
"CRC": "https://www.wikidata.org/wiki/Q242915",
|
|
|
|
|
"HRK": "https://www.wikidata.org/wiki/Q595634",
|
|
|
|
|
"CUP": "https://www.wikidata.org/wiki/Q201505",
|
|
|
|
|
"CZK": "https://www.wikidata.org/wiki/Q131016",
|
|
|
|
|
"NOK": "https://www.wikidata.org/wiki/Q132643",
|
|
|
|
|
"GHS": "https://www.wikidata.org/wiki/Q183530",
|
|
|
|
|
"GTQ": "https://www.wikidata.org/wiki/Q207396",
|
|
|
|
|
"HNL": "https://www.wikidata.org/wiki/Q4719",
|
|
|
|
|
"HUF": "https://www.wikidata.org/wiki/Q47190",
|
|
|
|
|
"IDR": "https://www.wikidata.org/wiki/Q41588",
|
|
|
|
|
"INR": "https://www.wikidata.org/wiki/Q80524",
|
|
|
|
|
"IRR": "https://www.wikidata.org/wiki/Q188608",
|
|
|
|
|
"ILS": "https://www.wikidata.org/wiki/Q131309",
|
|
|
|
|
"JPY": "https://www.wikidata.org/wiki/Q8146",
|
|
|
|
|
"KRW": "https://www.wikidata.org/wiki/Q202040",
|
|
|
|
|
"LAK": "https://www.wikidata.org/wiki/Q200055",
|
|
|
|
|
"MKD": "https://www.wikidata.org/wiki/Q177875",
|
|
|
|
|
"MYR": "https://www.wikidata.org/wiki/Q163712",
|
|
|
|
|
"MUR": "https://www.wikidata.org/wiki/Q212967",
|
|
|
|
|
"MNT": "https://www.wikidata.org/wiki/Q183435",
|
|
|
|
|
"MZN": "https://www.wikidata.org/wiki/Q200753",
|
|
|
|
|
"NIO": "https://www.wikidata.org/wiki/Q207312",
|
|
|
|
|
"NGN": "https://www.wikidata.org/wiki/Q203567",
|
|
|
|
|
"PYG": "https://www.wikidata.org/wiki/Q207514",
|
|
|
|
|
"PLN": "https://www.wikidata.org/wiki/Q123213",
|
|
|
|
|
"RON": "https://www.wikidata.org/wiki/Q131645",
|
|
|
|
|
"RUB": "https://www.wikidata.org/wiki/Q41044",
|
|
|
|
|
"RSD": "https://www.wikidata.org/wiki/Q172524",
|
|
|
|
|
"SOS": "https://www.wikidata.org/wiki/Q4603",
|
|
|
|
|
"ZAR": "https://www.wikidata.org/wiki/Q181907",
|
|
|
|
|
"CHF": "https://www.wikidata.org/wiki/Q25344",
|
|
|
|
|
"TWD": "https://www.wikidata.org/wiki/Q208526",
|
|
|
|
|
"THB": "https://www.wikidata.org/wiki/Q177882",
|
|
|
|
|
"TTD": "https://www.wikidata.org/wiki/Q242890",
|
|
|
|
|
"UAH": "https://www.wikidata.org/wiki/Q81893",
|
|
|
|
|
"VES": "https://www.wikidata.org/wiki/Q56349362",
|
|
|
|
|
"VEB": "https://www.wikidata.org/wiki/Q56349362",
|
|
|
|
|
"VND": "https://www.wikidata.org/wiki/Q192090"
|
|
|
|
|
}
|
|
|
|
|
currency_type = currency_type.upper()
|
|
|
|
|
for curr, curr_url in currencies.items():
|
|
|
|
|
if curr in currency_type:
|
|
|
|
|
return curr_url
|
|
|
|
|
return "https://www.wikidata.org/wiki/Q25224"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _vf_share_id(share_id: str) -> str:
|
|
|
|
|
"""returns the share id
|
|
|
|
|
"""
|
|
|
|
|
share_id = share_id.replace('___', '://')
|
|
|
|
|
return share_id.replace('--', '/')
|
|
|
|
|
|
|
|
|
|
|
2024-01-30 10:19:33 +00:00
|
|
|
|
def vf_proposal_from_share(shared_item: {}, share_type: str) -> {}:
    """Returns a ValueFlows proposal from a shared item

    shared_item is an entry in the internal shares format (it must
    contain a 'shareId' field). share_type becomes the proposal's
    'purpose' (e.g. 'offer' or 'request' — see vf_proposal_from_id).
    Returns an empty dict when shareId is missing.
    """
    if not shared_item.get('shareId'):
        return {}
    # ontology of units of measure, used for quantity fields
    om2_link = \
        "http://www.ontology-of-units-of-measure.org/resource/om-2/"
    share_id = _vf_share_id(shared_item['shareId'])
    published = date_seconds_to_string(shared_item['published'])
    actor_url = get_actor_from_post(shared_item)
    offer_item = {
        "@context": [
            "https://www.w3.org/ns/activitystreams",
            {
                "om2": om2_link,
                "vf": "https://w3id.org/valueflows/ont/vf#",
                "Proposal": "vf:Proposal",
                "Intent": "vf:Intent",
                "action": "vf:action",
                "purpose": "vf:purpose",
                "unitBased": "vf:unitBased",
                "publishes": "vf:publishes",
                "reciprocal": "vf:reciprocal",
                "resourceConformsTo": "vf:resourceConformsTo",
                "resourceQuantity": "vf:resourceQuantity",
                "hasUnit": "om2:hasUnit",
                "hasNumericalValue": "om2:hasNumericalValue"
            }
        ],
        "type": "Proposal",
        "purpose": share_type,
        "id": share_id,
        "attributedTo": actor_url,
        "name": shared_item['displayName'],
        "content": shared_item['summary'],
        "published": published,
        # the primary intent: transferring the shared item itself
        "publishes": {
            "type": "Intent",
            "id": share_id + '#primary',
            "action": "transfer",
            "resourceQuantity": {
                "hasUnit": "one",
                "hasNumericalValue": str(shared_item['itemQty'])
            },
        },
        "attachment": [],
        "unitBased": False,
        "to": "https://www.w3.org/ns/activitystreams#Public"
    }
    if shared_item.get('dfcId'):
        # link the item to its Data Food Consortium product type
        offer_item['publishes']['resourceConformsTo'] = \
            shared_item['dfcId']
    if shared_item['category']:
        offer_item['attachment'].append({
            "type": "PropertyValue",
            "name": "category",
            "value": shared_item['category']
        })
    if shared_item['location']:
        # pixelfed style representation of location
        offer_item['location'] = {
            "type": "Place",
            "name": shared_item['location'].title()
        }
    if shared_item['imageUrl']:
        if resembles_url(shared_item['imageUrl']):
            # only attach the image if its extension is recognized
            file_extension = None
            accepted_types = get_media_extensions()
            for mtype in accepted_types:
                if shared_item['imageUrl'].endswith('.' + mtype):
                    if mtype == 'jpg':
                        mtype = 'jpeg'
                    if mtype == 'mp3':
                        mtype = 'mpeg'
                    file_extension = mtype
            if file_extension:
                # NOTE(review): all recognized extensions get an
                # 'image/' mediaType prefix, including mp3 -> mpeg —
                # confirm this is intended for non-image media
                media_type = 'image/' + file_extension
                shared_item_url = remove_html(shared_item['imageUrl'])
                offer_item['attachment'].append({
                    'mediaType': media_type,
                    'name': shared_item['displayName'],
                    'type': 'Document',
                    'url': shared_item_url
                })
    if shared_item['itemPrice'] and shared_item['itemCurrency']:
        # the reciprocal intent: the payment expected in return
        currency_url = _currency_to_wikidata(shared_item['itemCurrency'])
        offer_item['reciprocal'] = {
            "type": "Intent",
            "id": share_id + '#reciprocal',
            "action": "transfer",
            "resourceConformsTo": currency_url,
            "resourceQuantity": {
                "hasUnit": "one",
                "hasNumericalValue": str(shared_item['itemPrice'])
            }
        }
    return offer_item
|
|
|
|
|
2023-08-23 10:37:27 +00:00
|
|
|
|
def get_share_category(base_dir: str, nickname: str, domain: str,
                       shares_file_type: str, share_id: str) -> str:
    """Returns the category for a shared item
    """
    filename = acct_dir(base_dir, nickname, domain) + \
        '/' + shares_file_type + '.json'
    if not os.path.isfile(filename):
        return ''

    items_json = load_json(filename)
    if not items_json:
        return ''
    entry = items_json.get(share_id)
    if not entry:
        return ''
    # empty string if the item has no category
    category = entry.get('category')
    if not category:
        return ''
    return category
|
|
|
|
|
|
|
|
|
def vf_proposal_from_id(base_dir: str, nickname: str, domain: str,
                        shares_file_type: str, share_id: str,
                        actor: str) -> {}:
    """Looks up a shared item by its id and returns it as a
    ValueFlows proposal
    """
    shares_filename = acct_dir(base_dir, nickname, domain) + \
        '/' + shares_file_type + '.json'
    if not os.path.isfile(shares_filename):
        print('DEBUG: vf_proposal_from_id file not found ' + shares_filename)
        return {}

    shares_json = load_json(shares_filename)
    if not shares_json:
        print('DEBUG: vf_proposal_from_id file not loaded ' + shares_filename)
        return {}
    shared_item = shares_json.get(share_id)
    if not shared_item:
        print('DEBUG: vf_proposal_from_id does not contain id ' + share_id)
        return {}

    # 'shares' are offers; anything else (e.g. wanted items) is a request
    share_type = 'offer' if shares_file_type == 'shares' else 'request'
    shared_item['shareId'] = share_id
    shared_item['actor'] = actor
    return vf_proposal_from_share(shared_item, share_type)
|
|
|
|
2023-08-23 19:20:55 +00:00
|
|
|
|
def _is_valueflows_attachment(attach_item: {}) -> bool:
    """Returns true if the given item is a ValueFlows entry
    within the actor attachment list
    """
    # required fields
    for required_field in ('rel', 'href', 'name'):
        if required_field not in attach_item:
            return False
    rel = attach_item['rel']
    name = attach_item['name']
    href = attach_item['href']
    # type checks on untrusted actor data
    if not isinstance(rel, list) or \
       not isinstance(name, str) or \
       not isinstance(href, str):
        return False
    if len(rel) != 2:
        return False
    if len(name) <= 1:
        return False
    # fep-0837 style payment link pointing at a ValueFlows Proposal
    if rel[0] != 'payment' or \
       not rel[1].endswith('/valueflows/Proposal'):
        return False
    # reject hrefs containing potentially dangerous markup
    return not dangerous_markup(href, False, [])
|
|
|
|
|
2023-08-23 10:37:27 +00:00
|
|
|
|
def actor_attached_shares(actor_json: {}) -> []:
    """Returns any shared items attached to an actor
    https://codeberg.org/fediverse/fep/src/branch/main/fep/0837/fep-0837.md
    """
    attachments = actor_json.get('attachment')
    if not attachments:
        return []
    if not isinstance(attachments, list):
        return []
    # collect the href of every ValueFlows attachment
    return [attach_item['href']
            for attach_item in attachments
            if _is_valueflows_attachment(attach_item)]
|
|
|
|
|
2023-08-24 10:55:19 +00:00
|
|
|
|
def actor_attached_shares_as_html(actor_json: {},
                                  max_shares_on_profile: int) -> str:
    """Returns html for any shared items attached to an actor
    https://codeberg.org/fediverse/fep/src/branch/main/fep/0837/fep-0837.md
    """
    if max_shares_on_profile == 0 or not actor_json.get('attachment'):
        return ''

    html_str = ''
    shown_ctr = 0
    for attach_item in actor_json['attachment']:
        if not _is_valueflows_attachment(attach_item):
            continue
        # open the list on the first ValueFlows attachment found
        if not html_str:
            html_str = '<ul>\n'
        html_str += \
            ' <li><a href="' + attach_item['href'] + '" tabindex="1">' + \
            remove_html(attach_item['name']) + '</a></li>\n'
        shown_ctr += 1
        # don't show more than the maximum number of shares
        if shown_ctr >= max_shares_on_profile:
            break
    if html_str:
        html_str = html_str.strip() + '</ul>\n'
    return html_str
|
|
|
|
|
2023-08-22 17:13:35 +00:00
|
|
|
|
def add_shares_to_actor(base_dir: str,
                        nickname: str, domain: str,
                        actor_json: {},
                        max_shares_on_profile: int) -> bool:
    """Adds shared items to the given actor attachments
    https://codeberg.org/fediverse/fep/src/branch/main/fep/0837/fep-0837.md

    Returns True if the actor's attachment list was changed.
    """
    if 'attachment' not in actor_json:
        actor_json['attachment'] = []
    changed = False

    # remove any existing ValueFlows items from attachment list
    retained_attachment = []
    for attach_item in actor_json['attachment']:
        if _is_valueflows_attachment(attach_item):
            changed = True
            continue
        retained_attachment.append(attach_item)
    actor_json['attachment'] = retained_attachment

    # do shared items exist for this account?
    shares_filename = acct_dir(base_dir, nickname, domain) + '/shares.json'
    if not os.path.isfile(shares_filename):
        return changed
    shares_json = load_json(shares_filename)
    if not shares_json:
        return changed

    # add ValueFlows items to the attachment list
    media_type = \
        "application/ld+json; profile=" + \
        "\"https://www.w3.org/ns/activitystreams\""
    attached_ctr = 0
    for share_id, shared_item in shares_json.items():
        if attached_ctr >= max_shares_on_profile:
            break
        # only items the owner chose to show on their profile
        if not shared_item.get('shareOnProfile'):
            continue
        actor_json['attachment'].append({
            "type": "Link",
            "name": shared_item['displayName'],
            "mediaType": media_type,
            "href": _vf_share_id(share_id),
            "rel": ["payment", "https://w3id.org/valueflows/ont/vf#Proposal"]
        })
        changed = True
        attached_ctr += 1
    return changed