epicyon/shares.py

1867 lines
69 KiB
Python
Raw Normal View History

2020-04-04 11:27:51 +00:00
__filename__ = "shares.py"
__author__ = "Bob Mottram"
__license__ = "AGPL3+"
2022-02-03 13:58:20 +00:00
__version__ = "1.3.0"
2020-04-04 11:27:51 +00:00
__maintainer__ = "Bob Mottram"
2021-09-10 16:14:50 +00:00
__email__ = "bob@libreserver.org"
2020-04-04 11:27:51 +00:00
__status__ = "Production"
2021-06-15 15:08:12 +00:00
__module_group__ = "Timeline"
2020-04-04 11:27:51 +00:00
2019-07-23 12:33:09 +00:00
import os
2021-07-25 10:17:39 +00:00
import re
2021-07-26 09:40:51 +00:00
import secrets
2019-07-23 12:33:09 +00:00
import time
2021-07-24 22:08:11 +00:00
import datetime
from random import randint
2021-08-04 12:04:35 +00:00
from pprint import pprint
2021-12-29 21:55:09 +00:00
from session import get_json
from webfinger import webfinger_handle
2021-12-28 21:36:27 +00:00
from auth import create_basic_auth_header
2021-12-29 21:55:09 +00:00
from auth import constant_time_string_check
from posts import get_person_box
from session import post_json
from session import post_image
2021-12-28 16:56:57 +00:00
from session import create_session
2021-12-26 17:12:07 +00:00
from utils import has_object_stringType
2021-12-26 18:01:02 +00:00
from utils import date_string_to_seconds
2021-12-26 17:55:38 +00:00
from utils import date_seconds_to_string
2021-12-26 14:08:58 +00:00
from utils import get_config_param
2021-12-26 12:45:03 +00:00
from utils import get_full_domain
2021-12-28 14:41:10 +00:00
from utils import valid_nickname
2021-12-26 15:13:34 +00:00
from utils import load_json
2021-12-26 14:47:21 +00:00
from utils import save_json
2021-12-26 14:26:16 +00:00
from utils import get_image_extensions
2021-12-26 18:17:37 +00:00
from utils import remove_domain_port
2021-12-26 18:46:43 +00:00
from utils import is_account_dir
2021-12-26 12:02:29 +00:00
from utils import acct_dir
2021-12-26 18:03:39 +00:00
from utils import is_float
2021-12-26 17:18:34 +00:00
from utils import get_category_types
2021-12-26 17:24:00 +00:00
from utils import get_shares_files_list
2021-12-26 10:19:59 +00:00
from utils import local_actor_url
2021-12-28 21:36:27 +00:00
from media import process_meta_data
from media import convert_image_to_low_bandwidth
2021-12-29 21:55:09 +00:00
from filters import is_filtered_globally
from siteactive import site_is_active
from content import get_price_from_string
from blocking import is_blocked
2019-07-23 12:33:09 +00:00
2020-04-04 11:27:51 +00:00
2021-12-29 21:55:09 +00:00
def _load_dfc_ids(base_dir: str, system_language: str,
                  product_type: str,
                  http_prefix: str, domain_full: str) -> {}:
    """Loads the product types ontology
    This is used to add an id to shared items
    Returns a dict of lowercased label -> ontology id, or None on failure
    """
    # a custom ontology takes precedence over the bundled one
    ontology_filename = \
        base_dir + '/ontology/custom' + product_type.title() + 'Types.json'
    if not os.path.isfile(ontology_filename):
        ontology_filename = \
            base_dir + '/ontology/' + product_type + 'Types.json'
    ontology = load_json(ontology_filename)
    if not ontology:
        print('Unable to load ontology: ' + ontology_filename)
        return None
    graph = ontology.get('@graph')
    if not graph:
        print('No @graph list within ontology')
        return None
    if len(graph) == 0:
        print('@graph list has no contents')
        return None
    first_labels = graph[0].get('rdfs:label')
    if not first_labels:
        print('@graph list entry has no rdfs:label')
        return None
    # the ontology must contain labels in the configured system language
    language_exists = \
        any(lbl.get('@language') == system_language
            for lbl in first_labels if lbl.get('@language'))
    if not language_exists:
        print('product_types ontology does not contain the language ' +
              system_language)
        return None
    # build the label -> id map, rewriting the upstream DFC domain
    # so that ids reference this instance
    dfc_ids = {}
    upstream_domain = 'http://static.datafoodconsortium.org'
    local_base = http_prefix + '://' + domain_full
    for entry in graph:
        if not entry.get('@id'):
            continue
        if not entry.get('rdfs:label'):
            continue
        for lbl in entry['rdfs:label']:
            if not lbl.get('@language'):
                continue
            if not lbl.get('@value'):
                continue
            if lbl['@language'] == system_language:
                dfc_ids[lbl['@value'].lower()] = \
                    entry['@id'].replace(upstream_domain, local_base)
                break
    return dfc_ids
2021-07-24 14:38:43 +00:00
2022-01-03 16:58:50 +00:00
def _get_valid_shared_item_id(actor: str, display_name: str) -> str:
2019-11-02 10:24:25 +00:00
"""Removes any invalid characters from the display name to
produce an item ID
"""
2022-01-03 16:58:50 +00:00
remove_chars = (' ', '\n', '\r', '#')
for char in remove_chars:
display_name = display_name.replace(char, '')
remove_chars2 = ('+', '/', '\\', '?', '&')
for char in remove_chars2:
display_name = display_name.replace(char, '-')
display_name = display_name.replace('.', '_')
display_name = display_name.replace("", "'")
2021-07-27 20:14:13 +00:00
actor = actor.replace('://', '___')
actor = actor.replace('/', '--')
2022-01-03 16:58:50 +00:00
return actor + '--shareditems--' + display_name
2019-11-02 10:24:25 +00:00
2020-04-04 11:27:51 +00:00
2021-12-29 21:55:09 +00:00
def remove_shared_item(base_dir: str, nickname: str, domain: str,
                       item_id: str,
                       http_prefix: str, domain_full: str,
                       shares_file_type: str) -> None:
    """Removes a share for a person

    Deletes the entry for item_id from the account's shares (or wanted)
    json file, along with any image file stored for that item under
    /sharefiles. shares_file_type selects which file ('shares' or
    'wanted'). Errors are reported via print and the function returns.
    """
    shares_filename = \
        acct_dir(base_dir, nickname, domain) + '/' + shares_file_type + '.json'
    if not os.path.isfile(shares_filename):
        print('ERROR: remove shared item, missing ' +
              shares_file_type + '.json ' + shares_filename)
        return
    shares_json = load_json(shares_filename)
    if not shares_json:
        print('ERROR: remove shared item, ' +
              shares_file_type + '.json could not be loaded from ' +
              shares_filename)
        return

    if shares_json.get(item_id):
        # remove any image for the item
        item_idfile = base_dir + '/sharefiles/' + nickname + '/' + item_id
        if shares_json[item_id]['imageUrl']:
            # the stored image extension matches the imageUrl extension
            formats = get_image_extensions()
            for ext in formats:
                if shares_json[item_id]['imageUrl'].endswith('.' + ext):
                    if os.path.isfile(item_idfile + '.' + ext):
                        try:
                            os.remove(item_idfile + '.' + ext)
                        except OSError:
                            print('EX: remove_shared_item unable to delete ' +
                                  item_idfile + '.' + ext)
        # remove the item itself and persist the updated file
        del shares_json[item_id]
        save_json(shares_json, shares_filename)
    else:
        print('ERROR: share index "' + item_id +
              '" does not exist in ' + shares_filename)
2020-04-04 11:27:51 +00:00
2021-12-29 21:55:09 +00:00
def _add_share_duration_sec(duration: str, published: int) -> int:
2021-06-26 19:01:48 +00:00
"""Returns the duration for the shared item in seconds
"""
if ' ' not in duration:
return 0
2022-01-03 16:58:50 +00:00
duration_list = duration.split(' ')
if not duration_list[0].isdigit():
2021-06-26 19:01:48 +00:00
return 0
2022-01-03 16:58:50 +00:00
if 'hour' in duration_list[1]:
return published + (int(duration_list[0]) * 60 * 60)
if 'day' in duration_list[1]:
return published + (int(duration_list[0]) * 60 * 60 * 24)
if 'week' in duration_list[1]:
return published + (int(duration_list[0]) * 60 * 60 * 24 * 7)
if 'month' in duration_list[1]:
return published + (int(duration_list[0]) * 60 * 60 * 24 * 30)
if 'year' in duration_list[1]:
return published + (int(duration_list[0]) * 60 * 60 * 24 * 365)
2021-06-26 19:01:48 +00:00
return 0
2021-12-29 21:55:09 +00:00
def _dfc_product_type_from_category(base_dir: str,
                                    item_category: str, translate: {}) -> str:
    """Does the shared item category match a DFC product type?
    If so then return the product type.
    This will be used to select an appropriate ontology file
    such as ontology/foodTypes.json

    translate maps product type names to the UI language; when a
    translation exists it is used for the substring match instead of
    the english product type name. Returns None if nothing matches.
    """
    product_types_list = get_category_types(base_dir)
    category_lower = item_category.lower()
    for product_type in product_types_list:
        if translate.get(product_type):
            # match using the translated product type name
            if translate[product_type] in category_lower:
                return product_type
        else:
            # no translation available, match the english name
            if product_type in category_lower:
                return product_type
    return None
2021-12-29 21:55:09 +00:00
def _getshare_dfc_id(base_dir: str, system_language: str,
                     item_type: str, item_category: str,
                     translate: {},
                     http_prefix: str, domain_full: str,
                     dfc_ids: {} = None) -> str:
    """Attempts to obtain a DFC Id for the shared item,
    based upon product_types ontology.
    See https://github.com/datafoodconsortium/ontology
    """
    # does the category field match any product type ontology
    # files in the ontology subdirectory?
    product_type = \
        _dfc_product_type_from_category(base_dir, item_category, translate)
    if not product_type:
        # no ontology matched the category, so mint a local id
        # from the item type itself
        sanitized = item_type.replace(' ', '_').replace('.', '')
        return 'epicyon#' + sanitized
    if not dfc_ids:
        dfc_ids = _load_dfc_ids(base_dir, system_language, product_type,
                                http_prefix, domain_full)
        if not dfc_ids:
            return ''
    type_lower = item_type.lower()
    # prefer the longest ontology label contained within the item type
    best_label = ''
    best_uri = ''
    for label, uri in dfc_ids.items():
        if label not in type_lower:
            continue
        if len(label) > len(best_label):
            best_label = label
            best_uri = uri
    if not best_uri:
        # bag of words match: pick the label sharing the most words
        # with the item type
        top_score = 0
        for label, uri in dfc_ids.items():
            words = label.replace('-', ' ').split(' ')
            score = sum(1 for wrd in words if wrd in type_lower)
            if score > top_score:
                top_score = score
                best_uri = uri
    return best_uri
2021-07-24 14:38:43 +00:00
2022-01-03 16:58:50 +00:00
def _getshare_type_from_dfc_id(dfc_uri: str, dfc_ids: {}) -> str:
"""Attempts to obtain a share item type from its DFC Id,
2022-01-03 16:58:50 +00:00
based upon product_types ontology.
See https://github.com/datafoodconsortium/ontology
"""
2022-01-03 16:58:50 +00:00
if dfc_uri.startswith('epicyon#'):
item_type = dfc_uri.split('#')[1]
item_type = item_type.replace('_', ' ')
return item_type
2021-07-27 21:59:49 +00:00
2022-01-03 16:58:50 +00:00
for name, uri in dfc_ids.items():
if uri.endswith('#' + dfc_uri):
return name
2022-01-03 16:58:50 +00:00
if uri == dfc_uri:
return name
return None
2021-12-29 21:55:09 +00:00
def _indicate_new_share_available(base_dir: str, http_prefix: str,
                                  nickname: str, domain: str,
                                  domain_full: str,
                                  shares_file_type: str) -> None:
    """Indicate to each account that a new share is available

    Writes a .newShare (or .newWanted) marker file into every local
    account directory, containing the url of that account's shares
    (or wanted) timeline. Accounts which block the publishing account
    are skipped, as are accounts that already have a marker file.
    """
    for _, dirs, _ in os.walk(base_dir + '/accounts'):
        for handle in dirs:
            if not is_account_dir(handle):
                continue
            account_dir = base_dir + '/accounts/' + handle
            # marker filename depends on whether this is a share or want
            if shares_file_type == 'shares':
                new_share_file = account_dir + '/.newShare'
            else:
                new_share_file = account_dir + '/.newWanted'
            # an unseen notification already exists; don't overwrite it
            if os.path.isfile(new_share_file):
                continue
            account_nickname = handle.split('@')[0]
            # does this account block you?
            if account_nickname != nickname:
                if is_blocked(base_dir, account_nickname, domain,
                              nickname, domain, None):
                    continue
            local_actor = \
                local_actor_url(http_prefix, account_nickname, domain_full)
            try:
                with open(new_share_file, 'w+') as fp_new:
                    if shares_file_type == 'shares':
                        fp_new.write(local_actor + '/tlshares')
                    else:
                        fp_new.write(local_actor + '/tlwanted')
            except OSError:
                print('EX: _indicate_new_share_available unable to write ' +
                      str(new_share_file))
        # only the top level of the accounts directory is walked
        break
2021-12-29 21:55:09 +00:00
def add_share(base_dir: str,
              http_prefix: str, nickname: str, domain: str, port: int,
              display_name: str, summary: str, image_filename: str,
              item_qty: float, item_type: str, item_category: str,
              location: str, duration: str, debug: bool, city: str,
              price: str, currency: str,
              system_language: str, translate: {},
              shares_file_type: str, low_bandwidth: bool,
              content_license_url: str) -> None:
    """Adds a new share

    Appends an entry to the account's shares (or wanted) json file,
    processing any uploaded image into /sharefiles, assigning a DFC
    ontology id where possible, and notifying other local accounts
    that a new share exists. shares_file_type selects 'shares' or
    'wanted'. duration is e.g. "2 weeks" and sets the expiry time.
    """
    # drop the share entirely if its text trips the global filters
    if is_filtered_globally(base_dir,
                            display_name + ' ' + summary + ' ' +
                            item_type + ' ' + item_category):
        print('Shared item was filtered due to content')
        return
    shares_filename = \
        acct_dir(base_dir, nickname, domain) + '/' + shares_file_type + '.json'
    shares_json = {}
    if os.path.isfile(shares_filename):
        shares_json = load_json(shares_filename, 1, 2)

    duration = duration.lower()
    published = int(time.time())
    # expiry timestamp in epoch seconds (0 if duration unrecognized)
    duration_sec = _add_share_duration_sec(duration, published)

    domain_full = get_full_domain(domain, port)
    actor = local_actor_url(http_prefix, nickname, domain_full)
    item_id = _get_valid_shared_item_id(actor, display_name)
    dfc_id = _getshare_dfc_id(base_dir, system_language,
                              item_type, item_category, translate,
                              http_prefix, domain_full)

    # has an image for this share been uploaded?
    image_url = None
    move_image = False
    if not image_filename:
        # no explicit filename given; look for a pending upload file
        shares_image_filename = \
            acct_dir(base_dir, nickname, domain) + '/upload'
        formats = get_image_extensions()
        for ext in formats:
            if os.path.isfile(shares_image_filename + '.' + ext):
                image_filename = shares_image_filename + '.' + ext
                # uploads are moved rather than copied
                move_image = True

    domain_full = get_full_domain(domain, port)

    # copy or move the image for the shared item to its destination
    if image_filename:
        if os.path.isfile(image_filename):
            if not os.path.isdir(base_dir + '/sharefiles'):
                os.mkdir(base_dir + '/sharefiles')
            if not os.path.isdir(base_dir + '/sharefiles/' + nickname):
                os.mkdir(base_dir + '/sharefiles/' + nickname)
            item_idfile = base_dir + '/sharefiles/' + nickname + '/' + item_id
            formats = get_image_extensions()
            for ext in formats:
                if not image_filename.endswith('.' + ext):
                    continue
                if low_bandwidth:
                    convert_image_to_low_bandwidth(image_filename)
                # strips exif metadata while writing to the destination
                process_meta_data(base_dir, nickname, domain,
                                  image_filename, item_idfile + '.' + ext,
                                  city, content_license_url)
                if move_image:
                    try:
                        os.remove(image_filename)
                    except OSError:
                        print('EX: add_share unable to delete ' +
                              str(image_filename))
                image_url = \
                    http_prefix + '://' + domain_full + \
                    '/sharefiles/' + nickname + '/' + item_id + '.' + ext

    shares_json[item_id] = {
        "displayName": display_name,
        "summary": summary,
        "imageUrl": image_url,
        "itemQty": float(item_qty),
        "dfcId": dfc_id,
        "itemType": item_type,
        "category": item_category,
        "location": location,
        "published": published,
        "expire": duration_sec,
        "itemPrice": price,
        "itemCurrency": currency
    }

    save_json(shares_json, shares_filename)

    # drop a notification marker into each local account
    _indicate_new_share_available(base_dir, http_prefix,
                                  nickname, domain, domain_full,
                                  shares_file_type)
2020-04-04 11:27:51 +00:00
2021-12-29 21:55:09 +00:00
def expire_shares(base_dir: str) -> None:
    """Removes expired items from shares

    Scans every local account and expires both the shares and the
    wanted items files.
    """
    for _, dirs, _ in os.walk(base_dir + '/accounts'):
        for handle in dirs:
            if not is_account_dir(handle):
                continue
            fields = handle.split('@')
            nickname = fields[0]
            domain = fields[1]
            for file_type in get_shares_files_list():
                _expire_shares_for_account(base_dir, nickname, domain,
                                           file_type)
        # only the top level of the accounts directory is scanned
        break
2020-04-04 11:27:51 +00:00
2019-10-17 09:58:30 +00:00
2021-12-29 21:55:09 +00:00
def _expire_shares_for_account(base_dir: str, nickname: str, domain: str,
                               shares_file_type: str) -> None:
    """Removes expired items from shares for a particular account

    Deletes any entries whose expire timestamp has passed, together
    with their stored images, then saves the updated file.
    """
    account_handle = nickname + '@' + remove_domain_port(domain)
    shares_filename = \
        base_dir + '/accounts/' + account_handle + '/' + \
        shares_file_type + '.json'
    if not os.path.isfile(shares_filename):
        return
    shares_json = load_json(shares_filename, 1, 2)
    if not shares_json:
        return
    # collect the ids whose expiry time has passed
    curr_time = int(time.time())
    expired_ids = [share_id for share_id, share in shares_json.items()
                   if curr_time > share['expire']]
    if not expired_ids:
        return
    for share_id in expired_ids:
        del shares_json[share_id]
        # remove any associated images
        image_base = base_dir + '/sharefiles/' + nickname + '/' + share_id
        for ext in get_image_extensions():
            if not os.path.isfile(image_base + '.' + ext):
                continue
            try:
                os.remove(image_base + '.' + ext)
            except OSError:
                print('EX: _expire_shares_for_account unable to delete ' +
                      image_base + '.' + ext)
    save_json(shares_json, shares_filename)
2020-04-04 11:27:51 +00:00
2021-12-29 21:55:09 +00:00
def get_shares_feed_for_person(base_dir: str,
                               domain: str, port: int,
                               path: str, http_prefix: str,
                               shares_file_type: str,
                               shares_per_page: int) -> {}:
    """Returns the shares for an account from GET requests

    path is the request path, e.g. /users/nick/shares?page=2 or
    /@nick/shares. Without a ?page= parameter an OrderedCollection
    header is returned; with one, an OrderedCollectionPage containing
    the items for that page. Returns None if the path does not
    reference a valid shares/wanted feed.
    """
    if '/' + shares_file_type not in path:
        return None
    # handle page numbers
    header_only = True
    page_number = None
    if '?page=' in path:
        page_number = path.split('?page=')[1]
        if page_number == 'true':
            page_number = 1
        else:
            try:
                page_number = int(page_number)
            except BaseException:
                print('EX: get_shares_feed_for_person ' +
                      'unable to convert to int ' + str(page_number))
        path = path.split('?page=')[0]
        header_only = False

    if not path.endswith('/' + shares_file_type):
        return None
    # extract the nickname from either /users/ or /@ style paths
    nickname = None
    if path.startswith('/users/'):
        nickname = \
            path.replace('/users/', '', 1).replace('/' + shares_file_type, '')
    if path.startswith('/@'):
        nickname = \
            path.replace('/@', '', 1).replace('/' + shares_file_type, '')
    if not nickname:
        return None
    if not valid_nickname(domain, nickname):
        return None

    domain = get_full_domain(domain, port)

    handle_domain = remove_domain_port(domain)
    shares_filename = \
        acct_dir(base_dir, nickname, handle_domain) + '/' + \
        shares_file_type + '.json'
    if header_only:
        # no page was requested: return the collection header with
        # just the total item count
        no_of_shares = 0
        if os.path.isfile(shares_filename):
            shares_json = load_json(shares_filename)
            if shares_json:
                no_of_shares = len(shares_json.items())
        id_str = local_actor_url(http_prefix, nickname, domain)
        shares = {
            '@context': 'https://www.w3.org/ns/activitystreams',
            'first': id_str + '/' + shares_file_type + '?page=1',
            'id': id_str + '/' + shares_file_type,
            'totalItems': str(no_of_shares),
            'type': 'OrderedCollection'
        }
        return shares
    if not page_number:
        page_number = 1

    next_page_number = int(page_number + 1)
    id_str = local_actor_url(http_prefix, nickname, domain)
    shares = {
        '@context': 'https://www.w3.org/ns/activitystreams',
        'id': id_str + '/' + shares_file_type + '?page=' + str(page_number),
        'orderedItems': [],
        'partOf': id_str + '/' + shares_file_type,
        'totalItems': 0,
        'type': 'OrderedCollectionPage'
    }

    if not os.path.isfile(shares_filename):
        return shares
    # walk the whole file, keeping only the items which fall on the
    # requested page, and count the total as we go
    curr_page = 1
    page_ctr = 0
    total_ctr = 0
    shares_json = load_json(shares_filename)
    if shares_json:
        for item_id, item in shares_json.items():
            page_ctr += 1
            total_ctr += 1
            if curr_page == page_number:
                # each returned item carries its id for later lookup
                item['shareId'] = item_id
                shares['orderedItems'].append(item)
            if page_ctr >= shares_per_page:
                page_ctr = 0
                curr_page += 1
    shares['totalItems'] = total_ctr
    last_page = int(total_ctr / shares_per_page)
    if last_page < 1:
        last_page = 1
    # when the requested page is at/beyond the end, 'next' points to
    # the last page rather than past it
    if next_page_number > last_page:
        shares['next'] = \
            local_actor_url(http_prefix, nickname, domain) + \
            '/' + shares_file_type + '?page=' + str(last_page)
    return shares
2019-07-23 19:02:26 +00:00
2020-04-04 11:27:51 +00:00
2021-12-29 21:55:09 +00:00
def send_share_via_server(base_dir, session,
                          from_nickname: str, password: str,
                          from_domain: str, fromPort: int,
                          http_prefix: str, display_name: str,
                          summary: str, image_filename: str,
                          item_qty: float, item_type: str, item_category: str,
                          location: str, duration: str,
                          cached_webfingers: {}, person_cache: {},
                          debug: bool, project_version: str,
                          itemPrice: str, item_currency: str,
                          signing_priv_key_pem: str) -> {}:
    """Creates an item share via c2s

    Builds an Add/Offer activity and POSTs it to the account's outbox
    using basic auth, optionally uploading an image first. Returns the
    activity dict on success, or a small integer error code:
    1 webfinger failed, 3 no outbox found, 4 no actor found,
    6 no session.
    """
    if not session:
        print('WARN: No session for send_share_via_server')
        return 6
    # convert $4.23 to 4.23 USD
    new_item_price, new_item_currency = get_price_from_string(itemPrice)
    if new_item_price != itemPrice:
        itemPrice = new_item_price
        if not item_currency:
            if new_item_currency != item_currency:
                item_currency = new_item_currency
    from_domain_full = get_full_domain(from_domain, fromPort)

    actor = local_actor_url(http_prefix, from_nickname, from_domain_full)
    to_url = 'https://www.w3.org/ns/activitystreams#Public'
    cc_url = actor + '/followers'

    # Add activity targeting the actor's shares collection
    new_share_json = {
        "@context": "https://www.w3.org/ns/activitystreams",
        'type': 'Add',
        'actor': actor,
        'target': actor + '/shares',
        'object': {
            "type": "Offer",
            "displayName": display_name,
            "summary": summary,
            "itemQty": float(item_qty),
            "itemType": item_type,
            "category": item_category,
            "location": location,
            "duration": duration,
            "itemPrice": itemPrice,
            "itemCurrency": item_currency,
            'to': [to_url],
            'cc': [cc_url]
        },
        'to': [to_url],
        'cc': [cc_url]
    }
    handle = http_prefix + '://' + from_domain_full + '/@' + from_nickname
    # lookup the inbox for the To handle
    wf_request = \
        webfinger_handle(session, handle, http_prefix,
                         cached_webfingers,
                         from_domain, project_version, debug, False,
                         signing_priv_key_pem)
    if not wf_request:
        if debug:
            print('DEBUG: share webfinger failed for ' + handle)
        return 1
    if not isinstance(wf_request, dict):
        print('WARN: share webfinger for ' + handle +
              ' did not return a dict. ' + str(wf_request))
        return 1

    post_to_box = 'outbox'
    # get the actor inbox for the To handle
    origin_domain = from_domain
    (inbox_url, _, _, from_person_id, _, _,
     display_name, _) = get_person_box(signing_priv_key_pem,
                                       origin_domain,
                                       base_dir, session, wf_request,
                                       person_cache, project_version,
                                       http_prefix, from_nickname,
                                       from_domain, post_to_box,
                                       83653)
    if not inbox_url:
        if debug:
            print('DEBUG: share no ' + post_to_box +
                  ' was found for ' + handle)
        return 3
    if not from_person_id:
        if debug:
            print('DEBUG: share no actor was found for ' + handle)
        return 4

    auth_header = create_basic_auth_header(from_nickname, password)

    # upload the share image first, if there is one
    if image_filename:
        headers = {
            'host': from_domain,
            'Authorization': auth_header
        }
        inbox_url_str = inbox_url.replace('/' + post_to_box, '/shares')
        post_result = \
            post_image(session, image_filename, [], inbox_url_str,
                       headers, http_prefix, from_domain_full)
    headers = {
        'host': from_domain,
        'Content-type': 'application/json',
        'Authorization': auth_header
    }
    post_result = \
        post_json(http_prefix, from_domain_full,
                  session, new_share_json, [], inbox_url, headers, 30, True)
    if not post_result:
        if debug:
            print('DEBUG: POST share failed for c2s to ' + inbox_url)
        # return 5
    if debug:
        print('DEBUG: c2s POST share item success')

    return new_share_json
2019-07-23 20:00:17 +00:00
2020-04-04 11:27:51 +00:00
2021-12-29 21:55:09 +00:00
def send_undo_share_via_server(base_dir: str, session,
                               from_nickname: str, password: str,
                               from_domain: str, fromPort: int,
                               http_prefix: str, display_name: str,
                               cached_webfingers: {}, person_cache: {},
                               debug: bool, project_version: str,
                               signing_priv_key_pem: str) -> {}:
    """Undoes a share via c2s
    On success returns the Remove activity which was posted,
    otherwise returns an integer error code
    """
    if not session:
        print('WARN: No session for send_undo_share_via_server')
        return 6
    from_domain_full = get_full_domain(from_domain, fromPort)
    actor = local_actor_url(http_prefix, from_nickname, from_domain_full)

    # addressed to the public stream, with followers cc'd
    public_url = 'https://www.w3.org/ns/activitystreams#Public'
    followers_url = actor + '/followers'

    # Remove activity which reverses a previously shared Offer
    undo_share_json = {
        "@context": "https://www.w3.org/ns/activitystreams",
        'type': 'Remove',
        'actor': actor,
        'target': actor + '/shares',
        'object': {
            "type": "Offer",
            "displayName": display_name,
            'to': [public_url],
            'cc': [followers_url]
        },
        'to': [public_url],
        'cc': [followers_url]
    }

    handle = http_prefix + '://' + from_domain_full + '/@' + from_nickname

    # discover the account via webfinger
    wfinger = \
        webfinger_handle(session, handle, http_prefix, cached_webfingers,
                         from_domain, project_version, debug, False,
                         signing_priv_key_pem)
    if not wfinger:
        if debug:
            print('DEBUG: unshare webfinger failed for ' + handle)
        return 1
    if not isinstance(wfinger, dict):
        print('WARN: unshare webfinger for ' + handle +
              ' did not return a dict. ' + str(wfinger))
        return 1

    # resolve the outbox url for the sending account
    post_to_box = 'outbox'
    (outbox_url, _, _, person_id, _, _,
     _display_name, _) = get_person_box(signing_priv_key_pem,
                                        from_domain,
                                        base_dir, session, wfinger,
                                        person_cache, project_version,
                                        http_prefix, from_nickname,
                                        from_domain, post_to_box,
                                        12663)
    if not outbox_url:
        if debug:
            print('DEBUG: unshare no ' + post_to_box +
                  ' was found for ' + handle)
        return 3
    if not person_id:
        if debug:
            print('DEBUG: unshare no actor was found for ' + handle)
        return 4

    # post the Remove activity to the account's outbox
    auth_header = create_basic_auth_header(from_nickname, password)
    headers = {
        'host': from_domain,
        'Content-type': 'application/json',
        'Authorization': auth_header
    }
    post_result = \
        post_json(http_prefix, from_domain_full,
                  session, undo_share_json, [], outbox_url,
                  headers, 30, True)
    if not post_result:
        if debug:
            print('DEBUG: POST unshare failed for c2s to ' + outbox_url)
        # return 5
    if debug:
        print('DEBUG: c2s POST unshare success')

    return undo_share_json
2019-07-23 21:14:16 +00:00
2020-04-04 11:27:51 +00:00
2021-12-29 21:55:09 +00:00
def send_wanted_via_server(base_dir: str, session,
                           from_nickname: str, password: str,
                           from_domain: str, fromPort: int,
                           http_prefix: str, display_name: str,
                           summary: str, image_filename: str,
                           item_qty: float, item_type: str, item_category: str,
                           location: str, duration: str,
                           cached_webfingers: {}, person_cache: {},
                           debug: bool, project_version: str,
                           itemMaxPrice: str, item_currency: str,
                           signing_priv_key_pem: str) -> {}:
    """Creates a wanted item via c2s
    Posts an Add/Offer activity to the account's outbox, optionally
    uploading an attached image first.
    On success returns the Add activity which was posted, otherwise
    returns an integer error code.
    """
    if not session:
        print('WARN: No session for send_wanted_via_server')
        return 6
    # convert $4.23 to 4.23 USD
    new_item_max_price, new_item_currency = get_price_from_string(itemMaxPrice)
    if new_item_max_price != itemMaxPrice:
        itemMaxPrice = new_item_max_price
    # only take the parsed currency if no explicit currency was given
    if not item_currency:
        if new_item_currency != item_currency:
            item_currency = new_item_currency

    from_domain_full = get_full_domain(from_domain, fromPort)

    actor = local_actor_url(http_prefix, from_nickname, from_domain_full)
    # addressed to the public stream, with followers cc'd
    to_url = 'https://www.w3.org/ns/activitystreams#Public'
    cc_url = actor + '/followers'

    # Add activity whose Offer object describes the wanted item
    new_share_json = {
        "@context": "https://www.w3.org/ns/activitystreams",
        'type': 'Add',
        'actor': actor,
        'target': actor + '/wanted',
        'object': {
            "type": "Offer",
            "displayName": display_name,
            "summary": summary,
            "itemQty": float(item_qty),
            "itemType": item_type,
            "category": item_category,
            "location": location,
            "duration": duration,
            "itemPrice": itemMaxPrice,
            "itemCurrency": item_currency,
            'to': [to_url],
            'cc': [cc_url]
        },
        'to': [to_url],
        'cc': [cc_url]
    }
    handle = http_prefix + '://' + from_domain_full + '/@' + from_nickname
    # lookup the inbox for the To handle
    wf_request = \
        webfinger_handle(session, handle, http_prefix,
                         cached_webfingers,
                         from_domain, project_version, debug, False,
                         signing_priv_key_pem)
    if not wf_request:
        if debug:
            print('DEBUG: share webfinger failed for ' + handle)
        return 1
    if not isinstance(wf_request, dict):
        print('WARN: wanted webfinger for ' + handle +
              ' did not return a dict. ' + str(wf_request))
        return 1

    post_to_box = 'outbox'
    # get the actor inbox for the To handle
    origin_domain = from_domain
    (inbox_url, _, _, from_person_id, _, _,
     display_name, _) = get_person_box(signing_priv_key_pem,
                                       origin_domain,
                                       base_dir, session, wf_request,
                                       person_cache, project_version,
                                       http_prefix, from_nickname,
                                       from_domain, post_to_box,
                                       23653)
    if not inbox_url:
        if debug:
            print('DEBUG: wanted no ' + post_to_box +
                  ' was found for ' + handle)
        return 3
    if not from_person_id:
        if debug:
            print('DEBUG: wanted no actor was found for ' + handle)
        return 4
    auth_header = create_basic_auth_header(from_nickname, password)

    # if there is an image then upload it to the /wanted endpoint
    # before posting the activity itself
    if image_filename:
        headers = {
            'host': from_domain,
            'Authorization': auth_header
        }
        inbox_url_str = inbox_url.replace('/' + post_to_box, '/wanted')
        # NOTE(review): the result of the image upload is not checked;
        # the item upload proceeds regardless of whether it succeeded
        post_result = \
            post_image(session, image_filename, [], inbox_url_str,
                       headers, http_prefix, from_domain_full)
    headers = {
        'host': from_domain,
        'Content-type': 'application/json',
        'Authorization': auth_header
    }
    post_result = \
        post_json(http_prefix, from_domain_full,
                  session, new_share_json, [], inbox_url, headers, 30, True)
    if not post_result:
        if debug:
            print('DEBUG: POST wanted failed for c2s to ' + inbox_url)
        # return 5
    if debug:
        print('DEBUG: c2s POST wanted item success')
    return new_share_json
2021-08-09 19:37:18 +00:00
2021-12-29 21:55:09 +00:00
def send_undo_wanted_via_server(base_dir: str, session,
                                from_nickname: str, password: str,
                                from_domain: str, fromPort: int,
                                http_prefix: str, display_name: str,
                                cached_webfingers: {}, person_cache: {},
                                debug: bool, project_version: str,
                                signing_priv_key_pem: str) -> {}:
    """Undoes a wanted item via c2s
    On success returns the Remove activity which was posted,
    otherwise returns an integer error code
    """
    if not session:
        print('WARN: No session for send_undo_wanted_via_server')
        return 6
    from_domain_full = get_full_domain(from_domain, fromPort)
    actor = local_actor_url(http_prefix, from_nickname, from_domain_full)

    # addressed to the public stream, with followers cc'd
    public_url = 'https://www.w3.org/ns/activitystreams#Public'
    followers_url = actor + '/followers'

    # Remove activity which reverses a previously wanted Offer
    undo_share_json = {
        "@context": "https://www.w3.org/ns/activitystreams",
        'type': 'Remove',
        'actor': actor,
        'target': actor + '/wanted',
        'object': {
            "type": "Offer",
            "displayName": display_name,
            'to': [public_url],
            'cc': [followers_url]
        },
        'to': [public_url],
        'cc': [followers_url]
    }

    handle = http_prefix + '://' + from_domain_full + '/@' + from_nickname

    # discover the account via webfinger
    wfinger = \
        webfinger_handle(session, handle, http_prefix, cached_webfingers,
                         from_domain, project_version, debug, False,
                         signing_priv_key_pem)
    if not wfinger:
        if debug:
            print('DEBUG: unwant webfinger failed for ' + handle)
        return 1
    if not isinstance(wfinger, dict):
        print('WARN: unwant webfinger for ' + handle +
              ' did not return a dict. ' + str(wfinger))
        return 1

    # resolve the outbox url for the sending account
    post_to_box = 'outbox'
    (outbox_url, _, _, person_id, _, _,
     _display_name, _) = get_person_box(signing_priv_key_pem,
                                        from_domain,
                                        base_dir, session, wfinger,
                                        person_cache, project_version,
                                        http_prefix, from_nickname,
                                        from_domain, post_to_box,
                                        12693)
    if not outbox_url:
        if debug:
            print('DEBUG: unwant no ' + post_to_box +
                  ' was found for ' + handle)
        return 3
    if not person_id:
        if debug:
            print('DEBUG: unwant no actor was found for ' + handle)
        return 4

    # post the Remove activity to the account's outbox
    auth_header = create_basic_auth_header(from_nickname, password)
    headers = {
        'host': from_domain,
        'Content-type': 'application/json',
        'Authorization': auth_header
    }
    post_result = \
        post_json(http_prefix, from_domain_full,
                  session, undo_share_json, [], outbox_url,
                  headers, 30, True)
    if not post_result:
        if debug:
            print('DEBUG: POST unwant failed for c2s to ' + outbox_url)
        # return 5
    if debug:
        print('DEBUG: c2s POST unwant success')

    return undo_share_json
2021-08-09 19:37:18 +00:00
2021-12-29 21:55:09 +00:00
def get_shared_items_catalog_via_server(base_dir, session,
                                        nickname: str, password: str,
                                        domain: str, port: int,
                                        http_prefix: str, debug: bool,
                                        signing_priv_key_pem: str) -> {}:
    """Returns the shared items catalog via c2s
    Returns an integer error code when no session is available
    """
    if not session:
        print('WARN: No session for get_shared_items_catalog_via_server')
        return 6
    domain_full = get_full_domain(domain, port)
    url = local_actor_url(http_prefix, nickname, domain_full) + '/catalog'
    if debug:
        print('Shared items catalog request to: ' + url)

    # basic auth headers, requesting a json response
    auth_header = create_basic_auth_header(nickname, password)
    headers = {
        'host': domain,
        'Content-type': 'application/json',
        'Authorization': auth_header,
        'Accept': 'application/json'
    }
    catalog_json = get_json(signing_priv_key_pem, session, url, headers, None,
                            debug, __version__, http_prefix, None)
    if not catalog_json:
        if debug:
            print('DEBUG: GET shared items catalog failed for c2s to ' + url)
        # return 5
    if debug:
        print('DEBUG: c2s GET shared items catalog success')
    return catalog_json
2021-08-04 12:04:35 +00:00
2021-12-29 21:55:09 +00:00
def outbox_share_upload(base_dir: str, http_prefix: str,
                        nickname: str, domain: str, port: int,
                        message_json: {}, debug: bool, city: str,
                        system_language: str, translate: {},
                        low_bandwidth: bool,
                        content_license_url: str) -> None:
    """ When a shared item is received by the outbox from c2s
    Validates the Add/Offer activity fields and then stores the
    shared item via add_share. Returns silently on any invalid input.
    """
    if not message_json.get('type'):
        return
    if not message_json['type'] == 'Add':
        return
    if not has_object_stringType(message_json, debug):
        return
    if not message_json['object']['type'] == 'Offer':
        if debug:
            print('DEBUG: not an Offer activity')
        return
    # check that all of the fields needed by add_share are present
    if not message_json['object'].get('displayName'):
        if debug:
            print('DEBUG: displayName missing from Offer')
        return
    if not message_json['object'].get('summary'):
        if debug:
            print('DEBUG: summary missing from Offer')
        return
    if not message_json['object'].get('itemQty'):
        if debug:
            print('DEBUG: itemQty missing from Offer')
        return
    if not message_json['object'].get('itemType'):
        if debug:
            print('DEBUG: itemType missing from Offer')
        return
    if not message_json['object'].get('category'):
        if debug:
            print('DEBUG: category missing from Offer')
        return
    if not message_json['object'].get('duration'):
        if debug:
            print('DEBUG: duration missing from Offer')
        return
    # itemPrice and itemCurrency were previously read without being
    # checked, which could raise KeyError on a malformed c2s upload
    if not message_json['object'].get('itemPrice'):
        if debug:
            print('DEBUG: itemPrice missing from Offer')
        return
    if not message_json['object'].get('itemCurrency'):
        if debug:
            print('DEBUG: itemCurrency missing from Offer')
        return
    # reject non-numeric quantities rather than raising ValueError
    try:
        item_qty = float(message_json['object']['itemQty'])
    except (ValueError, TypeError):
        if debug:
            print('DEBUG: itemQty is not a number')
        return
    location = ''
    if message_json['object'].get('location'):
        location = message_json['object']['location']
    image_filename = None
    if message_json['object'].get('image_filename'):
        image_filename = message_json['object']['image_filename']
    if debug:
        print('Adding shared item')
        pprint(message_json)
    add_share(base_dir,
              http_prefix, nickname, domain, port,
              message_json['object']['displayName'],
              message_json['object']['summary'],
              image_filename,
              item_qty,
              message_json['object']['itemType'],
              message_json['object']['category'],
              location,
              message_json['object']['duration'],
              debug, city,
              message_json['object']['itemPrice'],
              message_json['object']['itemCurrency'],
              system_language, translate, 'shares',
              low_bandwidth, content_license_url)
    if debug:
        print('DEBUG: shared item received via c2s')
2019-07-23 21:14:16 +00:00
2020-04-04 11:27:51 +00:00
2021-12-29 21:55:09 +00:00
def outbox_undo_share_upload(base_dir: str, http_prefix: str,
                             nickname: str, domain: str, port: int,
                             message_json: {}, debug: bool) -> None:
    """ When a shared item is removed via c2s
    """
    # only handle Remove activities which carry an Offer object
    if message_json.get('type') != 'Remove':
        return
    if not has_object_stringType(message_json, debug):
        return
    offer = message_json['object']
    if offer['type'] != 'Offer':
        if debug:
            print('DEBUG: not an Offer activity')
        return
    if not offer.get('displayName'):
        if debug:
            print('DEBUG: displayName missing from Offer')
        return
    # the shared item is identified by the display name of the offer
    remove_shared_item(base_dir, nickname, domain,
                       offer['displayName'],
                       http_prefix, get_full_domain(domain, port), 'shares')
    if debug:
        print('DEBUG: shared item removed via c2s')
2021-07-24 22:08:11 +00:00
2021-12-29 21:55:09 +00:00
def _shares_catalog_params(path: str) -> (bool, float, float, str):
2021-07-25 13:09:39 +00:00
"""Returns parameters when accessing the shares catalog
"""
today = False
2022-01-03 16:58:50 +00:00
min_price = 0
max_price = 9999999
match_pattern = None
2021-07-25 13:09:39 +00:00
if '?' not in path:
2022-01-03 16:58:50 +00:00
return today, min_price, max_price, match_pattern
2021-07-25 13:09:39 +00:00
args = path.split('?', 1)[1]
2022-01-03 16:58:50 +00:00
arg_list = args.split(';')
for arg in arg_list:
2021-07-25 13:09:39 +00:00
if '=' not in arg:
continue
key = arg.split('=')[0].lower()
value = arg.split('=')[1]
if key == 'today':
value = value.lower()
2021-07-25 13:50:03 +00:00
if 't' in value or 'y' in value or '1' in value:
2021-07-25 13:09:39 +00:00
today = True
elif key.startswith('min'):
2021-12-26 18:03:39 +00:00
if is_float(value):
2022-01-03 16:58:50 +00:00
min_price = float(value)
2021-07-25 13:09:39 +00:00
elif key.startswith('max'):
2021-12-26 18:03:39 +00:00
if is_float(value):
2022-01-03 16:58:50 +00:00
max_price = float(value)
2021-07-25 13:09:39 +00:00
elif key.startswith('match'):
2022-01-03 16:58:50 +00:00
match_pattern = value
return today, min_price, max_price, match_pattern
2021-07-25 13:09:39 +00:00
2021-12-29 21:55:09 +00:00
def shares_catalog_account_endpoint(base_dir: str, http_prefix: str,
                                    nickname: str, domain: str,
                                    domain_full: str,
                                    path: str, debug: bool,
                                    shares_file_type: str) -> {}:
    """Returns the endpoint for the shares catalog of a particular account
    See https://github.com/datafoodconsortium/ontology
    Also the subdirectory ontology/DFC
    shares_file_type selects the per-account json file, eg. 'shares'
    """
    today, min_price, max_price, match_pattern = _shares_catalog_params(path)
    dfc_url = \
        http_prefix + '://' + domain_full + '/ontologies/DFC_FullModel.owl#'
    dfc_pt_url = \
        http_prefix + '://' + domain_full + \
        '/ontologies/DFC_ProductGlossary.rdf#'
    owner = local_actor_url(http_prefix, nickname, domain_full)
    if shares_file_type == 'shares':
        dfc_instance_id = owner + '/catalog'
    else:
        dfc_instance_id = owner + '/wantedItems'
    endpoint = {
        "@context": {
            "DFC": dfc_url,
            "dfc-pt": dfc_pt_url,
            "@base": "http://maPlateformeNationale"
        },
        "@id": dfc_instance_id,
        "@type": "DFC:Entreprise",
        "DFC:supplies": []
    }
    curr_date = datetime.datetime.utcnow()
    curr_date_str = curr_date.strftime("%Y-%m-%d")

    shares_filename = \
        acct_dir(base_dir, nickname, domain) + '/' + shares_file_type + '.json'
    if not os.path.isfile(shares_filename):
        if debug:
            print(shares_file_type + '.json file not found: ' +
                  shares_filename)
        return endpoint
    shares_json = load_json(shares_filename, 1, 2)
    if not shares_json:
        if debug:
            print('Unable to load json for ' + shares_filename)
        return endpoint

    for item_id, item in shares_json.items():
        if not item.get('dfcId'):
            if debug:
                print('Item does not have dfcId: ' + item_id)
            continue
        if '#' not in item['dfcId']:
            continue
        if today:
            # NOTE(review): 'published' is treated here as an ISO date
            # string, but shares_catalog_endpoint passes the same field
            # to date_seconds_to_string as if it were seconds - confirm
            if not item['published'].startswith(curr_date_str):
                continue
        if min_price is not None:
            if float(item['itemPrice']) < min_price:
                continue
        if max_price is not None:
            if float(item['itemPrice']) > max_price:
                continue
        description = item['displayName'] + ': ' + item['summary']
        if match_pattern:
            if not re.match(match_pattern, description):
                continue

        # use UTC so that the trailing Z in the formatted expiry date is
        # accurate. Previously fromtimestamp returned local time while
        # still being labelled with a Z suffix
        expire_date = datetime.datetime.utcfromtimestamp(item['expire'])
        expire_date_str = expire_date.strftime("%Y-%m-%dT%H:%M:%SZ")

        share_id = _get_valid_shared_item_id(owner, item['displayName'])
        if item['dfcId'].startswith('epicyon#'):
            dfc_id = "epicyon:" + item['dfcId'].split('#')[1]
        else:
            dfc_id = "dfc-pt:" + item['dfcId'].split('#')[1]
        price_str = item['itemPrice'] + ' ' + item['itemCurrency']
        catalog_item = {
            "@id": share_id,
            "@type": "DFC:SuppliedProduct",
            "DFC:hasType": dfc_id,
            "DFC:startDate": item['published'],
            "DFC:expiryDate": expire_date_str,
            "DFC:quantity": float(item['itemQty']),
            "DFC:price": price_str,
            "DFC:Image": item['imageUrl'],
            "DFC:description": description
        }
        endpoint['DFC:supplies'].append(catalog_item)

    return endpoint
2021-12-29 21:55:09 +00:00
def shares_catalog_endpoint(base_dir: str, http_prefix: str,
                            domain_full: str,
                            path: str, shares_file_type: str) -> {}:
    """Returns the endpoint for the shares catalog for the instance
    See https://github.com/datafoodconsortium/ontology
    Also the subdirectory ontology/DFC
    shares_file_type selects the per-account json file, eg. 'shares'
    """
    today, min_price, max_price, match_pattern = _shares_catalog_params(path)
    dfc_url = \
        http_prefix + '://' + domain_full + '/ontologies/DFC_FullModel.owl#'
    dfc_pt_url = \
        http_prefix + '://' + domain_full + \
        '/ontologies/DFC_ProductGlossary.rdf#'
    dfc_instance_id = http_prefix + '://' + domain_full + '/catalog'
    endpoint = {
        "@context": {
            "DFC": dfc_url,
            "dfc-pt": dfc_pt_url,
            "@base": "http://maPlateformeNationale"
        },
        "@id": dfc_instance_id,
        "@type": "DFC:Entreprise",
        "DFC:supplies": []
    }
    curr_date = datetime.datetime.utcnow()
    curr_date_str = curr_date.strftime("%Y-%m-%d")

    for _, dirs, _ in os.walk(base_dir + '/accounts'):
        for acct in dirs:
            if not is_account_dir(acct):
                continue
            nickname = acct.split('@')[0]
            domain = acct.split('@')[1]
            owner = local_actor_url(http_prefix, nickname, domain_full)

            shares_filename = \
                acct_dir(base_dir, nickname, domain) + '/' + \
                shares_file_type + '.json'
            if not os.path.isfile(shares_filename):
                continue
            shares_json = load_json(shares_filename, 1, 2)
            if not shares_json:
                continue
            for _, item in shares_json.items():
                if not item.get('dfcId'):
                    continue
                if '#' not in item['dfcId']:
                    continue
                if today:
                    if not item['published'].startswith(curr_date_str):
                        continue
                if min_price is not None:
                    if float(item['itemPrice']) < min_price:
                        continue
                if max_price is not None:
                    if float(item['itemPrice']) > max_price:
                        continue
                description = item['displayName'] + ': ' + item['summary']
                if match_pattern:
                    if not re.match(match_pattern, description):
                        continue

                start_date_str = date_seconds_to_string(item['published'])
                expire_date_str = date_seconds_to_string(item['expire'])
                share_id = \
                    _get_valid_shared_item_id(owner, item['displayName'])
                if item['dfcId'].startswith('epicyon#'):
                    dfc_id = "epicyon:" + item['dfcId'].split('#')[1]
                else:
                    dfc_id = "dfc-pt:" + item['dfcId'].split('#')[1]
                price_str = item['itemPrice'] + ' ' + item['itemCurrency']
                catalog_item = {
                    "@id": share_id,
                    "@type": "DFC:SuppliedProduct",
                    "DFC:hasType": dfc_id,
                    "DFC:startDate": start_date_str,
                    "DFC:expiryDate": expire_date_str,
                    "DFC:quantity": float(item['itemQty']),
                    "DFC:price": price_str,
                    "DFC:Image": item['imageUrl'],
                    "DFC:description": description
                }
                endpoint['DFC:supplies'].append(catalog_item)
        # only the top level of the accounts directory contains account
        # dirs, so don't recurse into them
        break
    return endpoint
2021-07-25 13:30:42 +00:00
2021-12-29 21:55:09 +00:00
def shares_catalog_csv_endpoint(base_dir: str, http_prefix: str,
                                domain_full: str,
                                path: str, shares_file_type: str) -> str:
    """Returns a CSV version of the shares catalog
    Returns an empty string if there is no catalog or no items
    """
    catalog_json = \
        shares_catalog_endpoint(base_dir, http_prefix, domain_full, path,
                                shares_file_type)
    if not catalog_json:
        return ''
    if not catalog_json.get('DFC:supplies'):
        return ''
    csv_str = \
        'id,type,hasType,startDate,expiryDate,' + \
        'quantity,price,currency,Image,description,\n'
    for item in catalog_json['DFC:supplies']:
        csv_str += '"' + item['@id'] + '",'
        csv_str += '"' + item['@type'] + '",'
        csv_str += '"' + item['DFC:hasType'] + '",'
        csv_str += '"' + item['DFC:startDate'] + '",'
        csv_str += '"' + item['DFC:expiryDate'] + '",'
        csv_str += str(item['DFC:quantity']) + ','
        csv_str += item['DFC:price'].split(' ')[0] + ','
        csv_str += '"' + item['DFC:price'].split(' ')[1] + '",'
        if item.get('DFC:Image'):
            csv_str += '"' + item['DFC:Image'] + '",'
        else:
            # emit an empty field so that rows without an image keep
            # the same number of columns as the header line
            csv_str += '"",'
        description = item['DFC:description'].replace('"', "'")
        csv_str += '"' + description + '",\n'
    return csv_str
2021-07-26 09:40:51 +00:00
2021-12-29 21:55:09 +00:00
def generate_shared_item_federation_tokens(shared_items_federated_domains: [],
                                           base_dir: str) -> {}:
    """Generates tokens for shared item federated domains
    Returns a dict of domain -> token, loading and saving any existing
    tokens file when base_dir is given
    """
    if not shared_items_federated_domains:
        return {}
    tokens_json = {}
    if base_dir:
        tokens_filename = \
            base_dir + '/accounts/sharedItemsFederationTokens.json'
        if os.path.isfile(tokens_filename):
            tokens_json = load_json(tokens_filename, 1, 2)
            if tokens_json is None:
                tokens_json = {}
    tokens_added = False
    for domain_full in shared_items_federated_domains:
        if not tokens_json.get(domain_full):
            # previously an empty string was stored here, which is not
            # a usable token. Generate an unguessable one instead
            tokens_json[domain_full] = secrets.token_urlsafe(64)
            tokens_added = True
    if not tokens_added:
        return tokens_json
    if base_dir:
        save_json(tokens_json, tokens_filename)
    return tokens_json
2021-12-29 21:55:09 +00:00
def update_shared_item_federation_token(base_dir: str,
                                        token_domain_full: str, new_token: str,
                                        debug: bool,
                                        tokens_json: {} = None) -> {}:
    """Updates an individual token for shared item federation
    Returns the updated tokens dict
    """
    if debug:
        print('Updating shared items token for ' + token_domain_full)
    if not tokens_json:
        tokens_json = {}
    if base_dir:
        tokens_filename = \
            base_dir + '/accounts/sharedItemsFederationTokens.json'
        if os.path.isfile(tokens_filename):
            if debug:
                print('Update loading tokens for ' + token_domain_full)
            loaded = load_json(tokens_filename, 1, 2)
            tokens_json = loaded if loaded is not None else {}
    # the entry needs storing when it is absent or differs
    if not tokens_json.get(token_domain_full):
        update_required = True
    else:
        update_required = tokens_json[token_domain_full] != new_token
    if update_required:
        tokens_json[token_domain_full] = new_token
        if base_dir:
            save_json(tokens_json, tokens_filename)
    return tokens_json


def merge_shared_item_tokens(base_dir: str, domain_full: str,
                             new_shared_items_federated_domains: [],
                             tokens_json: {}) -> {}:
    """Reconciles the tokens dict with a changed federation domains list.
    Domains no longer federated are dropped, newly federated domains get
    an empty token, and the result is saved when anything changed and a
    base directory was supplied. Returns the tokens dict
    """
    # domains to drop: no longer federated, but never entries
    # belonging to this instance itself
    stale_domains = \
        [dom for dom in tokens_json
         if not (domain_full and dom.startswith(domain_full)) and
         dom not in new_shared_items_federated_domains]
    for dom in stale_domains:
        del tokens_json[dom]
    # newly federated domains start out with a blank token
    added_domains = \
        [dom for dom in new_shared_items_federated_domains
         if dom not in tokens_json]
    for dom in added_domains:
        tokens_json[dom] = ''
    if base_dir and (stale_domains or added_domains):
        tokens_filename = \
            base_dir + '/accounts/sharedItemsFederationTokens.json'
        save_json(tokens_json, tokens_filename)
    return tokens_json


def create_shared_item_federation_token(base_dir: str,
                                        token_domain_full: str,
                                        force: bool,
                                        tokens_json: {} = None) -> {}:
    """Generates a random federation token for the given domain.
    An existing token is only replaced when force is True.
    Returns the tokens dict, saved to file when base_dir is given
    """
    tokens_json = tokens_json or {}
    tokens_filename = None
    if base_dir:
        tokens_filename = \
            base_dir + '/accounts/sharedItemsFederationTokens.json'
        if os.path.isfile(tokens_filename):
            # tokens stored on disk take precedence
            tokens_json = load_json(tokens_filename, 1, 2) or {}
    if not force and tokens_json.get(token_domain_full):
        # keep the existing token
        return tokens_json
    tokens_json[token_domain_full] = secrets.token_urlsafe(64)
    if base_dir:
        save_json(tokens_json, tokens_filename)
    return tokens_json


def authorize_shared_items(shared_items_federated_domains: [],
                           base_dir: str,
                           origin_domain_full: str,
                           calling_domainFull: str,
                           auth_header: str,
                           debug: bool,
                           tokens_json: {} = None) -> bool:
    """HTTP simple token check for shared item federation.
    The Authorization header carries the raw federation token, which is
    compared in constant time against the stored token for the caller
    """
    # federation must be enabled and the origin must be a member
    if not shared_items_federated_domains:
        # no shared item federation
        return False
    if origin_domain_full not in shared_items_federated_domains:
        if debug:
            print(origin_domain_full +
                  ' is not in the shared items federation list ' +
                  str(shared_items_federated_domains))
        return False
    if 'Basic ' in auth_header:
        if debug:
            print('DEBUG: shared item federation should not use basic auth')
        return False
    # the header itself is the token; strip any line breaks
    supplied_token = auth_header.replace('\n', '').replace('\r', '').strip()
    if not supplied_token:
        if debug:
            print('DEBUG: shared item federation token is empty')
        return False
    if len(supplied_token) < 60:
        if debug:
            print('DEBUG: shared item federation token is too small ' +
                  supplied_token)
        return False
    if not tokens_json:
        # no tokens were supplied, so load them from file
        tokens_filename = \
            base_dir + '/accounts/sharedItemsFederationTokens.json'
        if not os.path.isfile(tokens_filename):
            if debug:
                print('DEBUG: shared item federation tokens file missing ' +
                      tokens_filename)
            return False
        tokens_json = load_json(tokens_filename, 1, 2)
        if not tokens_json:
            return False
    expected_token = tokens_json.get(calling_domainFull)
    if not expected_token:
        if debug:
            print('DEBUG: shared item federation token ' +
                  'check failed for ' + calling_domainFull)
        return False
    # timing-safe comparison against the stored token
    if not constant_time_string_check(expected_token, supplied_token):
        if debug:
            print('DEBUG: shared item federation token ' +
                  'mismatch for ' + calling_domainFull)
        return False
    return True


def _update_federated_shares_cache(session, shared_items_federated_domains: [],
                                   base_dir: str, domain_full: str,
                                   http_prefix: str,
                                   tokens_json: {}, debug: bool,
                                   system_language: str,
                                   shares_file_type: str) -> None:
    """Updates the cache of federated shares for the instance.
    This enables shared items to be available even when other instances
    might not be online.
    tokens_json maps federated domain -> federation token.
    shares_file_type of 'shares' selects each instance's /catalog
    endpoint; any other value selects /wantedItems
    """
    # create directories where catalogs will be stored
    cache_dir = base_dir + '/cache'
    if not os.path.isdir(cache_dir):
        os.mkdir(cache_dir)
    if shares_file_type == 'shares':
        catalogs_dir = cache_dir + '/catalogs'
    else:
        catalogs_dir = cache_dir + '/wantedItems'
    if not os.path.isdir(catalogs_dir):
        os.mkdir(catalogs_dir)

    # headers sent when downloading each catalog
    as_header = {
        "Accept": "application/ld+json",
        "Origin": domain_full
    }
    for federated_domain_full in shared_items_federated_domains:
        # NOTE: federatedDomain does not have a port extension,
        # so may not work in some situations
        if federated_domain_full.startswith(domain_full):
            # only download from instances other than this one
            continue
        if not tokens_json.get(federated_domain_full):
            # no token has been obtained for the other domain, so skip it
            continue
        # avoid waiting on an instance which is currently offline
        if not site_is_active(http_prefix + '://' + federated_domain_full, 10):
            continue
        if shares_file_type == 'shares':
            url = http_prefix + '://' + federated_domain_full + '/catalog'
        else:
            url = http_prefix + '://' + federated_domain_full + '/wantedItems'
        # authenticate using the shared items federation token
        as_header['Authorization'] = tokens_json[federated_domain_full]
        catalog_json = get_json(session, url, as_header, None,
                                debug, __version__, http_prefix, None)
        if not catalog_json:
            print('WARN: failed to download shared items catalog for ' +
                  federated_domain_full)
            continue
        # store the raw DFC catalog, then convert it to the internal
        # shares format used for search and display
        catalog_filename = catalogs_dir + '/' + federated_domain_full + '.json'
        if save_json(catalog_json, catalog_filename):
            print('Downloaded shared items catalog for ' +
                  federated_domain_full)
            shares_json = _dfc_to_shares_format(catalog_json,
                                                base_dir, system_language,
                                                http_prefix, domain_full)
            if shares_json:
                shares_filename = \
                    catalogs_dir + '/' + federated_domain_full + '.' + \
                    shares_file_type + '.json'
                save_json(shares_json, shares_filename)
                print('Converted shares catalog for ' + federated_domain_full)
        else:
            # saving failed; pause briefly before trying the next domain
            time.sleep(2)


def run_federated_shares_watchdog(project_version: str, httpd) -> None:
    """This tries to keep the federated shares update thread
    running even if it dies.
    project_version is currently unused but kept for consistency
    with the other watchdog functions
    """
    print('Starting federated shares watchdog')
    # keep a clone of the federated shares daemon thread so that it can
    # be used as a template when restarting.
    # Fix: previously this cloned httpd.thrPostSchedule, which would
    # restart the daemon with the post schedule thread's arguments
    federated_shares_original = \
        httpd.thrFederatedSharesDaemon.clone(run_federated_shares_daemon)
    httpd.thrFederatedSharesDaemon.start()
    while True:
        time.sleep(55)
        if httpd.thrFederatedSharesDaemon.is_alive():
            continue
        # the daemon thread has died, so restart it from the clone
        httpd.thrFederatedSharesDaemon.kill()
        httpd.thrFederatedSharesDaemon = \
            federated_shares_original.clone(run_federated_shares_daemon)
        httpd.thrFederatedSharesDaemon.start()
        print('Restarting federated shares daemon...')
2021-12-29 21:55:09 +00:00
def _generate_next_shares_token_update(base_dir: str,
2022-01-03 16:58:50 +00:00
min_days: int, max_days: int) -> None:
"""Creates a file containing the next date when the shared items token
for this instance will be updated
"""
2022-01-03 16:58:50 +00:00
token_update_dir = base_dir + '/accounts'
2021-12-25 16:17:53 +00:00
if not os.path.isdir(base_dir):
os.mkdir(base_dir)
2022-01-03 16:58:50 +00:00
if not os.path.isdir(token_update_dir):
os.mkdir(token_update_dir)
token_update_filename = token_update_dir + '/.tokenUpdate'
next_update_sec = None
if os.path.isfile(token_update_filename):
with open(token_update_filename, 'r') as fp_tok:
next_update_str = fp_tok.read()
if next_update_str:
if next_update_str.isdigit():
next_update_sec = int(next_update_str)
2021-12-26 13:17:46 +00:00
curr_time = int(time.time())
updated = False
2022-01-03 16:58:50 +00:00
if next_update_sec:
if curr_time > next_update_sec:
next_update_days = randint(min_days, max_days)
next_update_interval = int(60 * 60 * 24 * next_update_days)
next_update_sec += next_update_interval
updated = True
else:
2022-01-03 16:58:50 +00:00
next_update_days = randint(min_days, max_days)
next_update_interval = int(60 * 60 * 24 * next_update_days)
next_update_sec = curr_time + next_update_interval
updated = True
if updated:
2022-01-03 16:58:50 +00:00
with open(token_update_filename, 'w+') as fp_tok:
fp_tok.write(str(next_update_sec))
2021-12-29 21:55:09 +00:00
def _regenerate_shares_token(base_dir: str, domain_full: str,
2022-01-03 16:58:50 +00:00
min_days: int, max_days: int, httpd) -> None:
"""Occasionally the shared items token for your instance is updated.
Scenario:
- You share items with $FriendlyInstance
- Some time later under new management
$FriendlyInstance becomes $HostileInstance
- You block $HostileInstance and remove them from your
federated shares domains list
- $HostileInstance still knows your shared items token,
and can still have access to your shared items if it presents a
spoofed Origin header together with the token
By rotating the token occasionally $HostileInstance will eventually
lose access to your federated shares. If other instances within your
federated shares list of domains continue to follow and communicate
then they will receive the new token automatically
"""
2022-01-03 16:58:50 +00:00
token_update_filename = base_dir + '/accounts/.tokenUpdate'
if not os.path.isfile(token_update_filename):
return
2022-01-03 16:58:50 +00:00
next_update_sec = None
with open(token_update_filename, 'r') as fp_tok:
next_update_str = fp_tok.read()
if next_update_str:
if next_update_str.isdigit():
next_update_sec = int(next_update_str)
if not next_update_sec:
return
2021-12-26 13:17:46 +00:00
curr_time = int(time.time())
2022-01-03 16:58:50 +00:00
if curr_time <= next_update_sec:
return
2021-12-29 21:55:09 +00:00
create_shared_item_federation_token(base_dir, domain_full, True, None)
2022-01-03 16:58:50 +00:00
_generate_next_shares_token_update(base_dir, min_days, max_days)
# update the tokens used within the daemon
2021-12-25 18:05:01 +00:00
shared_fed_domains = httpd.shared_items_federated_domains
2022-01-01 15:11:42 +00:00
httpd.shared_item_federation_tokens = \
2021-12-29 21:55:09 +00:00
generate_shared_item_federation_tokens(shared_fed_domains,
base_dir)


def run_federated_shares_daemon(base_dir: str, httpd, http_prefix: str,
                                domain_full: str, proxy_type: str, debug: bool,
                                system_language: str) -> None:
    """Runs the daemon used to update federated shared items.
    Loops forever, downloading the shared item catalogs of the
    federated instances roughly every six hours
    """
    seconds_per_hour = 60 * 60
    file_check_interval_sec = 120
    # wait for the rest of the server to start up
    time.sleep(60)
    # the token for this instance will be changed every 7-14 days
    min_days = 7
    max_days = 14
    _generate_next_shares_token_update(base_dir, min_days, max_days)
    while True:
        shared_items_federated_domains_str = \
            get_config_param(base_dir, 'sharedItemsFederatedDomains')
        if not shared_items_federated_domains_str:
            # federation not configured; check again later
            time.sleep(file_check_interval_sec)
            continue
        # occasionally change the federated shared items token
        # for this instance
        _regenerate_shares_token(base_dir, domain_full,
                                 min_days, max_days, httpd)
        # get a list of the domains within the shared items federation
        shared_items_federated_domains = []
        fed_domains_list = \
            shared_items_federated_domains_str.split(',')
        for shared_fed_domain in fed_domains_list:
            shared_items_federated_domains.append(shared_fed_domain.strip())
        if not shared_items_federated_domains:
            time.sleep(file_check_interval_sec)
            continue
        # load the tokens
        tokens_filename = \
            base_dir + '/accounts/sharedItemsFederationTokens.json'
        if not os.path.isfile(tokens_filename):
            time.sleep(file_check_interval_sec)
            continue
        tokens_json = load_json(tokens_filename, 1, 2)
        if not tokens_json:
            time.sleep(file_check_interval_sec)
            continue
        # refresh the cached catalogs for each type of shared item
        session = create_session(proxy_type)
        for shares_file_type in get_shares_files_list():
            _update_federated_shares_cache(session,
                                           shared_items_federated_domains,
                                           base_dir, domain_full, http_prefix,
                                           tokens_json, debug, system_language,
                                           shares_file_type)
        time.sleep(seconds_per_hour * 6)
2022-01-03 16:58:50 +00:00
def _dfc_to_shares_format(catalog_json: {},
2021-12-29 21:55:09 +00:00
base_dir: str, system_language: str,
http_prefix: str, domain_full: str) -> {}:
"""Converts DFC format into the internal formal used to store shared items.
This simplifies subsequent search and display
"""
2022-01-03 16:58:50 +00:00
if not catalog_json.get('DFC:supplies'):
return {}
2022-01-03 16:58:50 +00:00
shares_json = {}
2022-01-03 16:58:50 +00:00
dfc_ids = {}
product_types_list = get_category_types(base_dir)
for product_type in product_types_list:
dfc_ids[product_type] = \
_load_dfc_ids(base_dir, system_language, product_type,
2021-12-29 21:55:09 +00:00
http_prefix, domain_full)
2021-12-26 13:17:46 +00:00
curr_time = int(time.time())
2022-01-03 16:58:50 +00:00
for item in catalog_json['DFC:supplies']:
if not item.get('@id') or \
not item.get('@type') or \
not item.get('DFC:hasType') or \
not item.get('DFC:startDate') or \
not item.get('DFC:expiryDate') or \
not item.get('DFC:quantity') or \
not item.get('DFC:price') or \
not item.get('DFC:description'):
continue
if ' ' not in item['DFC:price']:
continue
if ':' not in item['DFC:description']:
continue
if ':' not in item['DFC:hasType']:
continue
2022-01-03 16:58:50 +00:00
start_time_sec = date_string_to_seconds(item['DFC:startDate'])
if not start_time_sec:
2021-07-28 09:35:21 +00:00
continue
2022-01-03 16:58:50 +00:00
expiry_time_sec = date_string_to_seconds(item['DFC:expiryDate'])
if not expiry_time_sec:
continue
2022-01-03 16:58:50 +00:00
if expiry_time_sec < curr_time:
# has expired
continue
2021-07-27 22:06:35 +00:00
if item['DFC:hasType'].startswith('epicyon:'):
2022-01-03 16:58:50 +00:00
item_type = item['DFC:hasType'].split(':')[1]
item_type = item_type.replace('_', ' ')
item_category = 'non-food'
product_type = None
2021-07-27 22:06:35 +00:00
else:
2022-01-03 16:58:50 +00:00
has_type = item['DFC:hasType'].split(':')[1]
item_type = None
product_type = None
for prod_type in product_types_list:
item_type = \
_getshare_type_from_dfc_id(has_type, dfc_ids[prod_type])
if item_type:
product_type = prod_type
break
2022-01-03 16:58:50 +00:00
item_category = 'food'
if not item_type:
continue
2021-07-28 21:28:41 +00:00
2022-01-03 16:58:50 +00:00
all_text = \
item['DFC:description'] + ' ' + item_type + ' ' + item_category
if is_filtered_globally(base_dir, all_text):
2021-07-28 21:28:41 +00:00
continue
2022-01-03 16:58:50 +00:00
dfc_id = None
if product_type:
dfc_id = dfc_ids[product_type][item_type]
item_id = item['@id']
description = item['DFC:description'].split(':', 1)[1].strip()
2021-07-28 21:28:41 +00:00
2022-01-03 16:58:50 +00:00
image_url = ''
2021-09-19 14:34:54 +00:00
if item.get('DFC:Image'):
2022-01-03 16:58:50 +00:00
image_url = item['DFC:Image']
shares_json[item_id] = {
"displayName": item['DFC:description'].split(':')[0],
"summary": description,
2022-01-03 16:58:50 +00:00
"imageUrl": image_url,
2021-07-28 09:44:19 +00:00
"itemQty": float(item['DFC:quantity']),
2022-01-03 16:58:50 +00:00
"dfcId": dfc_id,
"itemType": item_type,
"category": item_category,
"location": "",
2022-01-03 16:58:50 +00:00
"published": start_time_sec,
"expire": expiry_time_sec,
2021-07-27 20:26:10 +00:00
"itemPrice": item['DFC:price'].split(' ')[0],
"itemCurrency": item['DFC:price'].split(' ')[1]
}
2022-01-03 16:58:50 +00:00
return shares_json


def share_category_icon(category: str) -> str:
    """Returns the unicode icon for the given category,
    or an empty string for an unrecognized category
    """
    icons = {
        'accommodation': '🏠',
        'clothes': '👚',
        'tools': '🔧',
        'food': '🍏'
    }
    return icons.get(category, '')