2020-04-04 11:27:51 +00:00
|
|
|
|
__filename__ = "shares.py"
|
|
|
|
|
__author__ = "Bob Mottram"
|
|
|
|
|
__license__ = "AGPL3+"
|
2021-01-26 10:07:42 +00:00
|
|
|
|
__version__ = "1.2.0"
|
2020-04-04 11:27:51 +00:00
|
|
|
|
__maintainer__ = "Bob Mottram"
|
2021-09-10 16:14:50 +00:00
|
|
|
|
__email__ = "bob@libreserver.org"
|
2020-04-04 11:27:51 +00:00
|
|
|
|
__status__ = "Production"
|
2021-06-15 15:08:12 +00:00
|
|
|
|
__module_group__ = "Timeline"
|
2020-04-04 11:27:51 +00:00
|
|
|
|
|
2019-07-23 12:33:09 +00:00
|
|
|
|
import os
|
2021-07-25 10:17:39 +00:00
|
|
|
|
import re
|
2021-07-26 09:40:51 +00:00
|
|
|
|
import secrets
|
2019-07-23 12:33:09 +00:00
|
|
|
|
import time
|
2021-07-24 22:08:11 +00:00
|
|
|
|
import datetime
|
2021-08-07 10:29:40 +00:00
|
|
|
|
from random import randint
|
2021-08-04 12:04:35 +00:00
|
|
|
|
from pprint import pprint
|
2021-07-26 17:54:13 +00:00
|
|
|
|
from session import getJson
|
2019-07-23 12:33:09 +00:00
|
|
|
|
from webfinger import webfingerHandle
|
|
|
|
|
from auth import createBasicAuthHeader
|
2021-07-26 10:00:54 +00:00
|
|
|
|
from auth import constantTimeStringCheck
|
2019-07-23 12:33:09 +00:00
|
|
|
|
from posts import getPersonBox
|
|
|
|
|
from session import postJson
|
2020-04-04 11:27:51 +00:00
|
|
|
|
from session import postImage
|
2021-07-26 17:54:13 +00:00
|
|
|
|
from session import createSession
|
2021-10-13 10:11:02 +00:00
|
|
|
|
from utils import hasObjectStringType
|
2021-07-28 09:35:21 +00:00
|
|
|
|
from utils import dateStringToSeconds
|
|
|
|
|
from utils import dateSecondsToString
|
2021-07-26 17:54:13 +00:00
|
|
|
|
from utils import getConfigParam
|
2020-12-16 11:19:16 +00:00
|
|
|
|
from utils import getFullDomain
|
2019-07-27 22:48:34 +00:00
|
|
|
|
from utils import validNickname
|
2019-10-22 11:55:06 +00:00
|
|
|
|
from utils import loadJson
|
|
|
|
|
from utils import saveJson
|
2020-11-21 11:54:29 +00:00
|
|
|
|
from utils import getImageExtensions
|
2021-06-26 14:21:24 +00:00
|
|
|
|
from utils import removeDomainPort
|
2021-07-04 18:01:31 +00:00
|
|
|
|
from utils import isAccountDir
|
2021-07-13 21:59:53 +00:00
|
|
|
|
from utils import acctDir
|
2021-07-25 13:09:39 +00:00
|
|
|
|
from utils import isfloat
|
2021-08-08 18:39:03 +00:00
|
|
|
|
from utils import getCategoryTypes
|
2021-08-09 13:07:32 +00:00
|
|
|
|
from utils import getSharesFilesList
|
2021-08-14 11:13:39 +00:00
|
|
|
|
from utils import localActorUrl
|
2021-05-09 12:17:55 +00:00
|
|
|
|
from media import processMetaData
|
2021-08-13 20:18:36 +00:00
|
|
|
|
from media import convertImageToLowBandwidth
|
2021-07-28 21:28:41 +00:00
|
|
|
|
from filters import isFilteredGlobally
|
2021-07-29 19:27:36 +00:00
|
|
|
|
from siteactive import siteIsActive
|
2021-08-07 17:25:07 +00:00
|
|
|
|
from content import getPriceFromString
|
2021-08-09 11:00:23 +00:00
|
|
|
|
from blocking import isBlocked
|
2019-07-23 12:33:09 +00:00
|
|
|
|
|
2020-04-04 11:27:51 +00:00
|
|
|
|
|
2021-08-03 19:08:28 +00:00
|
|
|
|
def _loadDfcIds(baseDir: str, systemLanguage: str,
                productType: str,
                httpPrefix: str, domainFull: str) -> {}:
    """Load the product types ontology used to assign ids to shared items.

    Returns a dict mapping lowercased labels (in the system language)
    to ontology ids, or None when the ontology cannot be used.
    """
    # prefer a site-specific custom ontology file when one exists
    ontologyFilename = \
        baseDir + '/ontology/custom' + productType.title() + 'Types.json'
    if not os.path.isfile(ontologyFilename):
        ontologyFilename = \
            baseDir + '/ontology/' + productType + 'Types.json'

    ontology = loadJson(ontologyFilename)
    if not ontology:
        print('Unable to load ontology: ' + ontologyFilename)
        return None
    if not ontology.get('@graph'):
        print('No @graph list within ontology')
        return None
    graph = ontology['@graph']
    if len(graph) == 0:
        print('@graph list has no contents')
        return None
    if not graph[0].get('rdfs:label'):
        print('@graph list entry has no rdfs:label')
        return None

    # the ontology is only usable if its first entry carries a label
    # in the configured system language
    firstLabels = graph[0]['rdfs:label']
    if not any(lbl.get('@language') == systemLanguage
               for lbl in firstLabels):
        print('productTypes ontology does not contain the language ' +
              systemLanguage)
        return None

    idLookup = {}
    for entry in graph:
        if not entry.get('@id'):
            continue
        if not entry.get('rdfs:label'):
            continue
        for lbl in entry['rdfs:label']:
            if not lbl.get('@language'):
                continue
            if not lbl.get('@value'):
                continue
            if lbl['@language'] != systemLanguage:
                continue
            # rewrite static ontology urls so ids point at this instance
            entryId = \
                entry['@id'].replace('http://static.datafoodconsortium.org',
                                     httpPrefix + '://' + domainFull)
            idLookup[lbl['@value'].lower()] = entryId
            break
    return idLookup
|
2021-07-24 14:38:43 +00:00
|
|
|
|
|
|
|
|
|
|
2021-07-28 16:29:40 +00:00
|
|
|
|
def _getValidSharedItemID(actor: str, displayName: str) -> str:
|
2019-11-02 10:24:25 +00:00
|
|
|
|
"""Removes any invalid characters from the display name to
|
|
|
|
|
produce an item ID
|
|
|
|
|
"""
|
2021-07-27 19:13:55 +00:00
|
|
|
|
removeChars = (' ', '\n', '\r', '#')
|
2020-06-11 22:04:41 +00:00
|
|
|
|
for ch in removeChars:
|
|
|
|
|
displayName = displayName.replace(ch, '')
|
|
|
|
|
removeChars2 = ('+', '/', '\\', '?', '&')
|
|
|
|
|
for ch in removeChars2:
|
|
|
|
|
displayName = displayName.replace(ch, '-')
|
|
|
|
|
displayName = displayName.replace('.', '_')
|
2020-05-22 11:32:38 +00:00
|
|
|
|
displayName = displayName.replace("’", "'")
|
2021-07-27 20:14:13 +00:00
|
|
|
|
actor = actor.replace('://', '___')
|
|
|
|
|
actor = actor.replace('/', '--')
|
2021-07-27 20:17:36 +00:00
|
|
|
|
return actor + '--shareditems--' + displayName
|
2019-11-02 10:24:25 +00:00
|
|
|
|
|
2020-04-04 11:27:51 +00:00
|
|
|
|
|
2021-07-25 09:33:59 +00:00
|
|
|
|
def removeSharedItem(baseDir: str, nickname: str, domain: str,
                     itemID: str,
                     httpPrefix: str, domainFull: str,
                     sharesFileType: str) -> None:
    """Remove a single shared item, together with any uploaded image
    for it, from the given account's shares or wanted collection.
    """
    sharesFilename = \
        acctDir(baseDir, nickname, domain) + '/' + sharesFileType + '.json'
    if not os.path.isfile(sharesFilename):
        print('ERROR: remove shared item, missing ' +
              sharesFileType + '.json ' + sharesFilename)
        return

    sharesJson = loadJson(sharesFilename)
    if not sharesJson:
        print('ERROR: remove shared item, ' +
              sharesFileType + '.json could not be loaded from ' +
              sharesFilename)
        return

    if not sharesJson.get(itemID):
        print('ERROR: share index "' + itemID +
              '" does not exist in ' + sharesFilename)
        return

    # delete any image which was uploaded for this item
    itemIDfile = baseDir + '/sharefiles/' + nickname + '/' + itemID
    if sharesJson[itemID]['imageUrl']:
        for ext in getImageExtensions():
            if not sharesJson[itemID]['imageUrl'].endswith('.' + ext):
                continue
            if not os.path.isfile(itemIDfile + '.' + ext):
                continue
            try:
                os.remove(itemIDfile + '.' + ext)
            except OSError:
                print('EX: removeSharedItem unable to delete ' +
                      itemIDfile + '.' + ext)

    # remove the entry itself and persist the collection
    del sharesJson[itemID]
    saveJson(sharesJson, sharesFilename)
|
|
|
|
|
|
|
|
|
|
|
2021-07-27 12:55:44 +00:00
|
|
|
|
def _addShareDurationSec(duration: str, published: int) -> int:
|
2021-06-26 19:01:48 +00:00
|
|
|
|
"""Returns the duration for the shared item in seconds
|
|
|
|
|
"""
|
|
|
|
|
if ' ' not in duration:
|
|
|
|
|
return 0
|
|
|
|
|
durationList = duration.split(' ')
|
|
|
|
|
if not durationList[0].isdigit():
|
|
|
|
|
return 0
|
|
|
|
|
if 'hour' in durationList[1]:
|
|
|
|
|
return published + (int(durationList[0]) * 60 * 60)
|
|
|
|
|
if 'day' in durationList[1]:
|
|
|
|
|
return published + (int(durationList[0]) * 60 * 60 * 24)
|
|
|
|
|
if 'week' in durationList[1]:
|
|
|
|
|
return published + (int(durationList[0]) * 60 * 60 * 24 * 7)
|
|
|
|
|
if 'month' in durationList[1]:
|
|
|
|
|
return published + (int(durationList[0]) * 60 * 60 * 24 * 30)
|
|
|
|
|
if 'year' in durationList[1]:
|
|
|
|
|
return published + (int(durationList[0]) * 60 * 60 * 24 * 365)
|
|
|
|
|
return 0
|
|
|
|
|
|
|
|
|
|
|
2021-08-08 18:39:03 +00:00
|
|
|
|
def _dfcProductTypeFromCategory(baseDir: str,
                                itemCategory: str, translate: {}) -> str:
    """Return the DFC product type matching the shared item category,
    or None when there is no match.
    The result is used to select an appropriate ontology file,
    such as ontology/foodTypes.json
    """
    categoryLower = itemCategory.lower()
    for candidate in getCategoryTypes(baseDir):
        # compare against the translated name when one is available,
        # otherwise against the raw product type
        needle = translate.get(candidate) or candidate
        if needle in categoryLower:
            return candidate
    return None
|
|
|
|
|
|
|
|
|
|
|
2021-07-24 15:05:21 +00:00
|
|
|
|
def _getshareDfcId(baseDir: str, systemLanguage: str,
                   itemType: str, itemCategory: str,
                   translate: {},
                   httpPrefix: str, domainFull: str,
                   dfcIds: {} = None) -> str:
    """Attempt to obtain a DFC Id for the shared item,
    based upon the productTypes ontology.
    See https://github.com/datafoodconsortium/ontology
    """
    # does the category field match any product type ontology
    # files in the ontology subdirectory?
    matchedProductType = \
        _dfcProductTypeFromCategory(baseDir, itemCategory, translate)
    if not matchedProductType:
        # no ontology match, so fall back to a local epicyon id
        return 'epicyon#' + itemType.replace(' ', '_').replace('.', '')

    if not dfcIds:
        dfcIds = _loadDfcIds(baseDir, systemLanguage, matchedProductType,
                             httpPrefix, domainFull)
        if not dfcIds:
            return ''

    itemTypeLower = itemType.lower()

    # prefer the longest label which appears within the item type
    bestName = ''
    bestId = ''
    for labelName, uri in dfcIds.items():
        if labelName not in itemTypeLower:
            continue
        if len(labelName) > len(bestName):
            bestName = labelName
            bestId = uri
    if bestId:
        return bestId

    # otherwise use a bag of words match, picking the label
    # with the most words in common with the item type
    bestScore = 0
    for labelName, uri in dfcIds.items():
        wordList = labelName.replace('-', ' ').split(' ')
        score = sum(1 for wrd in wordList if wrd in itemTypeLower)
        if score > bestScore:
            bestScore = score
            bestId = uri
    return bestId
|
|
|
|
|
|
|
|
|
|
|
2021-07-27 12:55:44 +00:00
|
|
|
|
def _getshareTypeFromDfcId(dfcUri: str, dfcIds: {}) -> str:
|
|
|
|
|
"""Attempts to obtain a share item type from its DFC Id,
|
|
|
|
|
based upon productTypes ontology.
|
|
|
|
|
See https://github.com/datafoodconsortium/ontology
|
|
|
|
|
"""
|
2021-07-27 21:59:49 +00:00
|
|
|
|
if dfcUri.startswith('epicyon#'):
|
|
|
|
|
itemType = dfcUri.split('#')[1]
|
|
|
|
|
itemType = itemType.replace('_', ' ')
|
|
|
|
|
return itemType
|
|
|
|
|
|
2021-07-27 12:55:44 +00:00
|
|
|
|
for name, uri in dfcIds.items():
|
|
|
|
|
if uri.endswith('#' + dfcUri):
|
|
|
|
|
return name
|
|
|
|
|
elif uri == dfcUri:
|
|
|
|
|
return name
|
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
|
|
2021-07-25 09:33:59 +00:00
|
|
|
|
def _indicateNewShareAvailable(baseDir: str, httpPrefix: str,
                               nickname: str, domain: str,
                               domainFull: str, sharesFileType: str) -> None:
    """Indicate to each account that a new share is available.

    Writes a .newShare (or .newWanted) marker file into each account
    directory, containing the url of the shares/wanted timeline, so
    that the web interface can show a notification.
    """
    for subdir, dirs, files in os.walk(baseDir + '/accounts'):
        for handle in dirs:
            if not isAccountDir(handle):
                continue
            accountDir = baseDir + '/accounts/' + handle
            # marker filename depends on whether this is a share or a want
            if sharesFileType == 'shares':
                newShareFile = accountDir + '/.newShare'
            else:
                newShareFile = accountDir + '/.newWanted'
            # notification already pending for this account
            if os.path.isfile(newShareFile):
                continue
            accountNickname = handle.split('@')[0]
            # does this account block you?
            if accountNickname != nickname:
                if isBlocked(baseDir, accountNickname, domain,
                             nickname, domain, None):
                    continue
            localActor = localActorUrl(httpPrefix, accountNickname, domainFull)
            try:
                with open(newShareFile, 'w+') as fp:
                    # marker contains the timeline url to show
                    if sharesFileType == 'shares':
                        fp.write(localActor + '/tlshares')
                    else:
                        fp.write(localActor + '/tlwanted')
            except OSError:
                print('EX: _indicateNewShareAvailable unable to write ' +
                      str(newShareFile))
        # only the top level of the accounts directory is needed
        break
|
|
|
|
|
|
|
|
|
|
|
2020-04-04 11:27:51 +00:00
|
|
|
|
def addShare(baseDir: str,
             httpPrefix: str, nickname: str, domain: str, port: int,
             displayName: str, summary: str, imageFilename: str,
             itemQty: float, itemType: str, itemCategory: str, location: str,
             duration: str, debug: bool, city: str,
             price: str, currency: str,
             systemLanguage: str, translate: {},
             sharesFileType: str, lowBandwidth: bool,
             contentLicenseUrl: str) -> None:
    """Add a new shared (or wanted) item for the given account.

    The item is stored in the account's shares/wanted json file,
    any uploaded image is processed and moved into /sharefiles,
    and other accounts are notified that a new share exists.
    Items containing globally filtered words are rejected.
    """
    # reject items whose text hits the global word filter
    if isFilteredGlobally(baseDir,
                          displayName + ' ' + summary + ' ' +
                          itemType + ' ' + itemCategory):
        print('Shared item was filtered due to content')
        return
    sharesFilename = \
        acctDir(baseDir, nickname, domain) + '/' + sharesFileType + '.json'
    sharesJson = {}
    if os.path.isfile(sharesFilename):
        sharesJson = loadJson(sharesFilename, 1, 2)

    duration = duration.lower()
    published = int(time.time())
    # expiry time in seconds since epoch (0 if duration unparseable)
    durationSec = _addShareDurationSec(duration, published)

    domainFull = getFullDomain(domain, port)
    actor = localActorUrl(httpPrefix, nickname, domainFull)
    itemID = _getValidSharedItemID(actor, displayName)
    dfcId = _getshareDfcId(baseDir, systemLanguage,
                           itemType, itemCategory, translate,
                           httpPrefix, domainFull)

    # has an image for this share been uploaded?
    imageUrl = None
    moveImage = False
    if not imageFilename:
        # look for a pending upload in the account directory
        sharesImageFilename = \
            acctDir(baseDir, nickname, domain) + '/upload'
        formats = getImageExtensions()
        for ext in formats:
            if os.path.isfile(sharesImageFilename + '.' + ext):
                imageFilename = sharesImageFilename + '.' + ext
                moveImage = True

    # copy or move the image for the shared item to its destination
    if imageFilename:
        if os.path.isfile(imageFilename):
            if not os.path.isdir(baseDir + '/sharefiles'):
                os.mkdir(baseDir + '/sharefiles')
            if not os.path.isdir(baseDir + '/sharefiles/' + nickname):
                os.mkdir(baseDir + '/sharefiles/' + nickname)
            itemIDfile = baseDir + '/sharefiles/' + nickname + '/' + itemID
            formats = getImageExtensions()
            for ext in formats:
                if not imageFilename.endswith('.' + ext):
                    continue
                if lowBandwidth:
                    convertImageToLowBandwidth(imageFilename)
                # strip exif metadata and apply the content license
                processMetaData(baseDir, nickname, domain,
                                imageFilename, itemIDfile + '.' + ext,
                                city, contentLicenseUrl)
                if moveImage:
                    try:
                        os.remove(imageFilename)
                    except OSError:
                        print('EX: addShare unable to delete ' +
                              str(imageFilename))
                imageUrl = \
                    httpPrefix + '://' + domainFull + \
                    '/sharefiles/' + nickname + '/' + itemID + '.' + ext

    sharesJson[itemID] = {
        "displayName": displayName,
        "summary": summary,
        "imageUrl": imageUrl,
        "itemQty": float(itemQty),
        "dfcId": dfcId,
        "itemType": itemType,
        "category": itemCategory,
        "location": location,
        "published": published,
        "expire": durationSec,
        "itemPrice": price,
        "itemCurrency": currency
    }

    saveJson(sharesJson, sharesFilename)

    _indicateNewShareAvailable(baseDir, httpPrefix,
                               nickname, domain, domainFull,
                               sharesFileType)
|
2019-11-02 11:15:13 +00:00
|
|
|
|
|
2020-04-04 11:27:51 +00:00
|
|
|
|
|
2019-10-17 09:58:30 +00:00
|
|
|
|
def expireShares(baseDir: str) -> None:
    """Remove expired items from the shares and wanted
    collections of every account.
    """
    for subdir, dirs, files in os.walk(baseDir + '/accounts'):
        for accountHandle in dirs:
            if not isAccountDir(accountHandle):
                continue
            handleSections = accountHandle.split('@')
            accountNickname = handleSections[0]
            accountDomain = handleSections[1]
            for collectionType in getSharesFilesList():
                _expireSharesForAccount(baseDir, accountNickname,
                                        accountDomain, collectionType)
        # only the top level of the accounts directory is needed
        break
|
2020-04-04 11:27:51 +00:00
|
|
|
|
|
2019-10-17 09:58:30 +00:00
|
|
|
|
|
2021-08-09 13:07:32 +00:00
|
|
|
|
def _expireSharesForAccount(baseDir: str, nickname: str, domain: str,
                            sharesFileType: str) -> None:
    """Remove expired items from the shares or wanted collection
    of a single account.
    """
    handleDomain = removeDomainPort(domain)
    handle = nickname + '@' + handleDomain
    sharesFilename = \
        baseDir + '/accounts/' + handle + '/' + sharesFileType + '.json'
    if not os.path.isfile(sharesFilename):
        return
    sharesJson = loadJson(sharesFilename, 1, 2)
    if not sharesJson:
        return

    # collect the ids of items which have passed their expiry time
    currTime = int(time.time())
    expiredIds = [itemID for itemID, item in sharesJson.items()
                  if currTime > item['expire']]
    if not expiredIds:
        return

    for itemID in expiredIds:
        del sharesJson[itemID]
        # remove any associated images
        itemIDfile = baseDir + '/sharefiles/' + nickname + '/' + itemID
        for ext in getImageExtensions():
            if not os.path.isfile(itemIDfile + '.' + ext):
                continue
            try:
                os.remove(itemIDfile + '.' + ext)
            except OSError:
                print('EX: _expireSharesForAccount unable to delete ' +
                      itemIDfile + '.' + ext)
    saveJson(sharesJson, sharesFilename)
|
2020-04-04 11:27:51 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def getSharesFeedForPerson(baseDir: str,
                           domain: str, port: int,
                           path: str, httpPrefix: str,
                           sharesFileType: str,
                           sharesPerPage: int) -> {}:
    """Return the shares for an account from GET requests.

    Without a ?page= parameter an OrderedCollection header is
    returned; with one, an OrderedCollectionPage containing up to
    sharesPerPage items. Returns None if the path does not refer
    to a valid shares feed.
    """
    if '/' + sharesFileType not in path:
        return None
    # handle page numbers
    headerOnly = True
    pageNumber = None
    if '?page=' in path:
        pageNumber = path.split('?page=')[1]
        if pageNumber == 'true':
            pageNumber = 1
        else:
            try:
                pageNumber = int(pageNumber)
            except ValueError:
                # narrow catch: only the int() conversion can fail here;
                # fall back to the first page rather than keeping a
                # string which would break the arithmetic below
                print('EX: getSharesFeedForPerson unable to convert to int ' +
                      str(pageNumber))
                pageNumber = 1
        path = path.split('?page=')[0]
        headerOnly = False

    if not path.endswith('/' + sharesFileType):
        return None
    nickname = None
    if path.startswith('/users/'):
        nickname = \
            path.replace('/users/', '', 1).replace('/' + sharesFileType, '')
    if path.startswith('/@'):
        nickname = \
            path.replace('/@', '', 1).replace('/' + sharesFileType, '')
    if not nickname:
        return None
    if not validNickname(domain, nickname):
        return None

    domain = getFullDomain(domain, port)

    handleDomain = removeDomainPort(domain)
    sharesFilename = \
        acctDir(baseDir, nickname, handleDomain) + '/' + \
        sharesFileType + '.json'

    if headerOnly:
        # collection summary only: report the total number of items
        noOfShares = 0
        if os.path.isfile(sharesFilename):
            sharesJson = loadJson(sharesFilename)
            if sharesJson:
                noOfShares = len(sharesJson)
        idStr = localActorUrl(httpPrefix, nickname, domain)
        shares = {
            '@context': 'https://www.w3.org/ns/activitystreams',
            'first': idStr + '/' + sharesFileType + '?page=1',
            'id': idStr + '/' + sharesFileType,
            'totalItems': str(noOfShares),
            'type': 'OrderedCollection'
        }
        return shares

    if not pageNumber:
        pageNumber = 1

    nextPageNumber = int(pageNumber + 1)
    idStr = localActorUrl(httpPrefix, nickname, domain)
    shares = {
        '@context': 'https://www.w3.org/ns/activitystreams',
        'id': idStr + '/' + sharesFileType + '?page=' + str(pageNumber),
        'orderedItems': [],
        'partOf': idStr + '/' + sharesFileType,
        'totalItems': 0,
        'type': 'OrderedCollectionPage'
    }

    if not os.path.isfile(sharesFilename):
        return shares
    currPage = 1
    pageCtr = 0
    totalCtr = 0

    sharesJson = loadJson(sharesFilename)
    if sharesJson:
        # walk all items, collecting only those on the requested page
        for itemID, item in sharesJson.items():
            pageCtr += 1
            totalCtr += 1
            if currPage == pageNumber:
                item['shareId'] = itemID
                shares['orderedItems'].append(item)
            if pageCtr >= sharesPerPage:
                pageCtr = 0
                currPage += 1
    shares['totalItems'] = totalCtr
    lastPage = int(totalCtr / sharesPerPage)
    if lastPage < 1:
        lastPage = 1
    if nextPageNumber > lastPage:
        shares['next'] = \
            localActorUrl(httpPrefix, nickname, domain) + \
            '/' + sharesFileType + '?page=' + str(lastPage)
    return shares
|
2019-07-23 19:02:26 +00:00
|
|
|
|
|
2020-04-04 11:27:51 +00:00
|
|
|
|
|
|
|
|
|
def sendShareViaServer(baseDir, session,
                       fromNickname: str, password: str,
                       fromDomain: str, fromPort: int,
                       httpPrefix: str, displayName: str,
                       summary: str, imageFilename: str,
                       itemQty: float, itemType: str, itemCategory: str,
                       location: str, duration: str,
                       cachedWebfingers: {}, personCache: {},
                       debug: bool, projectVersion: str,
                       itemPrice: str, itemCurrency: str,
                       signingPrivateKeyPem: str) -> {}:
    """Creates an item share via c2s

    Builds an ActivityPub Add/Offer activity describing the shared item,
    optionally uploads an attached image, then posts the activity to the
    sending account's outbox.
    Returns the new Add activity dict on success, or an integer error
    code: 6 no session, 1 webfinger failed, 3 no outbox found,
    4 no actor found.
    """
    if not session:
        print('WARN: No session for sendShareViaServer')
        return 6

    # convert $4.23 to 4.23 USD
    newItemPrice, newItemCurrency = getPriceFromString(itemPrice)
    if newItemPrice != itemPrice:
        itemPrice = newItemPrice
        # only adopt the detected currency when the caller did not
        # supply one
        if not itemCurrency:
            if newItemCurrency != itemCurrency:
                itemCurrency = newItemCurrency

    fromDomainFull = getFullDomain(fromDomain, fromPort)

    actor = localActorUrl(httpPrefix, fromNickname, fromDomainFull)
    # shares are public and also cc'd to the actor's followers
    toUrl = 'https://www.w3.org/ns/activitystreams#Public'
    ccUrl = actor + '/followers'

    newShareJson = {
        "@context": "https://www.w3.org/ns/activitystreams",
        'type': 'Add',
        'actor': actor,
        'target': actor + '/shares',
        'object': {
            "type": "Offer",
            "displayName": displayName,
            "summary": summary,
            "itemQty": float(itemQty),
            "itemType": itemType,
            "category": itemCategory,
            "location": location,
            "duration": duration,
            "itemPrice": itemPrice,
            "itemCurrency": itemCurrency,
            'to': [toUrl],
            'cc': [ccUrl]
        },
        'to': [toUrl],
        'cc': [ccUrl]
    }

    handle = httpPrefix + '://' + fromDomainFull + '/@' + fromNickname

    # lookup the inbox for the To handle
    wfRequest = \
        webfingerHandle(session, handle, httpPrefix,
                        cachedWebfingers,
                        fromDomain, projectVersion, debug, False,
                        signingPrivateKeyPem)
    if not wfRequest:
        if debug:
            print('DEBUG: share webfinger failed for ' + handle)
        return 1
    if not isinstance(wfRequest, dict):
        print('WARN: share webfinger for ' + handle +
              ' did not return a dict. ' + str(wfRequest))
        return 1

    postToBox = 'outbox'

    # get the actor outbox for the sending handle.
    # only inboxUrl and fromPersonId are needed here; the remaining
    # tuple elements are discarded rather than shadowing the
    # displayName parameter as the previous unpack did
    originDomain = fromDomain
    (inboxUrl, _, _, fromPersonId, _, _,
     _, _) = getPersonBox(signingPrivateKeyPem,
                          originDomain,
                          baseDir, session, wfRequest,
                          personCache, projectVersion,
                          httpPrefix, fromNickname,
                          fromDomain, postToBox,
                          83653)

    if not inboxUrl:
        if debug:
            print('DEBUG: share no ' + postToBox +
                  ' was found for ' + handle)
        return 3
    if not fromPersonId:
        if debug:
            print('DEBUG: share no actor was found for ' + handle)
        return 4

    authHeader = createBasicAuthHeader(fromNickname, password)

    if imageFilename:
        # upload the item image to the shares endpoint before
        # posting the activity; the result is best-effort
        headers = {
            'host': fromDomain,
            'Authorization': authHeader
        }
        postImage(session, imageFilename, [],
                  inboxUrl.replace('/' + postToBox, '/shares'),
                  headers)

    headers = {
        'host': fromDomain,
        'Content-type': 'application/json',
        'Authorization': authHeader
    }
    postResult = \
        postJson(httpPrefix, fromDomainFull,
                 session, newShareJson, [], inboxUrl, headers, 30, True)
    if not postResult:
        if debug:
            print('DEBUG: POST share failed for c2s to ' + inboxUrl)
        # return 5

    if debug:
        print('DEBUG: c2s POST share item success')

    return newShareJson
|
2019-07-23 20:00:17 +00:00
|
|
|
|
|
2020-04-04 11:27:51 +00:00
|
|
|
|
|
|
|
|
|
def sendUndoShareViaServer(baseDir: str, session,
                           fromNickname: str, password: str,
                           fromDomain: str, fromPort: int,
                           httpPrefix: str, displayName: str,
                           cachedWebfingers: {}, personCache: {},
                           debug: bool, projectVersion: str,
                           signingPrivateKeyPem: str) -> {}:
    """Undoes a share via c2s

    Builds an ActivityPub Remove/Offer activity naming the shared item
    and posts it to the sending account's outbox.
    Returns the Remove activity dict on success, or an integer error
    code: 6 no session, 1 webfinger failed, 3 no outbox found,
    4 no actor found.
    """
    if not session:
        print('WARN: No session for sendUndoShareViaServer')
        return 6

    fromDomainFull = getFullDomain(fromDomain, fromPort)

    actor = localActorUrl(httpPrefix, fromNickname, fromDomainFull)
    # public activity, cc'd to the actor's followers
    toUrl = 'https://www.w3.org/ns/activitystreams#Public'
    ccUrl = actor + '/followers'

    undoShareJson = {
        "@context": "https://www.w3.org/ns/activitystreams",
        'type': 'Remove',
        'actor': actor,
        'target': actor + '/shares',
        'object': {
            "type": "Offer",
            "displayName": displayName,
            'to': [toUrl],
            'cc': [ccUrl]
        },
        'to': [toUrl],
        'cc': [ccUrl]
    }

    handle = httpPrefix + '://' + fromDomainFull + '/@' + fromNickname

    # lookup the inbox for the To handle
    wfRequest = \
        webfingerHandle(session, handle, httpPrefix, cachedWebfingers,
                        fromDomain, projectVersion, debug, False,
                        signingPrivateKeyPem)
    if not wfRequest:
        if debug:
            print('DEBUG: unshare webfinger failed for ' + handle)
        return 1
    if not isinstance(wfRequest, dict):
        print('WARN: unshare webfinger for ' + handle +
              ' did not return a dict. ' + str(wfRequest))
        return 1

    postToBox = 'outbox'

    # get the actor outbox for the sending handle.
    # only inboxUrl and fromPersonId are needed; the rest are
    # discarded rather than shadowing the displayName parameter
    originDomain = fromDomain
    (inboxUrl, _, _, fromPersonId, _, _,
     _, _) = getPersonBox(signingPrivateKeyPem,
                          originDomain,
                          baseDir, session, wfRequest,
                          personCache, projectVersion,
                          httpPrefix, fromNickname,
                          fromDomain, postToBox,
                          12663)

    if not inboxUrl:
        if debug:
            print('DEBUG: unshare no ' + postToBox +
                  ' was found for ' + handle)
        return 3
    if not fromPersonId:
        if debug:
            print('DEBUG: unshare no actor was found for ' + handle)
        return 4

    authHeader = createBasicAuthHeader(fromNickname, password)

    headers = {
        'host': fromDomain,
        'Content-type': 'application/json',
        'Authorization': authHeader
    }
    postResult = \
        postJson(httpPrefix, fromDomainFull,
                 session, undoShareJson, [], inboxUrl,
                 headers, 30, True)
    if not postResult:
        if debug:
            print('DEBUG: POST unshare failed for c2s to ' + inboxUrl)
        # return 5

    if debug:
        print('DEBUG: c2s POST unshare success')

    return undoShareJson
|
|
|
|
|
|
2020-04-04 11:27:51 +00:00
|
|
|
|
|
2021-08-09 19:37:18 +00:00
|
|
|
|
def sendWantedViaServer(baseDir, session,
                        fromNickname: str, password: str,
                        fromDomain: str, fromPort: int,
                        httpPrefix: str, displayName: str,
                        summary: str, imageFilename: str,
                        itemQty: float, itemType: str, itemCategory: str,
                        location: str, duration: str,
                        cachedWebfingers: {}, personCache: {},
                        debug: bool, projectVersion: str,
                        itemMaxPrice: str, itemCurrency: str,
                        signingPrivateKeyPem: str) -> {}:
    """Creates a wanted item via c2s

    Builds an ActivityPub Add/Offer activity describing the wanted item,
    optionally uploads an attached image, then posts the activity to the
    sending account's outbox.
    Returns the new Add activity dict on success, or an integer error
    code: 6 no session, 1 webfinger failed, 3 no outbox found,
    4 no actor found.
    """
    if not session:
        print('WARN: No session for sendWantedViaServer')
        return 6

    # convert $4.23 to 4.23 USD
    newItemMaxPrice, newItemCurrency = getPriceFromString(itemMaxPrice)
    if newItemMaxPrice != itemMaxPrice:
        itemMaxPrice = newItemMaxPrice
        # only adopt the detected currency when the caller did not
        # supply one
        if not itemCurrency:
            if newItemCurrency != itemCurrency:
                itemCurrency = newItemCurrency

    fromDomainFull = getFullDomain(fromDomain, fromPort)

    actor = localActorUrl(httpPrefix, fromNickname, fromDomainFull)
    # wanted items are public and cc'd to the actor's followers
    toUrl = 'https://www.w3.org/ns/activitystreams#Public'
    ccUrl = actor + '/followers'

    newShareJson = {
        "@context": "https://www.w3.org/ns/activitystreams",
        'type': 'Add',
        'actor': actor,
        'target': actor + '/wanted',
        'object': {
            "type": "Offer",
            "displayName": displayName,
            "summary": summary,
            "itemQty": float(itemQty),
            "itemType": itemType,
            "category": itemCategory,
            "location": location,
            "duration": duration,
            "itemPrice": itemMaxPrice,
            "itemCurrency": itemCurrency,
            'to': [toUrl],
            'cc': [ccUrl]
        },
        'to': [toUrl],
        'cc': [ccUrl]
    }

    handle = httpPrefix + '://' + fromDomainFull + '/@' + fromNickname

    # lookup the inbox for the To handle
    wfRequest = \
        webfingerHandle(session, handle, httpPrefix,
                        cachedWebfingers,
                        fromDomain, projectVersion, debug, False,
                        signingPrivateKeyPem)
    if not wfRequest:
        if debug:
            # previously said 'share', which was a copy-paste error
            print('DEBUG: wanted webfinger failed for ' + handle)
        return 1
    if not isinstance(wfRequest, dict):
        print('WARN: wanted webfinger for ' + handle +
              ' did not return a dict. ' + str(wfRequest))
        return 1

    postToBox = 'outbox'

    # get the actor outbox for the sending handle.
    # only inboxUrl and fromPersonId are needed; the rest are
    # discarded rather than shadowing the displayName parameter
    originDomain = fromDomain
    (inboxUrl, _, _, fromPersonId, _, _,
     _, _) = getPersonBox(signingPrivateKeyPem,
                          originDomain,
                          baseDir, session, wfRequest,
                          personCache, projectVersion,
                          httpPrefix, fromNickname,
                          fromDomain, postToBox,
                          23653)

    if not inboxUrl:
        if debug:
            print('DEBUG: wanted no ' + postToBox +
                  ' was found for ' + handle)
        return 3
    if not fromPersonId:
        if debug:
            print('DEBUG: wanted no actor was found for ' + handle)
        return 4

    authHeader = createBasicAuthHeader(fromNickname, password)

    if imageFilename:
        # upload the item image to the wanted endpoint before
        # posting the activity; the result is best-effort
        headers = {
            'host': fromDomain,
            'Authorization': authHeader
        }
        postImage(session, imageFilename, [],
                  inboxUrl.replace('/' + postToBox, '/wanted'),
                  headers)

    headers = {
        'host': fromDomain,
        'Content-type': 'application/json',
        'Authorization': authHeader
    }
    postResult = \
        postJson(httpPrefix, fromDomainFull,
                 session, newShareJson, [], inboxUrl, headers, 30, True)
    if not postResult:
        if debug:
            print('DEBUG: POST wanted failed for c2s to ' + inboxUrl)
        # return 5

    if debug:
        print('DEBUG: c2s POST wanted item success')

    return newShareJson
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def sendUndoWantedViaServer(baseDir: str, session,
                            fromNickname: str, password: str,
                            fromDomain: str, fromPort: int,
                            httpPrefix: str, displayName: str,
                            cachedWebfingers: {}, personCache: {},
                            debug: bool, projectVersion: str,
                            signingPrivateKeyPem: str) -> {}:
    """Undoes a wanted item via c2s

    Builds an ActivityPub Remove/Offer activity naming the wanted item
    and posts it to the sending account's outbox.
    Returns the Remove activity dict on success, or an integer error
    code: 6 no session, 1 webfinger failed, 3 no outbox found,
    4 no actor found.
    """
    if not session:
        print('WARN: No session for sendUndoWantedViaServer')
        return 6

    fromDomainFull = getFullDomain(fromDomain, fromPort)

    actor = localActorUrl(httpPrefix, fromNickname, fromDomainFull)
    # public activity, cc'd to the actor's followers
    toUrl = 'https://www.w3.org/ns/activitystreams#Public'
    ccUrl = actor + '/followers'

    undoShareJson = {
        "@context": "https://www.w3.org/ns/activitystreams",
        'type': 'Remove',
        'actor': actor,
        'target': actor + '/wanted',
        'object': {
            "type": "Offer",
            "displayName": displayName,
            'to': [toUrl],
            'cc': [ccUrl]
        },
        'to': [toUrl],
        'cc': [ccUrl]
    }

    handle = httpPrefix + '://' + fromDomainFull + '/@' + fromNickname

    # lookup the inbox for the To handle
    wfRequest = \
        webfingerHandle(session, handle, httpPrefix, cachedWebfingers,
                        fromDomain, projectVersion, debug, False,
                        signingPrivateKeyPem)
    if not wfRequest:
        if debug:
            print('DEBUG: unwant webfinger failed for ' + handle)
        return 1
    if not isinstance(wfRequest, dict):
        print('WARN: unwant webfinger for ' + handle +
              ' did not return a dict. ' + str(wfRequest))
        return 1

    postToBox = 'outbox'

    # get the actor outbox for the sending handle.
    # only inboxUrl and fromPersonId are needed; the rest are
    # discarded rather than shadowing the displayName parameter
    originDomain = fromDomain
    (inboxUrl, _, _, fromPersonId, _, _,
     _, _) = getPersonBox(signingPrivateKeyPem,
                          originDomain,
                          baseDir, session, wfRequest,
                          personCache, projectVersion,
                          httpPrefix, fromNickname,
                          fromDomain, postToBox,
                          12693)

    if not inboxUrl:
        if debug:
            print('DEBUG: unwant no ' + postToBox +
                  ' was found for ' + handle)
        return 3
    if not fromPersonId:
        if debug:
            print('DEBUG: unwant no actor was found for ' + handle)
        return 4

    authHeader = createBasicAuthHeader(fromNickname, password)

    headers = {
        'host': fromDomain,
        'Content-type': 'application/json',
        'Authorization': authHeader
    }
    postResult = \
        postJson(httpPrefix, fromDomainFull,
                 session, undoShareJson, [], inboxUrl,
                 headers, 30, True)
    if not postResult:
        if debug:
            print('DEBUG: POST unwant failed for c2s to ' + inboxUrl)
        # return 5

    if debug:
        print('DEBUG: c2s POST unwant success')

    return undoShareJson
|
|
|
|
|
|
|
|
|
|
|
2021-08-04 12:04:35 +00:00
|
|
|
|
def getSharedItemsCatalogViaServer(baseDir, session,
                                   nickname: str, password: str,
                                   domain: str, port: int,
                                   httpPrefix: str, debug: bool,
                                   signingPrivateKeyPem: str) -> {}:
    """Returns the shared items catalog via c2s

    Performs an authenticated GET against the account's /catalog
    endpoint and returns the resulting json, or 6 when no session
    is available.
    """
    if not session:
        print('WARN: No session for getSharedItemsCatalogViaServer')
        return 6

    # the catalog lives under the local actor url
    domainFull = getFullDomain(domain, port)
    url = localActorUrl(httpPrefix, nickname, domainFull) + '/catalog'

    basicAuth = createBasicAuthHeader(nickname, password)
    headers = {
        'host': domain,
        'Content-type': 'application/json',
        'Authorization': basicAuth,
        'Accept': 'application/json'
    }
    if debug:
        print('Shared items catalog request to: ' + url)
    catalogJson = getJson(signingPrivateKeyPem, session, url, headers, None,
                          debug, __version__, httpPrefix, None)
    if not catalogJson:
        if debug:
            print('DEBUG: GET shared items catalog failed for c2s to ' + url)
        # return 5

    if debug:
        print('DEBUG: c2s GET shared items catalog success')

    return catalogJson
|
|
|
|
|
|
|
|
|
|
|
2020-04-04 11:27:51 +00:00
|
|
|
|
def outboxShareUpload(baseDir: str, httpPrefix: str,
                      nickname: str, domain: str, port: int,
                      messageJson: {}, debug: bool, city: str,
                      systemLanguage: str, translate: {},
                      lowBandwidth: bool,
                      contentLicenseUrl: str) -> None:
    """ When a shared item is received by the outbox from c2s

    Validates that messageJson is an Add/Offer activity carrying all
    required item fields, then stores the item via addShare.
    Returns None in all cases; invalid activities are silently
    dropped (with a reason printed when debug is enabled).
    """
    if not messageJson.get('type'):
        return
    if not messageJson['type'] == 'Add':
        return
    if not hasObjectStringType(messageJson, debug):
        return
    if not messageJson['object']['type'] == 'Offer':
        if debug:
            print('DEBUG: not an Offer activity')
        return
    # every Offer must carry these fields; previously six copy-pasted
    # if-blocks, collapsed into one loop with identical messages
    requiredFields = ('displayName', 'summary', 'itemQty',
                      'itemType', 'category', 'duration')
    for fieldName in requiredFields:
        if not messageJson['object'].get(fieldName):
            if debug:
                print('DEBUG: ' + fieldName + ' missing from Offer')
            return
    itemQty = float(messageJson['object']['itemQty'])
    # location and image are optional
    location = ''
    if messageJson['object'].get('location'):
        location = messageJson['object']['location']
    imageFilename = None
    if messageJson['object'].get('imageFilename'):
        imageFilename = messageJson['object']['imageFilename']
    if debug:
        print('Adding shared item')
        pprint(messageJson)

    # NOTE(review): itemPrice/itemCurrency are accessed without a
    # presence check below and would raise KeyError if absent - confirm
    # that upstream validation guarantees them
    addShare(baseDir,
             httpPrefix, nickname, domain, port,
             messageJson['object']['displayName'],
             messageJson['object']['summary'],
             imageFilename,
             itemQty,
             messageJson['object']['itemType'],
             messageJson['object']['category'],
             location,
             messageJson['object']['duration'],
             debug, city,
             messageJson['object']['itemPrice'],
             messageJson['object']['itemCurrency'],
             systemLanguage, translate, 'shares',
             lowBandwidth, contentLicenseUrl)
    if debug:
        print('DEBUG: shared item received via c2s')
|
2019-07-23 21:14:16 +00:00
|
|
|
|
|
2020-04-04 11:27:51 +00:00
|
|
|
|
|
|
|
|
|
def outboxUndoShareUpload(baseDir: str, httpPrefix: str,
                          nickname: str, domain: str, port: int,
                          messageJson: {}, debug: bool) -> None:
    """ When a shared item is removed via c2s

    Validates that messageJson is a Remove/Offer activity naming an
    item, then deletes that item from the account's shares.
    """
    if not messageJson.get('type'):
        return
    if messageJson['type'] != 'Remove':
        return
    if not hasObjectStringType(messageJson, debug):
        return
    offerJson = messageJson['object']
    if offerJson['type'] != 'Offer':
        if debug:
            print('DEBUG: not an Offer activity')
        return
    if not offerJson.get('displayName'):
        if debug:
            print('DEBUG: displayName missing from Offer')
        return
    domainFull = getFullDomain(domain, port)
    removeSharedItem(baseDir, nickname, domain,
                     offerJson['displayName'],
                     httpPrefix, domainFull, 'shares')
    if debug:
        print('DEBUG: shared item removed via c2s')
|
2021-07-24 22:08:11 +00:00
|
|
|
|
|
|
|
|
|
|
2021-07-25 13:09:39 +00:00
|
|
|
|
def _sharesCatalogParams(path: str) -> (bool, float, float, str):
|
|
|
|
|
"""Returns parameters when accessing the shares catalog
|
|
|
|
|
"""
|
|
|
|
|
today = False
|
|
|
|
|
minPrice = 0
|
|
|
|
|
maxPrice = 9999999
|
|
|
|
|
matchPattern = None
|
|
|
|
|
if '?' not in path:
|
|
|
|
|
return today, minPrice, maxPrice, matchPattern
|
|
|
|
|
args = path.split('?', 1)[1]
|
2021-07-25 13:48:55 +00:00
|
|
|
|
argList = args.split(';')
|
2021-07-25 13:09:39 +00:00
|
|
|
|
for arg in argList:
|
|
|
|
|
if '=' not in arg:
|
|
|
|
|
continue
|
|
|
|
|
key = arg.split('=')[0].lower()
|
|
|
|
|
value = arg.split('=')[1]
|
|
|
|
|
if key == 'today':
|
|
|
|
|
value = value.lower()
|
2021-07-25 13:50:03 +00:00
|
|
|
|
if 't' in value or 'y' in value or '1' in value:
|
2021-07-25 13:09:39 +00:00
|
|
|
|
today = True
|
|
|
|
|
elif key.startswith('min'):
|
|
|
|
|
if isfloat(value):
|
|
|
|
|
minPrice = float(value)
|
|
|
|
|
elif key.startswith('max'):
|
|
|
|
|
if isfloat(value):
|
|
|
|
|
maxPrice = float(value)
|
|
|
|
|
elif key.startswith('match'):
|
|
|
|
|
matchPattern = value
|
|
|
|
|
return today, minPrice, maxPrice, matchPattern
|
|
|
|
|
|
|
|
|
|
|
2021-07-24 22:08:11 +00:00
|
|
|
|
def sharesCatalogAccountEndpoint(baseDir: str, httpPrefix: str,
                                 nickname: str, domain: str,
                                 domainFull: str,
                                 path: str, debug: bool,
                                 sharesFileType: str) -> {}:
    """Returns the endpoint for the shares catalog of a particular account
    See https://github.com/datafoodconsortium/ontology
    Also the subdirectory ontology/DFC

    Filters the account's shares (or wanted items, depending on
    sharesFileType) by the query parameters in path and returns a
    DFC Entreprise document listing the matching items.
    """
    today, minPrice, maxPrice, matchPattern = _sharesCatalogParams(path)
    dfcUrl = \
        httpPrefix + '://' + domainFull + '/ontologies/DFC_FullModel.owl#'
    dfcPtUrl = \
        httpPrefix + '://' + domainFull + \
        '/ontologies/DFC_ProductGlossary.rdf#'
    owner = localActorUrl(httpPrefix, nickname, domainFull)
    if sharesFileType == 'shares':
        dfcInstanceId = owner + '/catalog'
    else:
        dfcInstanceId = owner + '/wantedItems'
    endpoint = {
        "@context": {
            "DFC": dfcUrl,
            "dfc-pt": dfcPtUrl,
            "@base": "http://maPlateformeNationale"
        },
        "@id": dfcInstanceId,
        "@type": "DFC:Entreprise",
        "DFC:supplies": []
    }

    # used for the 'today' filter against item publication dates
    currDate = datetime.datetime.utcnow()
    currDateStr = currDate.strftime("%Y-%m-%d")

    sharesFilename = \
        acctDir(baseDir, nickname, domain) + '/' + sharesFileType + '.json'
    if not os.path.isfile(sharesFilename):
        if debug:
            print(sharesFileType + '.json file not found: ' + sharesFilename)
        return endpoint
    sharesJson = loadJson(sharesFilename, 1, 2)
    if not sharesJson:
        if debug:
            print('Unable to load json for ' + sharesFilename)
        return endpoint

    for itemID, item in sharesJson.items():
        if not item.get('dfcId'):
            if debug:
                print('Item does not have dfcId: ' + itemID)
            continue
        if '#' not in item['dfcId']:
            continue
        if today:
            if not item['published'].startswith(currDateStr):
                continue
        # price range filter; convert once instead of per comparison.
        # (the previous 'is not None' guards were dead code, since
        # _sharesCatalogParams never returns None for these)
        itemPrice = float(item['itemPrice'])
        if itemPrice < minPrice or itemPrice > maxPrice:
            continue
        description = item['displayName'] + ': ' + item['summary']
        if matchPattern:
            if not re.match(matchPattern, description):
                continue

        # NOTE(review): fromtimestamp() uses local time but the result
        # is formatted with a 'Z' (UTC) suffix - confirm the intended
        # timezone for 'expire'
        expireDate = datetime.datetime.fromtimestamp(item['expire'])
        expireDateStr = expireDate.strftime("%Y-%m-%dT%H:%M:%SZ")

        shareId = _getValidSharedItemID(owner, item['displayName'])
        # dfcId is namespaced depending upon whether it comes from the
        # epicyon ontology or the DFC product glossary
        if item['dfcId'].startswith('epicyon#'):
            dfcId = "epicyon:" + item['dfcId'].split('#')[1]
        else:
            dfcId = "dfc-pt:" + item['dfcId'].split('#')[1]
        priceStr = item['itemPrice'] + ' ' + item['itemCurrency']
        catalogItem = {
            "@id": shareId,
            "@type": "DFC:SuppliedProduct",
            "DFC:hasType": dfcId,
            "DFC:startDate": item['published'],
            "DFC:expiryDate": expireDateStr,
            "DFC:quantity": float(item['itemQty']),
            "DFC:price": priceStr,
            "DFC:Image": item['imageUrl'],
            "DFC:description": description
        }
        endpoint['DFC:supplies'].append(catalogItem)

    return endpoint
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def sharesCatalogEndpoint(baseDir: str, httpPrefix: str,
                          domainFull: str,
                          path: str, sharesFileType: str) -> {}:
    """Returns the endpoint for the shares catalog for the instance
    See https://github.com/datafoodconsortium/ontology
    Also the subdirectory ontology/DFC

    baseDir: base directory of the instance
    httpPrefix: http or https
    domainFull: domain of this instance, including any port
    path: request path, which may contain catalog filter parameters
    sharesFileType: selects which shares file is read from each
    account directory (e.g. "shares") — TODO confirm other values
    """
    # optional filters parsed from the request path
    today, minPrice, maxPrice, matchPattern = _sharesCatalogParams(path)
    dfcUrl = \
        httpPrefix + '://' + domainFull + '/ontologies/DFC_FullModel.owl#'
    dfcPtUrl = \
        httpPrefix + '://' + domainFull + \
        '/ontologies/DFC_ProductGlossary.rdf#'
    dfcInstanceId = httpPrefix + '://' + domainFull + '/catalog'
    endpoint = {
        "@context": {
            "DFC": dfcUrl,
            "dfc-pt": dfcPtUrl,
            "@base": "http://maPlateformeNationale"
        },
        "@id": dfcInstanceId,
        "@type": "DFC:Entreprise",
        "DFC:supplies": []
    }

    currDate = datetime.datetime.utcnow()
    currDateStr = currDate.strftime("%Y-%m-%d")

    for subdir, dirs, files in os.walk(baseDir + '/accounts'):
        for acct in dirs:
            if not isAccountDir(acct):
                continue
            nickname = acct.split('@')[0]
            domain = acct.split('@')[1]
            owner = localActorUrl(httpPrefix, nickname, domainFull)

            sharesFilename = \
                acctDir(baseDir, nickname, domain) + '/' + \
                sharesFileType + '.json'
            if not os.path.isfile(sharesFilename):
                continue
            sharesJson = loadJson(sharesFilename, 1, 2)
            if not sharesJson:
                continue

            for itemID, item in sharesJson.items():
                if not item.get('dfcId'):
                    continue
                if '#' not in item['dfcId']:
                    continue
                if today:
                    # "published" is stored as seconds since the epoch
                    # (see dateSecondsToString below), so convert it
                    # before comparing against the current date string
                    publishedDateStr = \
                        dateSecondsToString(item['published'])
                    if not publishedDateStr.startswith(currDateStr):
                        continue
                if minPrice is not None:
                    if float(item['itemPrice']) < minPrice:
                        continue
                if maxPrice is not None:
                    if float(item['itemPrice']) > maxPrice:
                        continue
                description = item['displayName'] + ': ' + item['summary']
                if matchPattern:
                    if not re.match(matchPattern, description):
                        continue

                startDateStr = dateSecondsToString(item['published'])
                expireDateStr = dateSecondsToString(item['expire'])
                shareId = _getValidSharedItemID(owner, item['displayName'])
                # dfcId is stored as "<ontology>#<id>"; epicyon's own
                # ontology is mapped to the epicyon: prefix, everything
                # else to the dfc-pt: product glossary prefix
                if item['dfcId'].startswith('epicyon#'):
                    dfcId = "epicyon:" + item['dfcId'].split('#')[1]
                else:
                    dfcId = "dfc-pt:" + item['dfcId'].split('#')[1]
                priceStr = item['itemPrice'] + ' ' + item['itemCurrency']
                catalogItem = {
                    "@id": shareId,
                    "@type": "DFC:SuppliedProduct",
                    "DFC:hasType": dfcId,
                    "DFC:startDate": startDateStr,
                    "DFC:expiryDate": expireDateStr,
                    "DFC:quantity": float(item['itemQty']),
                    "DFC:price": priceStr,
                    "DFC:Image": item['imageUrl'],
                    "DFC:description": description
                }
                endpoint['DFC:supplies'].append(catalogItem)

    return endpoint
|
2021-07-25 13:30:42 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def sharesCatalogCSVEndpoint(baseDir: str, httpPrefix: str,
                             domainFull: str,
                             path: str, sharesFileType: str) -> str:
    """Returns a CSV version of the shares catalog

    Returns an empty string when there is no catalog or it
    contains no items
    """
    catalogJson = \
        sharesCatalogEndpoint(baseDir, httpPrefix, domainFull, path,
                              sharesFileType)
    if not catalogJson:
        return ''
    if not catalogJson.get('DFC:supplies'):
        return ''
    # header row
    csvStr = \
        'id,type,hasType,startDate,expiryDate,' + \
        'quantity,price,currency,Image,description,\n'
    for item in catalogJson['DFC:supplies']:
        csvStr += '"' + item['@id'] + '",'
        csvStr += '"' + item['@type'] + '",'
        csvStr += '"' + item['DFC:hasType'] + '",'
        csvStr += '"' + item['DFC:startDate'] + '",'
        csvStr += '"' + item['DFC:expiryDate'] + '",'
        csvStr += str(item['DFC:quantity']) + ','
        # price is stored as "<amount> <currency>"
        csvStr += item['DFC:price'].split(' ')[0] + ','
        csvStr += '"' + item['DFC:price'].split(' ')[1] + '",'
        if item.get('DFC:Image'):
            csvStr += '"' + item['DFC:Image'] + '",'
        else:
            # emit an empty field so that the column count stays
            # aligned with the header when there is no image
            csvStr += '"",'
        description = item['DFC:description'].replace('"', "'")
        csvStr += '"' + description + '",\n'
    return csvStr
|
2021-07-26 09:40:51 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def generateSharedItemFederationTokens(sharedItemsFederatedDomains: [],
                                       baseDir: str) -> {}:
    """Ensures a token entry exists for every federated domain.

    Any previously saved tokens file is loaded first, then an empty
    placeholder token is added for each domain which does not yet
    have one. The result is saved back to disk when anything new
    was added and a baseDir is given.
    """
    if not sharedItemsFederatedDomains:
        return {}

    tokensJson = {}
    tokensFilename = None
    if baseDir:
        tokensFilename = \
            baseDir + '/accounts/sharedItemsFederationTokens.json'
        if os.path.isfile(tokensFilename):
            tokensJson = loadJson(tokensFilename, 1, 2) or {}

    # domains which still need a placeholder token
    missingDomains = [fedDomain
                      for fedDomain in sharedItemsFederatedDomains
                      if not tokensJson.get(fedDomain)]
    if not missingDomains:
        return tokensJson

    for fedDomain in missingDomains:
        tokensJson[fedDomain] = ''
    if baseDir:
        saveJson(tokensJson, tokensFilename)
    return tokensJson
|
2021-07-26 10:00:54 +00:00
|
|
|
|
|
|
|
|
|
|
2021-07-26 12:20:07 +00:00
|
|
|
|
def updateSharedItemFederationToken(baseDir: str,
                                    tokenDomainFull: str, newToken: str,
                                    debug: bool,
                                    tokensJson: {} = None) -> {}:
    """Updates an individual token for shared item federation

    If no tokensJson dict is supplied then previously saved tokens
    are loaded from disk. When the token is new or changed the dict
    is saved back to disk (if baseDir is given) and returned.
    """
    if debug:
        print('Updating shared items token for ' + tokenDomainFull)
    # compute the filename up front so that saving is always possible,
    # even when the caller supplied a tokensJson dict (previously the
    # filename was only bound on the load path, causing a NameError)
    tokensFilename = None
    if baseDir:
        tokensFilename = \
            baseDir + '/accounts/sharedItemsFederationTokens.json'
    if not tokensJson:
        tokensJson = {}
        if tokensFilename and os.path.isfile(tokensFilename):
            if debug:
                print('Update loading tokens for ' + tokenDomainFull)
            tokensJson = loadJson(tokensFilename, 1, 2)
            if tokensJson is None:
                tokensJson = {}
    # only rewrite the file when the token is missing or different
    updateRequired = False
    if tokensJson.get(tokenDomainFull):
        if tokensJson[tokenDomainFull] != newToken:
            updateRequired = True
    else:
        updateRequired = True
    if updateRequired:
        tokensJson[tokenDomainFull] = newToken
        if tokensFilename:
            saveJson(tokensJson, tokensFilename)
    return tokensJson
|
|
|
|
|
|
|
|
|
|
|
2021-08-05 11:24:24 +00:00
|
|
|
|
def mergeSharedItemTokens(baseDir: str, domainFull: str,
                          newSharedItemsFederatedDomains: [],
                          tokensJson: {}) -> {}:
    """Reconciles the tokens dict with a changed federation domains list.

    Tokens for domains which left the federation are removed, while
    newly federated domains get an empty placeholder token. The dict
    is saved when anything changed and a baseDir is given.
    """
    changed = False
    # collect tokens for domains which are no longer federated,
    # never touching entries belonging to this instance itself
    staleDomains = []
    for fedDomain in list(tokensJson):
        if domainFull and fedDomain.startswith(domainFull):
            continue
        if fedDomain not in newSharedItemsFederatedDomains:
            staleDomains.append(fedDomain)
    # remove domains no longer in the federation list
    for fedDomain in staleDomains:
        del tokensJson[fedDomain]
        changed = True
    # add new domains from the federation list
    for fedDomain in newSharedItemsFederatedDomains:
        if fedDomain not in tokensJson:
            tokensJson[fedDomain] = ''
            changed = True
    if baseDir and changed:
        tokensFilename = \
            baseDir + '/accounts/sharedItemsFederationTokens.json'
        saveJson(tokensJson, tokensFilename)
    return tokensJson
|
|
|
|
|
|
|
|
|
|
|
2021-07-26 12:20:07 +00:00
|
|
|
|
def createSharedItemFederationToken(baseDir: str,
                                    tokenDomainFull: str,
                                    force: bool,
                                    tokensJson: {} = None) -> {}:
    """Creates a federation token for the given domain.

    A new token is generated when the domain has none yet, or
    unconditionally when force is True. The updated dict is saved
    back to disk when baseDir is given, and returned.
    """
    # compute the filename up front so that saving is always possible,
    # even when the caller supplied a tokensJson dict (previously the
    # filename was only bound on the load path, causing a NameError)
    tokensFilename = None
    if baseDir:
        tokensFilename = \
            baseDir + '/accounts/sharedItemsFederationTokens.json'
    if not tokensJson:
        tokensJson = {}
        if tokensFilename and os.path.isfile(tokensFilename):
            tokensJson = loadJson(tokensFilename, 1, 2)
            if tokensJson is None:
                tokensJson = {}
    if force or not tokensJson.get(tokenDomainFull):
        # URL-safe so the token can travel in an HTTP header
        tokensJson[tokenDomainFull] = secrets.token_urlsafe(64)
        if tokensFilename:
            saveJson(tokensJson, tokensFilename)
    return tokensJson
|
|
|
|
|
|
|
|
|
|
|
2021-07-26 10:00:54 +00:00
|
|
|
|
def authorizeSharedItems(sharedItemsFederatedDomains: [],
                         baseDir: str,
                         originDomainFull: str,
                         callingDomainFull: str,
                         authHeader: str,
                         debug: bool,
                         tokensJson: {} = None) -> bool:
    """HTTP simple token check for shared item federation

    Returns True only when the origin domain is federated and the
    supplied Authorization header matches the stored token for the
    calling domain.
    """
    if not sharedItemsFederatedDomains:
        # no shared item federation
        return False
    if originDomainFull not in sharedItemsFederatedDomains:
        if debug:
            print(originDomainFull +
                  ' is not in the shared items federation list ' +
                  str(sharedItemsFederatedDomains))
        return False
    if 'Basic ' in authHeader:
        if debug:
            print('DEBUG: shared item federation should not use basic auth')
        return False
    # strip line breaks and surrounding whitespace from the header
    suppliedToken = authHeader.replace('\n', '').replace('\r', '').strip()
    if not suppliedToken:
        if debug:
            print('DEBUG: shared item federation token is empty')
        return False
    if len(suppliedToken) < 60:
        if debug:
            print('DEBUG: shared item federation token is too small ' +
                  suppliedToken)
        return False
    if not tokensJson:
        tokensFilename = \
            baseDir + '/accounts/sharedItemsFederationTokens.json'
        if not os.path.isfile(tokensFilename):
            if debug:
                print('DEBUG: shared item federation tokens file missing ' +
                      tokensFilename)
            return False
        tokensJson = loadJson(tokensFilename, 1, 2)
    if not tokensJson:
        return False
    expectedToken = tokensJson.get(callingDomainFull)
    if not expectedToken:
        if debug:
            print('DEBUG: shared item federation token ' +
                  'check failed for ' + callingDomainFull)
        return False
    # constant-time comparison avoids leaking token contents via timing
    if not constantTimeStringCheck(expectedToken, suppliedToken):
        if debug:
            print('DEBUG: shared item federation token ' +
                  'mismatch for ' + callingDomainFull)
        return False
    return True
|
2021-07-26 17:54:13 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _updateFederatedSharesCache(session, sharedItemsFederatedDomains: [],
                                baseDir: str, domainFull: str,
                                httpPrefix: str,
                                tokensJson: {}, debug: bool,
                                systemLanguage: str,
                                sharesFileType: str) -> None:
    """Updates the cache of federated shares for the instance.
    This enables shared items to be available even when other instances
    might not be online

    session: http session used to download the remote catalogs
    tokensJson: per-domain federation tokens, sent as Authorization
    sharesFileType: "shares" selects the /catalog endpoint; any other
    value selects /wantedItems — assumes only those two types exist,
    TODO confirm against getSharesFilesList
    """
    # create directories where catalogs will be stored
    cacheDir = baseDir + '/cache'
    if not os.path.isdir(cacheDir):
        os.mkdir(cacheDir)
    if sharesFileType == 'shares':
        catalogsDir = cacheDir + '/catalogs'
    else:
        catalogsDir = cacheDir + '/wantedItems'
    if not os.path.isdir(catalogsDir):
        os.mkdir(catalogsDir)

    asHeader = {
        "Accept": "application/ld+json",
        "Origin": domainFull
    }
    for federatedDomainFull in sharedItemsFederatedDomains:
        # NOTE: federatedDomain does not have a port extension,
        # so may not work in some situations
        if federatedDomainFull.startswith(domainFull):
            # only download from instances other than this one
            continue
        if not tokensJson.get(federatedDomainFull):
            # no token has been obtained for the other domain yet,
            # so we cannot authenticate against it
            continue
        # skip instances which are currently unreachable
        if not siteIsActive(httpPrefix + '://' + federatedDomainFull, 10):
            continue
        if sharesFileType == 'shares':
            url = httpPrefix + '://' + federatedDomainFull + '/catalog'
        else:
            url = httpPrefix + '://' + federatedDomainFull + '/wantedItems'
        # authenticate with the token previously agreed for that domain
        asHeader['Authorization'] = tokensJson[federatedDomainFull]
        catalogJson = getJson(session, url, asHeader, None,
                              debug, __version__, httpPrefix, None)
        if not catalogJson:
            print('WARN: failed to download shared items catalog for ' +
                  federatedDomainFull)
            continue
        catalogFilename = catalogsDir + '/' + federatedDomainFull + '.json'
        if saveJson(catalogJson, catalogFilename):
            print('Downloaded shared items catalog for ' + federatedDomainFull)
            # convert the DFC catalog into the internal shares format
            # so it can be searched and displayed like local shares
            sharesJson = _dfcToSharesFormat(catalogJson,
                                            baseDir, systemLanguage,
                                            httpPrefix, domainFull)
            if sharesJson:
                sharesFilename = \
                    catalogsDir + '/' + federatedDomainFull + '.' + \
                    sharesFileType + '.json'
                saveJson(sharesJson, sharesFilename)
                print('Converted shares catalog for ' + federatedDomainFull)
        else:
            # saving failed; back off briefly before the next domain
            time.sleep(2)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def runFederatedSharesWatchdog(projectVersion: str, httpd) -> None:
    """This tries to keep the federated shares update thread
    running even if it dies

    projectVersion: version string (unused within this function)
    httpd: daemon object owning the background threads
    """
    print('Starting federated shares watchdog')
    # NOTE(review): this clones httpd.thrPostSchedule rather than
    # httpd.thrFederatedSharesDaemon - looks like a copy-paste from
    # another watchdog; confirm the cloned thread's stored args are
    # suitable for runFederatedSharesDaemon
    federatedSharesOriginal = \
        httpd.thrPostSchedule.clone(runFederatedSharesDaemon)
    httpd.thrFederatedSharesDaemon.start()
    while True:
        # poll periodically to detect whether the daemon thread died
        time.sleep(55)
        if httpd.thrFederatedSharesDaemon.is_alive():
            continue
        # the daemon thread died, so restart it from the saved clone
        httpd.thrFederatedSharesDaemon.kill()
        httpd.thrFederatedSharesDaemon = \
            federatedSharesOriginal.clone(runFederatedSharesDaemon)
        httpd.thrFederatedSharesDaemon.start()
        print('Restarting federated shares daemon...')
|
|
|
|
|
|
|
|
|
|
|
2021-08-07 10:29:40 +00:00
|
|
|
|
def _generateNextSharesTokenUpdate(baseDir: str,
                                   minDays: int, maxDays: int) -> None:
    """Creates a file containing the next date when the shared items token
    for this instance will be updated

    The deadline is stored as a unix timestamp in accounts/.tokenUpdate
    and is pushed forward by a random number of days (between minDays
    and maxDays) once it has passed.
    """
    accountsDir = baseDir + '/accounts'
    for neededDir in (baseDir, accountsDir):
        if not os.path.isdir(neededDir):
            os.mkdir(neededDir)
    scheduleFilename = accountsDir + '/.tokenUpdate'

    # read any previously scheduled update time
    scheduledSec = None
    if os.path.isfile(scheduleFilename):
        with open(scheduleFilename, 'r') as schedFile:
            contents = schedFile.read()
        if contents and contents.isdigit():
            scheduledSec = int(contents)

    nowSec = int(time.time())
    if scheduledSec:
        if nowSec <= scheduledSec:
            # the scheduled time is still in the future; nothing to do
            return
        # deadline has passed: push it forward by a random interval
        scheduledSec += int(60 * 60 * 24 * randint(minDays, maxDays))
    else:
        # no valid schedule yet: create one relative to now
        scheduledSec = nowSec + int(60 * 60 * 24 * randint(minDays, maxDays))

    with open(scheduleFilename, 'w+') as schedFile:
        schedFile.write(str(scheduledSec))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _regenerateSharesToken(baseDir: str, domainFull: str,
                           minDays: int, maxDays: int, httpd) -> None:
    """Occasionally rotates the shared items token for this instance.

    Rationale: a domain which was once in your federated shares list
    but has since been removed/blocked still knows your token, and
    could keep accessing your shared items with a spoofed Origin
    header. Rotating the token every few days eventually locks such
    a domain out, while domains still in the federation list receive
    the new token automatically.
    Does nothing until the scheduled rotation time (written by
    _generateNextSharesTokenUpdate) has passed.
    """
    scheduleFilename = baseDir + '/accounts/.tokenUpdate'
    if not os.path.isfile(scheduleFilename):
        return
    with open(scheduleFilename, 'r') as schedFile:
        scheduledStr = schedFile.read()
    if not scheduledStr or not scheduledStr.isdigit():
        return
    scheduledSec = int(scheduledStr)
    if not scheduledSec:
        return
    if int(time.time()) <= scheduledSec:
        # not yet time to rotate the token
        return
    # force-create a new token and schedule the next rotation
    createSharedItemFederationToken(baseDir, domainFull, True, None)
    _generateNextSharesTokenUpdate(baseDir, minDays, maxDays)
    # update the tokens used within the daemon
    httpd.sharedItemFederationTokens = \
        generateSharedItemFederationTokens(httpd.sharedItemsFederatedDomains,
                                           baseDir)
|
|
|
|
|
|
|
|
|
|
|
2021-07-26 17:54:13 +00:00
|
|
|
|
def runFederatedSharesDaemon(baseDir: str, httpd, httpPrefix: str,
                             domainFull: str, proxyType: str, debug: bool,
                             systemLanguage: str) -> None:
    """Runs the daemon used to update federated shared items

    Loops forever: roughly every six hours it downloads the shared
    items catalogs from each federated domain in the instance config
    and converts them into the internal shares format. It also
    rotates this instance's own federation token every 7-14 days.
    """
    secondsPerHour = 60 * 60
    # retry interval when config or tokens are not yet available
    fileCheckIntervalSec = 120
    # allow the rest of the daemon to finish starting up first
    time.sleep(60)
    # the token for this instance will be changed every 7-14 days
    minDays = 7
    maxDays = 14
    _generateNextSharesTokenUpdate(baseDir, minDays, maxDays)
    while True:
        sharedItemsFederatedDomainsStr = \
            getConfigParam(baseDir, 'sharedItemsFederatedDomains')
        if not sharedItemsFederatedDomainsStr:
            time.sleep(fileCheckIntervalSec)
            continue

        # occasionally change the federated shared items token
        # for this instance
        _regenerateSharesToken(baseDir, domainFull, minDays, maxDays, httpd)

        # get a list of the domains within the shared items federation
        sharedItemsFederatedDomains = []
        sharedItemsFederatedDomainsList = \
            sharedItemsFederatedDomainsStr.split(',')
        for sharedFederatedDomain in sharedItemsFederatedDomainsList:
            sharedItemsFederatedDomains.append(sharedFederatedDomain.strip())
        if not sharedItemsFederatedDomains:
            time.sleep(fileCheckIntervalSec)
            continue

        # load the tokens
        tokensFilename = \
            baseDir + '/accounts/sharedItemsFederationTokens.json'
        if not os.path.isfile(tokensFilename):
            time.sleep(fileCheckIntervalSec)
            continue
        tokensJson = loadJson(tokensFilename, 1, 2)
        if not tokensJson:
            time.sleep(fileCheckIntervalSec)
            continue

        session = createSession(proxyType)
        # refresh the cache for each catalog type returned by
        # getSharesFilesList (presumably shares and wanted items)
        for sharesFileType in getSharesFilesList():
            _updateFederatedSharesCache(session, sharedItemsFederatedDomains,
                                        baseDir, domainFull, httpPrefix,
                                        tokensJson, debug, systemLanguage,
                                        sharesFileType)
        time.sleep(secondsPerHour * 6)
|
2021-07-27 12:55:44 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _dfcToSharesFormat(catalogJson: {},
                       baseDir: str, systemLanguage: str,
                       httpPrefix: str, domainFull: str) -> {}:
    """Converts DFC format into the internal format used to store shared items.
    This simplifies subsequent search and display

    catalogJson: a downloaded DFC catalog (see sharesCatalogEndpoint
    for the shape of each entry in DFC:supplies)
    Returns a dict of shared items keyed by item id.
    """
    if not catalogJson.get('DFC:supplies'):
        return {}
    sharesJson = {}

    # load the DFC id mappings for each known product category
    dfcIds = {}
    productTypesList = getCategoryTypes(baseDir)
    for productType in productTypesList:
        dfcIds[productType] = _loadDfcIds(baseDir, systemLanguage, productType,
                                          httpPrefix, domainFull)

    currTime = int(time.time())
    for item in catalogJson['DFC:supplies']:
        # skip entries which are missing any required field
        if not item.get('@id') or \
           not item.get('@type') or \
           not item.get('DFC:hasType') or \
           not item.get('DFC:startDate') or \
           not item.get('DFC:expiryDate') or \
           not item.get('DFC:quantity') or \
           not item.get('DFC:price') or \
           not item.get('DFC:description'):
            continue

        # price must be "<amount> <currency>"
        if ' ' not in item['DFC:price']:
            continue
        # description must be "<display name>: <summary>"
        if ':' not in item['DFC:description']:
            continue
        # hasType must be "<prefix>:<id>"
        if ':' not in item['DFC:hasType']:
            continue

        # parse the date strings into epoch seconds
        startTimeSec = dateStringToSeconds(item['DFC:startDate'])
        if not startTimeSec:
            continue
        expiryTimeSec = dateStringToSeconds(item['DFC:expiryDate'])
        if not expiryTimeSec:
            continue
        if expiryTimeSec < currTime:
            # has expired
            continue

        if item['DFC:hasType'].startswith('epicyon:'):
            # epicyon's own ontology: the id is the item type itself,
            # with underscores standing in for spaces
            itemType = item['DFC:hasType'].split(':')[1]
            itemType = itemType.replace('_', ' ')
            itemCategory = 'non-food'
            productType = None
        else:
            # DFC product glossary: search each category's id mapping
            # for a matching share type
            hasType = item['DFC:hasType'].split(':')[1]
            itemType = None
            productType = None
            for prodType in productTypesList:
                itemType = _getshareTypeFromDfcId(hasType, dfcIds[prodType])
                if itemType:
                    productType = prodType
                    break
            itemCategory = 'food'
        if not itemType:
            continue

        # apply the instance-wide content filter to the whole item text
        allText = item['DFC:description'] + ' ' + itemType + ' ' + itemCategory
        if isFilteredGlobally(baseDir, allText):
            continue

        dfcId = None
        if productType:
            dfcId = dfcIds[productType][itemType]
        itemID = item['@id']
        description = item['DFC:description'].split(':', 1)[1].strip()

        imageUrl = ''
        if item.get('DFC:Image'):
            imageUrl = item['DFC:Image']
        sharesJson[itemID] = {
            "displayName": item['DFC:description'].split(':')[0],
            "summary": description,
            "imageUrl": imageUrl,
            "itemQty": float(item['DFC:quantity']),
            "dfcId": dfcId,
            "itemType": itemType,
            "category": itemCategory,
            "location": "",
            "published": startTimeSec,
            "expire": expiryTimeSec,
            "itemPrice": item['DFC:price'].split(' ')[0],
            "itemCurrency": item['DFC:price'].split(' ')[1]
        }
    return sharesJson
|
2021-09-19 13:59:31 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def shareCategoryIcon(category: str) -> str:
|
|
|
|
|
"""Returns unicode icon for the given category
|
|
|
|
|
"""
|
|
|
|
|
categoryIcons = {
|
|
|
|
|
'accommodation': '🏠',
|
|
|
|
|
'clothes': '👚',
|
|
|
|
|
'tools': '🔧',
|
|
|
|
|
'food': '🍏'
|
|
|
|
|
}
|
|
|
|
|
if categoryIcons.get(category):
|
|
|
|
|
return categoryIcons[category]
|
|
|
|
|
return ''
|