epicyon/shares.py

__filename__ = "shares.py"
__author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__status__ = "Production"
__module_group__ = "Timeline"

import os
import re
import secrets
import time
import datetime
from session import getJson
from webfinger import webfingerHandle
from auth import createBasicAuthHeader
from auth import constantTimeStringCheck
from posts import getPersonBox
from session import postJson
from session import postImage
from session import createSession
from utils import getConfigParam
from utils import getFullDomain
from utils import validNickname
from utils import loadJson
from utils import saveJson
from utils import getImageExtensions
from utils import hasObjectDict
from utils import removeDomainPort
from utils import isAccountDir
from utils import acctDir
from utils import isfloat
from media import processMetaData


def _loadDfcIds(baseDir: str, systemLanguage: str) -> {}:
"""Loads the product types ontology
This is used to add an id to shared items
"""
productTypesFilename = baseDir + '/ontology/customProductTypes.json'
if not os.path.isfile(productTypesFilename):
productTypesFilename = baseDir + '/ontology/productTypes.json'
productTypes = loadJson(productTypesFilename)
if not productTypes:
return None
if not productTypes.get('@graph'):
return None
if len(productTypes['@graph']) == 0:
return None
if not productTypes['@graph'][0].get('rdfs:label'):
return None
languageExists = False
for label in productTypes['@graph'][0]['rdfs:label']:
if not label.get('@language'):
continue
        if label['@language'] == systemLanguage:
languageExists = True
break
if not languageExists:
print('productTypes ontology does not contain the language ' +
systemLanguage)
return None
dfcIds = {}
for item in productTypes['@graph']:
if not item.get('@id'):
continue
if not item.get('rdfs:label'):
continue
for label in item['rdfs:label']:
if not label.get('@language'):
continue
if not label.get('@value'):
continue
if label['@language'] == systemLanguage:
dfcIds[label['@value'].lower()] = item['@id']
break
return dfcIds
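
# Editor's note: an illustrative sketch (not copied from the real ontology
# files) of the minimal JSON shape which _loadDfcIds() expects to find in
# ontology/productTypes.json or ontology/customProductTypes.json:
#
# {
#   "@graph": [
#     {"@id": ".../productTypes.rdf#apple",
#      "rdfs:label": [{"@language": "en", "@value": "Apple"}]}
#   ]
# }
#
# With systemLanguage set to "en" this would yield the dict
# {"apple": ".../productTypes.rdf#apple"}.

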
def getValidSharedItemID(actor: str, displayName: str) -> str:
"""Removes any invalid characters from the display name to
produce an item ID
"""
removeChars = (' ', '\n', '\r', '#')
for ch in removeChars:
displayName = displayName.replace(ch, '')
removeChars2 = ('+', '/', '\\', '?', '&')
for ch in removeChars2:
displayName = displayName.replace(ch, '-')
displayName = displayName.replace('.', '_')
    displayName = displayName.replace('’', "'")
actor = actor.replace('://', '___')
actor = actor.replace('/', '--')
return actor + '--shareditems--' + displayName
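
# For example (illustrative values): an actor of
# 'https://somedomain/users/alice' with a display name of 'Old bicycle'
# becomes 'https___somedomain--users--alice--shareditems--Oldbicycle'.

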
def removeSharedItem(baseDir: str, nickname: str, domain: str,
displayName: str,
httpPrefix: str, domainFull: str) -> None:
"""Removes a share for a person
"""
sharesFilename = acctDir(baseDir, nickname, domain) + '/shares.json'
if not os.path.isfile(sharesFilename):
print('ERROR: missing shares.json ' + sharesFilename)
return
sharesJson = loadJson(sharesFilename)
if not sharesJson:
print('ERROR: shares.json could not be loaded from ' + sharesFilename)
return
actor = httpPrefix + '://' + domainFull + '/users/' + nickname
itemID = getValidSharedItemID(actor, displayName)
if sharesJson.get(itemID):
# remove any image for the item
itemIDfile = baseDir + '/sharefiles/' + nickname + '/' + itemID
if sharesJson[itemID]['imageUrl']:
formats = getImageExtensions()
for ext in formats:
if sharesJson[itemID]['imageUrl'].endswith('.' + ext):
if os.path.isfile(itemIDfile + '.' + ext):
os.remove(itemIDfile + '.' + ext)
# remove the item itself
del sharesJson[itemID]
saveJson(sharesJson, sharesFilename)
else:
print('ERROR: share index "' + itemID +
'" does not exist in ' + sharesFilename)


def _addShareDurationSec(duration: str, published: int) -> int:
"""Returns the duration for the shared item in seconds
"""
if ' ' not in duration:
return 0
durationList = duration.split(' ')
if not durationList[0].isdigit():
return 0
if 'hour' in durationList[1]:
return published + (int(durationList[0]) * 60 * 60)
if 'day' in durationList[1]:
return published + (int(durationList[0]) * 60 * 60 * 24)
if 'week' in durationList[1]:
return published + (int(durationList[0]) * 60 * 60 * 24 * 7)
if 'month' in durationList[1]:
return published + (int(durationList[0]) * 60 * 60 * 24 * 30)
if 'year' in durationList[1]:
return published + (int(durationList[0]) * 60 * 60 * 24 * 365)
return 0
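
# For example, with published = 1600000000 a duration string of '2 days'
# returns 1600000000 + (2 * 60 * 60 * 24) = 1600172800, while a string in
# an unrecognised format returns 0.

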
def _getshareDfcId(baseDir: str, systemLanguage: str,
itemType: str, itemCategory: str,
translate: {}, dfcIds: {} = None) -> str:
"""Attempts to obtain a DFC Id for the shared item,
based upon productTypes ontology.
See https://github.com/datafoodconsortium/ontology
"""
if translate['food'] not in itemCategory.lower():
return ''
if not dfcIds:
dfcIds = _loadDfcIds(baseDir, systemLanguage)
if not dfcIds:
return ''
itemTypeLower = itemType.lower()
matchName = ''
matchId = ''
for name, uri in dfcIds.items():
if name not in itemTypeLower:
continue
if len(name) > len(matchName):
matchName = name
matchId = uri
if not matchId:
# bag of words match
maxMatchedWords = 0
for name, uri in dfcIds.items():
words = name.split(' ')
score = 0
for wrd in words:
if wrd in itemTypeLower:
score += 1
if score > maxMatchedWords:
maxMatchedWords = score
matchId = uri
return matchId


def _getshareTypeFromDfcId(dfcUri: str, dfcIds: {}) -> str:
"""Attempts to obtain a share item type from its DFC Id,
based upon productTypes ontology.
See https://github.com/datafoodconsortium/ontology
"""
for name, uri in dfcIds.items():
if uri.endswith('#' + dfcUri):
return name
elif uri == dfcUri:
return name
return None


def _indicateNewShareAvailable(baseDir: str, httpPrefix: str,
domainFull: str) -> None:
"""Indicate to each account that a new share is available
"""
for subdir, dirs, files in os.walk(baseDir + '/accounts'):
for handle in dirs:
if not isAccountDir(handle):
continue
accountDir = baseDir + '/accounts/' + handle
newShareFile = accountDir + '/.newShare'
if os.path.isfile(newShareFile):
continue
nickname = handle.split('@')[0]
try:
with open(newShareFile, 'w+') as fp:
fp.write(httpPrefix + '://' + domainFull +
'/users/' + nickname + '/tlshares')
except BaseException:
pass
break


def addShare(baseDir: str,
httpPrefix: str, nickname: str, domain: str, port: int,
displayName: str, summary: str, imageFilename: str,
itemQty: int, itemType: str, itemCategory: str, location: str,
duration: str, debug: bool, city: str,
price: str, currency: str,
systemLanguage: str, translate: {}) -> None:
"""Adds a new share
"""
sharesFilename = acctDir(baseDir, nickname, domain) + '/shares.json'
sharesJson = {}
if os.path.isfile(sharesFilename):
sharesJson = loadJson(sharesFilename, 1, 2)
duration = duration.lower()
published = int(time.time())
durationSec = _addShareDurationSec(duration, published)
domainFull = getFullDomain(domain, port)
actor = httpPrefix + '://' + domainFull + '/users/' + nickname
itemID = getValidSharedItemID(actor, displayName)
dfcId = _getshareDfcId(baseDir, systemLanguage,
itemType, itemCategory, translate)
# has an image for this share been uploaded?
imageUrl = None
moveImage = False
if not imageFilename:
sharesImageFilename = \
acctDir(baseDir, nickname, domain) + '/upload'
formats = getImageExtensions()
for ext in formats:
if os.path.isfile(sharesImageFilename + '.' + ext):
imageFilename = sharesImageFilename + '.' + ext
moveImage = True
domainFull = getFullDomain(domain, port)
# copy or move the image for the shared item to its destination
if imageFilename:
if os.path.isfile(imageFilename):
if not os.path.isdir(baseDir + '/sharefiles'):
os.mkdir(baseDir + '/sharefiles')
if not os.path.isdir(baseDir + '/sharefiles/' + nickname):
os.mkdir(baseDir + '/sharefiles/' + nickname)
itemIDfile = baseDir + '/sharefiles/' + nickname + '/' + itemID
formats = getImageExtensions()
for ext in formats:
if not imageFilename.endswith('.' + ext):
continue
processMetaData(baseDir, nickname, domain,
imageFilename, itemIDfile + '.' + ext,
city)
if moveImage:
os.remove(imageFilename)
imageUrl = \
httpPrefix + '://' + domainFull + \
'/sharefiles/' + nickname + '/' + itemID + '.' + ext
sharesJson[itemID] = {
"displayName": displayName,
"summary": summary,
"imageUrl": imageUrl,
"itemQty": itemQty,
"dfcId": dfcId,
"itemType": itemType,
"category": itemCategory,
"location": location,
"published": published,
"expire": durationSec,
"itemPrice": price,
"itemCurrency": currency
}
saveJson(sharesJson, sharesFilename)
_indicateNewShareAvailable(baseDir, httpPrefix, domainFull)


def expireShares(baseDir: str) -> None:
"""Removes expired items from shares
"""
for subdir, dirs, files in os.walk(baseDir + '/accounts'):
for account in dirs:
if not isAccountDir(account):
continue
nickname = account.split('@')[0]
domain = account.split('@')[1]
_expireSharesForAccount(baseDir, nickname, domain)
break


def _expireSharesForAccount(baseDir: str, nickname: str, domain: str) -> None:
"""Removes expired items from shares for a particular account
"""
handleDomain = removeDomainPort(domain)
handle = nickname + '@' + handleDomain
sharesFilename = baseDir + '/accounts/' + handle + '/shares.json'
if not os.path.isfile(sharesFilename):
return
sharesJson = loadJson(sharesFilename, 1, 2)
if not sharesJson:
return
currTime = int(time.time())
deleteItemID = []
for itemID, item in sharesJson.items():
if currTime > item['expire']:
deleteItemID.append(itemID)
if not deleteItemID:
return
for itemID in deleteItemID:
del sharesJson[itemID]
# remove any associated images
itemIDfile = baseDir + '/sharefiles/' + nickname + '/' + itemID
formats = getImageExtensions()
for ext in formats:
if os.path.isfile(itemIDfile + '.' + ext):
os.remove(itemIDfile + '.' + ext)
saveJson(sharesJson, sharesFilename)


def getSharesFeedForPerson(baseDir: str,
domain: str, port: int,
path: str, httpPrefix: str,
sharesPerPage=12) -> {}:
"""Returns the shares for an account from GET requests
"""
if '/shares' not in path:
return None
# handle page numbers
headerOnly = True
pageNumber = None
if '?page=' in path:
pageNumber = path.split('?page=')[1]
if pageNumber == 'true':
pageNumber = 1
else:
try:
pageNumber = int(pageNumber)
except BaseException:
pass
path = path.split('?page=')[0]
headerOnly = False
if not path.endswith('/shares'):
return None
nickname = None
if path.startswith('/users/'):
nickname = path.replace('/users/', '', 1).replace('/shares', '')
if path.startswith('/@'):
nickname = path.replace('/@', '', 1).replace('/shares', '')
if not nickname:
return None
if not validNickname(domain, nickname):
return None
domain = getFullDomain(domain, port)
handleDomain = removeDomainPort(domain)
sharesFilename = acctDir(baseDir, nickname, handleDomain) + '/shares.json'
if headerOnly:
noOfShares = 0
if os.path.isfile(sharesFilename):
sharesJson = loadJson(sharesFilename)
if sharesJson:
noOfShares = len(sharesJson.items())
idStr = httpPrefix + '://' + domain + '/users/' + nickname
shares = {
'@context': 'https://www.w3.org/ns/activitystreams',
'first': idStr + '/shares?page=1',
'id': idStr + '/shares',
'totalItems': str(noOfShares),
'type': 'OrderedCollection'
}
return shares
if not pageNumber:
pageNumber = 1
nextPageNumber = int(pageNumber + 1)
idStr = httpPrefix + '://' + domain + '/users/' + nickname
shares = {
'@context': 'https://www.w3.org/ns/activitystreams',
'id': idStr + '/shares?page=' + str(pageNumber),
'orderedItems': [],
'partOf': idStr + '/shares',
'totalItems': 0,
'type': 'OrderedCollectionPage'
}
if not os.path.isfile(sharesFilename):
        print('DEBUG: no shares.json file ' + sharesFilename)
return shares
currPage = 1
pageCtr = 0
totalCtr = 0
sharesJson = loadJson(sharesFilename)
if sharesJson:
for itemID, item in sharesJson.items():
pageCtr += 1
totalCtr += 1
if currPage == pageNumber:
shares['orderedItems'].append(item)
if pageCtr >= sharesPerPage:
pageCtr = 0
currPage += 1
shares['totalItems'] = totalCtr
lastPage = int(totalCtr / sharesPerPage)
if lastPage < 1:
lastPage = 1
if nextPageNumber > lastPage:
shares['next'] = \
httpPrefix + '://' + domain + '/users/' + nickname + \
'/shares?page=' + str(lastPage)
return shares


def sendShareViaServer(baseDir, session,
fromNickname: str, password: str,
fromDomain: str, fromPort: int,
httpPrefix: str, displayName: str,
summary: str, imageFilename: str,
itemQty: int, itemType: str, itemCategory: str,
location: str, duration: str,
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str,
itemPrice: str, itemCurrency: str) -> {}:
"""Creates an item share via c2s
"""
if not session:
print('WARN: No session for sendShareViaServer')
return 6
fromDomainFull = getFullDomain(fromDomain, fromPort)
toUrl = 'https://www.w3.org/ns/activitystreams#Public'
ccUrl = httpPrefix + '://' + fromDomainFull + \
'/users/' + fromNickname + '/followers'
actor = httpPrefix + '://' + fromDomainFull + '/users/' + fromNickname
newShareJson = {
"@context": "https://www.w3.org/ns/activitystreams",
'type': 'Add',
'actor': actor,
'target': actor + '/shares',
'object': {
"type": "Offer",
"displayName": displayName,
"summary": summary,
"itemQty": itemQty,
"itemType": itemType,
"category": itemCategory,
"location": location,
"duration": duration,
"itemPrice": itemPrice,
"itemCurrency": itemCurrency,
'to': [toUrl],
'cc': [ccUrl]
},
'to': [toUrl],
'cc': [ccUrl]
}
handle = httpPrefix + '://' + fromDomainFull + '/@' + fromNickname
# lookup the inbox for the To handle
wfRequest = \
webfingerHandle(session, handle, httpPrefix,
cachedWebfingers,
fromDomain, projectVersion, debug)
if not wfRequest:
if debug:
print('DEBUG: share webfinger failed for ' + handle)
return 1
if not isinstance(wfRequest, dict):
print('WARN: share webfinger for ' + handle +
' did not return a dict. ' + str(wfRequest))
return 1
postToBox = 'outbox'
# get the actor inbox for the To handle
(inboxUrl, pubKeyId, pubKey,
fromPersonId, sharedInbox,
avatarUrl, displayName) = getPersonBox(baseDir, session, wfRequest,
personCache, projectVersion,
httpPrefix, fromNickname,
fromDomain, postToBox,
83653)
if not inboxUrl:
if debug:
print('DEBUG: share no ' + postToBox +
' was found for ' + handle)
return 3
if not fromPersonId:
if debug:
print('DEBUG: share no actor was found for ' + handle)
return 4
authHeader = createBasicAuthHeader(fromNickname, password)
if imageFilename:
headers = {
'host': fromDomain,
'Authorization': authHeader
}
postResult = \
postImage(session, imageFilename, [],
inboxUrl.replace('/' + postToBox, '/shares'),
headers)
headers = {
'host': fromDomain,
'Content-type': 'application/json',
'Authorization': authHeader
}
postResult = \
postJson(httpPrefix, fromDomainFull,
session, newShareJson, [], inboxUrl, headers, 30, True)
if not postResult:
if debug:
print('DEBUG: POST share failed for c2s to ' + inboxUrl)
# return 5
if debug:
print('DEBUG: c2s POST share item success')
return newShareJson


def sendUndoShareViaServer(baseDir: str, session,
fromNickname: str, password: str,
fromDomain: str, fromPort: int,
httpPrefix: str, displayName: str,
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str) -> {}:
"""Undoes a share via c2s
"""
if not session:
print('WARN: No session for sendUndoShareViaServer')
return 6
fromDomainFull = getFullDomain(fromDomain, fromPort)
toUrl = 'https://www.w3.org/ns/activitystreams#Public'
ccUrl = httpPrefix + '://' + fromDomainFull + \
'/users/' + fromNickname + '/followers'
actor = httpPrefix + '://' + fromDomainFull + '/users/' + fromNickname
undoShareJson = {
"@context": "https://www.w3.org/ns/activitystreams",
'type': 'Remove',
'actor': actor,
'target': actor + '/shares',
'object': {
"type": "Offer",
"displayName": displayName,
'to': [toUrl],
'cc': [ccUrl]
},
'to': [toUrl],
'cc': [ccUrl]
}
handle = httpPrefix + '://' + fromDomainFull + '/@' + fromNickname
# lookup the inbox for the To handle
wfRequest = \
webfingerHandle(session, handle, httpPrefix, cachedWebfingers,
fromDomain, projectVersion, debug)
if not wfRequest:
if debug:
print('DEBUG: unshare webfinger failed for ' + handle)
return 1
if not isinstance(wfRequest, dict):
print('WARN: unshare webfinger for ' + handle +
' did not return a dict. ' + str(wfRequest))
return 1
postToBox = 'outbox'
# get the actor inbox for the To handle
(inboxUrl, pubKeyId, pubKey,
fromPersonId, sharedInbox,
avatarUrl, displayName) = getPersonBox(baseDir, session, wfRequest,
personCache, projectVersion,
httpPrefix, fromNickname,
fromDomain, postToBox,
12663)
if not inboxUrl:
if debug:
print('DEBUG: unshare no ' + postToBox +
' was found for ' + handle)
return 3
if not fromPersonId:
if debug:
print('DEBUG: unshare no actor was found for ' + handle)
return 4
authHeader = createBasicAuthHeader(fromNickname, password)
headers = {
'host': fromDomain,
'Content-type': 'application/json',
'Authorization': authHeader
}
postResult = \
postJson(httpPrefix, fromDomainFull,
session, undoShareJson, [], inboxUrl,
headers, 30, True)
if not postResult:
if debug:
print('DEBUG: POST unshare failed for c2s to ' + inboxUrl)
# return 5
if debug:
print('DEBUG: c2s POST unshare success')
return undoShareJson


def outboxShareUpload(baseDir: str, httpPrefix: str,
nickname: str, domain: str, port: int,
messageJson: {}, debug: bool, city: str,
systemLanguage: str, translate: {}) -> None:
""" When a shared item is received by the outbox from c2s
"""
if not messageJson.get('type'):
return
if not messageJson['type'] == 'Add':
return
if not hasObjectDict(messageJson):
return
if not messageJson['object'].get('type'):
if debug:
            print('DEBUG: share object has no type')
return
if not messageJson['object']['type'] == 'Offer':
2019-07-23 20:00:17 +00:00
if debug:
print('DEBUG: not an Offer activity')
return
if not messageJson['object'].get('displayName'):
if debug:
print('DEBUG: displayName missing from Offer')
return
if not messageJson['object'].get('summary'):
if debug:
print('DEBUG: summary missing from Offer')
return
if not messageJson['object'].get('itemQty'):
if debug:
print('DEBUG: itemQty missing from Offer')
return
if not messageJson['object'].get('itemType'):
if debug:
print('DEBUG: itemType missing from Offer')
return
if not messageJson['object'].get('category'):
if debug:
print('DEBUG: category missing from Offer')
return
if not messageJson['object'].get('location'):
if debug:
print('DEBUG: location missing from Offer')
return
if not messageJson['object'].get('duration'):
if debug:
print('DEBUG: duration missing from Offer')
return
addShare(baseDir,
httpPrefix, nickname, domain, port,
messageJson['object']['displayName'],
messageJson['object']['summary'],
             messageJson['object'].get('imageFilename'),
messageJson['object']['itemQty'],
messageJson['object']['itemType'],
             messageJson['object']['category'],
messageJson['object']['location'],
messageJson['object']['duration'],
debug, city,
messageJson['object']['itemPrice'],
messageJson['object']['itemCurrency'],
systemLanguage, translate)
if debug:
print('DEBUG: shared item received via c2s')


def outboxUndoShareUpload(baseDir: str, httpPrefix: str,
nickname: str, domain: str, port: int,
messageJson: {}, debug: bool) -> None:
""" When a shared item is removed via c2s
"""
if not messageJson.get('type'):
return
if not messageJson['type'] == 'Remove':
return
if not hasObjectDict(messageJson):
return
if not messageJson['object'].get('type'):
if debug:
            print('DEBUG: unshare object has no type')
return
if not messageJson['object']['type'] == 'Offer':
2019-07-23 21:14:16 +00:00
if debug:
print('DEBUG: not an Offer activity')
return
if not messageJson['object'].get('displayName'):
if debug:
print('DEBUG: displayName missing from Offer')
return
domainFull = getFullDomain(domain, port)
removeSharedItem(baseDir, nickname, domain,
messageJson['object']['displayName'],
httpPrefix, domainFull)
if debug:
print('DEBUG: shared item removed via c2s')


def _sharesCatalogParams(path: str) -> (bool, float, float, str):
"""Returns parameters when accessing the shares catalog
"""
today = False
minPrice = 0
maxPrice = 9999999
matchPattern = None
if '?' not in path:
return today, minPrice, maxPrice, matchPattern
args = path.split('?', 1)[1]
argList = args.split(';')
for arg in argList:
if '=' not in arg:
continue
key = arg.split('=')[0].lower()
value = arg.split('=')[1]
if key == 'today':
value = value.lower()
if 't' in value or 'y' in value or '1' in value:
today = True
elif key.startswith('min'):
if isfloat(value):
minPrice = float(value)
elif key.startswith('max'):
if isfloat(value):
maxPrice = float(value)
elif key.startswith('match'):
matchPattern = value
return today, minPrice, maxPrice, matchPattern
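
# For example (illustrative path), a request for
# '/catalog?minPrice=1;maxPrice=10;today=true' returns the tuple
# (True, 1.0, 10.0, None): arguments are separated by ';' and no match
# pattern was supplied.

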
def sharesCatalogAccountEndpoint(baseDir: str, httpPrefix: str,
nickname: str, domain: str,
domainFull: str,
path: str) -> {}:
"""Returns the endpoint for the shares catalog of a particular account
See https://github.com/datafoodconsortium/ontology
"""
today, minPrice, maxPrice, matchPattern = _sharesCatalogParams(path)
dfcUrl = \
"http://static.datafoodconsortium.org/ontologies/DFC_FullModel.owl#"
dfcPtUrl = \
"http://static.datafoodconsortium.org/data/productTypes.rdf#"
owner = httpPrefix + '://' + domainFull + '/users/' + nickname
dfcInstanceId = owner + '/catalog'
endpoint = {
"@context": {
"DFC": dfcUrl,
"dfc-pt": dfcPtUrl,
"@base": "http://maPlateformeNationale"
},
"@id": dfcInstanceId,
"@type": "DFC:Entreprise",
"DFC:supplies": []
}
currDate = datetime.datetime.utcnow()
currDateStr = currDate.strftime("%Y-%m-%d")
sharesFilename = acctDir(baseDir, nickname, domain) + '/shares.json'
if not os.path.isfile(sharesFilename):
return endpoint
sharesJson = loadJson(sharesFilename, 1, 2)
if not sharesJson:
return endpoint
for itemID, item in sharesJson.items():
if not item.get('dfcId'):
continue
if '#' not in item['dfcId']:
continue
        if today:
            publishedDate = \
                datetime.datetime.utcfromtimestamp(item['published'])
            if publishedDate.strftime("%Y-%m-%d") != currDateStr:
                continue
if minPrice is not None:
if float(item['itemPrice']) < minPrice:
continue
if maxPrice is not None:
if float(item['itemPrice']) > maxPrice:
continue
description = item['displayName'] + ': ' + item['summary']
if matchPattern:
if not re.match(matchPattern, description):
continue
        expireDate = datetime.datetime.fromtimestamp(item['expire'])
expireDateStr = expireDate.strftime("%Y-%m-%dT%H:%M:%SZ")
shareId = getValidSharedItemID(owner, item['displayName'])
dfcId = item['dfcId'].split('#')[1]
priceStr = item['itemPrice'] + ' ' + item['itemCurrency']
catalogItem = {
"@id": shareId,
"@type": "DFC:SuppliedProduct",
"DFC:hasType": "dfc-pt:" + dfcId,
"DFC:startDate": item['published'],
"DFC:expiryDate": expireDateStr,
"DFC:quantity": item['itemQty'],
"DFC:price": priceStr,
"DFC:Image": item['imageUrl'],
"DFC:description": description
}
endpoint['DFC:supplies'].append(catalogItem)
return endpoint


def sharesCatalogEndpoint(baseDir: str, httpPrefix: str,
domainFull: str,
path: str) -> {}:
"""Returns the endpoint for the shares catalog for the instance
See https://github.com/datafoodconsortium/ontology
"""
today, minPrice, maxPrice, matchPattern = _sharesCatalogParams(path)
dfcUrl = \
"http://static.datafoodconsortium.org/ontologies/DFC_FullModel.owl#"
dfcPtUrl = \
"http://static.datafoodconsortium.org/data/productTypes.rdf#"
dfcInstanceId = httpPrefix + '://' + domainFull + '/catalog'
endpoint = {
"@context": {
"DFC": dfcUrl,
"dfc-pt": dfcPtUrl,
"@base": "http://maPlateformeNationale"
},
"@id": dfcInstanceId,
"@type": "DFC:Entreprise",
"DFC:supplies": []
}
currDate = datetime.datetime.utcnow()
currDateStr = currDate.strftime("%Y-%m-%d")
for subdir, dirs, files in os.walk(baseDir + '/accounts'):
for acct in dirs:
if not isAccountDir(acct):
continue
nickname = acct.split('@')[0]
domain = acct.split('@')[1]
owner = httpPrefix + '://' + domainFull + '/users/' + nickname
sharesFilename = \
acctDir(baseDir, nickname, domain) + '/shares.json'
if not os.path.isfile(sharesFilename):
continue
sharesJson = loadJson(sharesFilename, 1, 2)
if not sharesJson:
continue
for itemID, item in sharesJson.items():
if not item.get('dfcId'):
continue
if '#' not in item['dfcId']:
continue
                if today:
                    publishedDate = \
                        datetime.datetime.utcfromtimestamp(
                            item['published'])
                    if publishedDate.strftime("%Y-%m-%d") != currDateStr:
                        continue
if minPrice is not None:
if float(item['itemPrice']) < minPrice:
continue
if maxPrice is not None:
if float(item['itemPrice']) > maxPrice:
continue
description = item['displayName'] + ': ' + item['summary']
if matchPattern:
if not re.match(matchPattern, description):
continue
                expireDate = \
                    datetime.datetime.fromtimestamp(item['expire'])
expireDateStr = expireDate.strftime("%Y-%m-%dT%H:%M:%SZ")
shareId = getValidSharedItemID(owner, item['displayName'])
dfcId = item['dfcId'].split('#')[1]
priceStr = item['itemPrice'] + ' ' + item['itemCurrency']
catalogItem = {
"@id": shareId,
"@type": "DFC:SuppliedProduct",
"DFC:hasType": "dfc-pt:" + dfcId,
"DFC:startDate": item['published'],
"DFC:expiryDate": expireDateStr,
"DFC:quantity": item['itemQty'],
"DFC:price": priceStr,
"DFC:Image": item['imageUrl'],
"DFC:description": description
}
endpoint['DFC:supplies'].append(catalogItem)
return endpoint


def sharesCatalogCSVEndpoint(baseDir: str, httpPrefix: str,
domainFull: str,
path: str) -> str:
"""Returns a CSV version of the shares catalog
"""
catalogJson = \
sharesCatalogEndpoint(baseDir, httpPrefix, domainFull, path)
if not catalogJson:
return ''
if not catalogJson.get('DFC:supplies'):
return ''
csvStr = \
'id,type,hasType,startDate,expiryDate,' + \
'quantity,price,Image,description\n'
for item in catalogJson['DFC:supplies']:
        # some fields (such as quantity and startDate) may not be strings,
        # so convert each value before concatenating
        csvStr += str(item['@id']) + ','
        csvStr += str(item['@type']) + ','
        csvStr += str(item['DFC:hasType']) + ','
        csvStr += str(item['DFC:startDate']) + ','
        csvStr += str(item['DFC:expiryDate']) + ','
        csvStr += str(item['DFC:quantity']) + ','
        csvStr += str(item['DFC:price']) + ','
        csvStr += str(item['DFC:Image']) + ','
        csvStr += str(item['DFC:description']) + '\n'
return csvStr


def generateSharedItemFederationTokens(sharedItemsFederatedDomains: [],
baseDir: str) -> {}:
"""Generates tokens for shared item federated domains
"""
if not sharedItemsFederatedDomains:
return {}
tokensJson = {}
if baseDir:
tokensFilename = \
baseDir + '/accounts/sharedItemsFederationTokens.json'
if os.path.isfile(tokensFilename):
tokensJson = loadJson(tokensFilename, 1, 2)
if tokensJson is None:
tokensJson = {}
tokensAdded = False
for domain in sharedItemsFederatedDomains:
if not tokensJson.get(domain):
tokensJson[domain] = ''
tokensAdded = True
if not tokensAdded:
return tokensJson
if baseDir:
saveJson(tokensJson, tokensFilename)
return tokensJson


def updateSharedItemFederationToken(baseDir: str,
tokenDomain: str, newToken: str,
tokensJson: {} = None) -> {}:
"""Updates an individual token for shared item federation
"""
if not tokensJson:
tokensJson = {}
if baseDir:
tokensFilename = \
baseDir + '/accounts/sharedItemsFederationTokens.json'
if os.path.isfile(tokensFilename):
tokensJson = loadJson(tokensFilename, 1, 2)
if tokensJson is None:
tokensJson = {}
updateRequired = False
if tokensJson.get(tokenDomain):
if tokensJson[tokenDomain] != newToken:
updateRequired = True
else:
updateRequired = True
if updateRequired:
tokensJson[tokenDomain] = newToken
if baseDir:
saveJson(tokensJson, tokensFilename)
return tokensJson


def mergeSharedItemTokens(baseDir: str, domain: str,
newSharedItemsFederatedDomains: [],
tokensJson: {}) -> {}:
"""When the shared item federation domains list has changed, update
the tokens dict accordingly
"""
removals = []
changed = False
for tokenDomain, tok in tokensJson.items():
if domain:
if tokenDomain.startswith(domain):
continue
if tokenDomain not in newSharedItemsFederatedDomains:
removals.append(tokenDomain)
# remove domains no longer in the federation list
for tokenDomain in removals:
del tokensJson[tokenDomain]
changed = True
# add new domains from the federation list
for tokenDomain in newSharedItemsFederatedDomains:
if tokenDomain not in tokensJson:
tokensJson[tokenDomain] = ''
changed = True
if baseDir and changed:
tokensFilename = \
baseDir + '/accounts/sharedItemsFederationTokens.json'
saveJson(tokensJson, tokensFilename)
return tokensJson


def createSharedItemFederationToken(baseDir: str,
tokenDomain: str,
                                    tokensJson: {} = None) -> {}:
    """Creates an individual token for shared item federation
"""
if not tokensJson:
tokensJson = {}
if baseDir:
tokensFilename = \
baseDir + '/accounts/sharedItemsFederationTokens.json'
if os.path.isfile(tokensFilename):
tokensJson = loadJson(tokensFilename, 1, 2)
if tokensJson is None:
tokensJson = {}
if not tokensJson.get(tokenDomain):
tokensJson[tokenDomain] = secrets.token_urlsafe(64)
if baseDir:
saveJson(tokensJson, tokensFilename)
return tokensJson


def authorizeSharedItems(sharedItemsFederatedDomains: [],
baseDir: str,
callingDomain: str,
authHeader: str,
debug: bool,
tokensJson: {} = None) -> bool:
"""HTTP simple token check for shared item federation
"""
if not sharedItemsFederatedDomains:
# no shared item federation
return False
if callingDomain not in sharedItemsFederatedDomains:
if debug:
print(callingDomain +
' is not in the shared items federation list')
return False
if 'Basic ' in authHeader:
if debug:
print('DEBUG: shared item federation should not use basic auth')
return False
providedToken = authHeader.replace('\n', '').replace('\r', '').strip()
if not providedToken:
if debug:
print('DEBUG: shared item federation token is empty')
return False
if len(providedToken) < 60:
if debug:
print('DEBUG: shared item federation token is too small ' +
providedToken)
return False
if not tokensJson:
tokensFilename = \
baseDir + '/accounts/sharedItemsFederationTokens.json'
if not os.path.isfile(tokensFilename):
if debug:
print('DEBUG: shared item federation tokens file missing ' +
tokensFilename)
return False
tokensJson = loadJson(tokensFilename, 1, 2)
if not tokensJson:
return False
if not tokensJson.get(callingDomain):
if debug:
print('DEBUG: shared item federation token ' +
'check failed for ' + callingDomain)
return False
if not constantTimeStringCheck(tokensJson[callingDomain], providedToken):
if debug:
print('DEBUG: shared item federation token ' +
'mismatch for ' + callingDomain)
return False
return True
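
# Editor's sketch of the expected client side of this check, based on
# _updateFederatedSharesCache() below: a federated instance sends its
# shared items token directly in the Authorization header (not basic
# auth) when requesting the catalog. The domains shown are placeholders:
#
#   asHeader = {'Accept': 'application/ld+json',
#               'Authorization': tokensJson['otherinstance.net']}
#   catalogJson = getJson(session, 'https://thisinstance.net/catalog',
#                         asHeader, None, False, __version__, 'https', None)

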
def _updateFederatedSharesCache(session, sharedItemsFederatedDomains: [],
baseDir: str, domain: str,
httpPrefix: str,
tokensJson: {}, debug: bool,
systemLanguage: str) -> None:
"""Updates the cache of federated shares for the instance.
This enables shared items to be available even when other instances
might not be online
"""
# create directories where catalogs will be stored
cacheDir = baseDir + '/cache'
if not os.path.isdir(cacheDir):
os.mkdir(cacheDir)
catalogsDir = cacheDir + '/catalogs'
if not os.path.isdir(catalogsDir):
os.mkdir(catalogsDir)
asHeader = {
'Accept': 'application/ld+json'
}
for federatedDomain in sharedItemsFederatedDomains:
# NOTE: federatedDomain does not have a port extension,
2021-07-26 17:54:13 +00:00
# so may not work in some situations
if federatedDomain.startswith(domain):
# only download from instances other than this one
continue
if not tokensJson.get(federatedDomain):
            # no token has been obtained for the other domain
continue
url = httpPrefix + '://' + federatedDomain + '/catalog'
asHeader['Authorization'] = tokensJson[federatedDomain]
catalogJson = getJson(session, url, asHeader, None,
debug, __version__, httpPrefix, None)
if not catalogJson:
print('WARN: failed to download shared items catalog for ' +
federatedDomain)
continue
catalogFilename = catalogsDir + '/' + federatedDomain + '.json'
if saveJson(catalogJson, catalogFilename):
print('Downloaded shared items catalog for ' + federatedDomain)
sharesJson = _dfcToSharesFormat(catalogJson,
baseDir, systemLanguage)
if sharesJson:
sharesFilename = \
catalogsDir + '/' + federatedDomain + '.shares.json'
saveJson(sharesJson, sharesFilename)
print('Converted shares catalog for ' + federatedDomain)
else:
time.sleep(2)


def runFederatedSharesWatchdog(projectVersion: str, httpd) -> None:
"""This tries to keep the federated shares update thread
running even if it dies
"""
print('Starting federated shares watchdog')
    federatedSharesOriginal = \
        httpd.thrFederatedSharesDaemon.clone(runFederatedSharesDaemon)
httpd.thrFederatedSharesDaemon.start()
while True:
time.sleep(55)
if httpd.thrFederatedSharesDaemon.is_alive():
continue
httpd.thrFederatedSharesDaemon.kill()
httpd.thrFederatedSharesDaemon = \
federatedSharesOriginal.clone(runFederatedSharesDaemon)
httpd.thrFederatedSharesDaemon.start()
print('Restarting federated shares daemon...')


def runFederatedSharesDaemon(baseDir: str, httpd, httpPrefix: str,
domain: str, proxyType: str, debug: bool,
systemLanguage: str) -> None:
"""Runs the daemon used to update federated shared items
"""
secondsPerHour = 60 * 60
fileCheckIntervalSec = 120
time.sleep(60)
while True:
sharedItemsFederatedDomainsStr = \
getConfigParam(baseDir, 'sharedItemsFederatedDomains')
if not sharedItemsFederatedDomainsStr:
time.sleep(fileCheckIntervalSec)
continue
# get a list of the domains within the shared items federation
sharedItemsFederatedDomains = []
sharedItemsFederatedDomainsList = \
sharedItemsFederatedDomainsStr.split(',')
for sharedFederatedDomain in sharedItemsFederatedDomainsList:
sharedItemsFederatedDomains.append(sharedFederatedDomain.strip())
if not sharedItemsFederatedDomains:
time.sleep(fileCheckIntervalSec)
continue
# load the tokens
tokensFilename = \
baseDir + '/accounts/sharedItemsFederationTokens.json'
if not os.path.isfile(tokensFilename):
time.sleep(fileCheckIntervalSec)
continue
tokensJson = loadJson(tokensFilename, 1, 2)
if not tokensJson:
time.sleep(fileCheckIntervalSec)
continue
session = createSession(proxyType)
_updateFederatedSharesCache(session, sharedItemsFederatedDomains,
baseDir, domain, httpPrefix, tokensJson,
debug, systemLanguage)
time.sleep(secondsPerHour * 6)


def _dfcToSharesFormat(catalogJson: {},
                       baseDir: str, systemLanguage: str) -> {}:
    """Converts DFC format into the internal format used to store shared items.
This simplifies subsequent search and display
"""
if not catalogJson.get('DFC:supplies'):
return {}
sharesJson = {}
dfcIds = _loadDfcIds(baseDir, systemLanguage)
currTime = int(time.time())
for item in catalogJson['DFC:supplies']:
if not item.get('@id') or \
not item.get('@type') or \
not item.get('DFC:hasType') or \
not item.get('DFC:startDate') or \
not item.get('DFC:expiryDate') or \
not item.get('DFC:quantity') or \
not item.get('DFC:price') or \
not item.get('DFC:Image') or \
not item.get('DFC:description'):
continue
if ' ' not in item['DFC:price']:
continue
if ':' not in item['DFC:description']:
continue
if ':' not in item['DFC:hasType']:
continue
try:
expiryTime = \
datetime.datetime.strptime(item['DFC:expiryDate'],
'%Y-%m-%dT%H:%M:%SZ')
except BaseException:
continue
durationSec = \
int((expiryTime - datetime.datetime(1970, 1, 1)).total_seconds())
if durationSec < currTime:
# has expired
continue
hasType = item['DFC:hasType'].split(':')[1]
itemType = _getshareTypeFromDfcId(hasType, dfcIds)
if not itemType:
continue
dfcId = dfcIds[itemType]
itemID = item['@id']
description = item['DFC:description'].split(':', 1)[1].strip()
sharesJson[itemID] = {
"displayName": item['DFC:description'].split(':')[0],
"summary": description,
"imageUrl": item['DFC:Image'],
"itemQty": item['DFC:quantity'],
"dfcId": dfcId,
"itemType": itemType,
"category": "food",
"location": "",
"published": item['DFC:startDate'],
"expire": durationSec,
"itemPrice": item['DFC:price'].split(' ')[0],
"itemCurrency": item['DFC:price'].split(' ')[1]
}
return sharesJson
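
# For example, a catalog entry with DFC:description 'Apples: organic apples'
# and DFC:price '2.50 EUR' is stored with displayName 'Apples', summary
# 'organic apples', itemPrice '2.50' and itemCurrency 'EUR'.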