Merge branch 'main' of gitlab.com:bashrc2/epicyon

merge-requests/30/head
Bob Mottram 2021-10-02 14:20:20 +01:00
commit 363042060e
153 changed files with 20364 additions and 3177 deletions

View File

@ -3,6 +3,6 @@ image: debian:testing
test:
script:
- apt-get update
- apt-get install -y python3-cryptography python3-dateutil python3-idna python3-numpy python3-pil.imagetk python3-requests python3-socks python3-setuptools python3-pyqrcode
- apt-get install -y python3-cryptography python3-dateutil python3-idna python3-numpy python3-pil.imagetk python3-requests python3-socks python3-setuptools python3-pyqrcode imagemagick gnupg
- python3 epicyon.py --tests
- python3 epicyon.py --testsnetwork

View File

@ -4,7 +4,7 @@ By submitting code, documentation or artwork you agree that it will be licensed
## Security Vulnerability Disclosure
Create an issue on https://gitlab.com/bashrc2/epicyon/issues. If the vulnerability is especially sensitive then send an XMPP message to **bob@freedombone.net** or a Matrix message to **@bob:matrix.freedombone.net**.
Create an issue on https://gitlab.com/bashrc2/epicyon/issues. If the vulnerability is especially sensitive then send an XMPP message to **bob@libreserver.org** or a Matrix message to **@bob:matrix.libreserver.org**.
## Code of Conduct
@ -14,7 +14,7 @@ The code of conduct can be found [here](code-of-conduct.md).
Submit to https://gitlab.com/bashrc2/epicyon/issues
You can also post patches in the old-fashioned style via email to **bob@freedombone.net**. Include **[Epicyon]** in the subject line, otherwise it may be ignored.
You can also post patches in the old-fashioned style via email to **bob@libreserver.org**. Include **[Epicyon]** in the subject line, otherwise it may be ignored.
## Development Style

View File

@ -168,8 +168,6 @@ server {
location / {
proxy_http_version 1.1;
client_max_body_size 31M;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forward-For $proxy_add_x_forwarded_for;
@ -185,10 +183,6 @@ server {
proxy_redirect off;
proxy_request_buffering off;
proxy_buffering off;
location ~ ^/accounts/(avatars|headers)/(.*).(png|jpg|gif|webp|svg) {
expires 1d;
proxy_pass http://localhost:7156;
}
proxy_pass http://localhost:7156;
}
}

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "ActivityPub"

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "ActivityPub"
@ -23,6 +23,7 @@ from utils import saveJson
from utils import undoAnnounceCollectionEntry
from utils import updateAnnounceCollection
from utils import localActorUrl
from utils import replaceUsersWithAt
from posts import sendSignedJson
from posts import getPersonBox
from session import postJson
@ -121,7 +122,8 @@ def createAnnounce(session, baseDir: str, federationList: [],
clientToServer: bool,
sendThreads: [], postLog: [],
personCache: {}, cachedWebfingers: {},
debug: bool, projectVersion: str) -> {}:
debug: bool, projectVersion: str,
signingPrivateKeyPem: str) -> {}:
"""Creates an announce message
Typically toUrl will be https://www.w3.org/ns/activitystreams#Public
and ccUrl might be a specific person favorited or repeated and the
@ -178,7 +180,8 @@ def createAnnounce(session, baseDir: str, federationList: [],
announceNickname, announceDomain, announcePort, None,
httpPrefix, True, clientToServer, federationList,
sendThreads, postLog, cachedWebfingers, personCache,
debug, projectVersion, None, groupAccount)
debug, projectVersion, None, groupAccount,
signingPrivateKeyPem, 639633)
return newAnnounce
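For orientation, a minimal sketch of the kind of Announce activity that createAnnounce assembles, addressed to Public in 'to' and to the repeated post's author in 'cc' as the docstring above describes. All ids and domains here are hypothetical illustrations, not values taken from the codebase:

    announceExample = {
        "@context": "https://www.w3.org/ns/activitystreams",
        "id": "https://example.net/users/alice/statuses/123456/activity",
        "type": "Announce",
        "actor": "https://example.net/users/alice",
        "object": "https://other.example/users/bob/statuses/99",
        "to": ["https://www.w3.org/ns/activitystreams#Public"],
        "cc": ["https://other.example/users/bob"],
        "published": "2021-10-02T14:20:20Z"
    }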
@ -188,7 +191,8 @@ def announcePublic(session, baseDir: str, federationList: [],
objectUrl: str, clientToServer: bool,
sendThreads: [], postLog: [],
personCache: {}, cachedWebfingers: {},
debug: bool, projectVersion: str) -> {}:
debug: bool, projectVersion: str,
signingPrivateKeyPem: str) -> {}:
"""Makes a public announcement
"""
fromDomain = getFullDomain(domain, port)
@ -201,7 +205,8 @@ def announcePublic(session, baseDir: str, federationList: [],
objectUrl, True, clientToServer,
sendThreads, postLog,
personCache, cachedWebfingers,
debug, projectVersion)
debug, projectVersion,
signingPrivateKeyPem)
def sendAnnounceViaServer(baseDir: str, session,
@ -209,7 +214,8 @@ def sendAnnounceViaServer(baseDir: str, session,
fromDomain: str, fromPort: int,
httpPrefix: str, repeatObjectUrl: str,
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str) -> {}:
debug: bool, projectVersion: str,
signingPrivateKeyPem: str) -> {}:
"""Creates an announce message via c2s
"""
if not session:
@ -241,7 +247,8 @@ def sendAnnounceViaServer(baseDir: str, session,
# lookup the inbox for the To handle
wfRequest = webfingerHandle(session, handle, httpPrefix,
cachedWebfingers,
fromDomain, projectVersion, debug, False)
fromDomain, projectVersion, debug, False,
signingPrivateKeyPem)
if not wfRequest:
if debug:
print('DEBUG: announce webfinger failed for ' + handle)
@ -254,13 +261,16 @@ def sendAnnounceViaServer(baseDir: str, session,
postToBox = 'outbox'
# get the actor inbox for the To handle
originDomain = fromDomain
(inboxUrl, pubKeyId, pubKey, fromPersonId,
sharedInbox, avatarUrl,
displayName) = getPersonBox(baseDir, session, wfRequest,
personCache,
projectVersion, httpPrefix,
fromNickname, fromDomain,
postToBox, 73528)
displayName, _) = getPersonBox(signingPrivateKeyPem,
originDomain,
baseDir, session, wfRequest,
personCache,
projectVersion, httpPrefix,
fromNickname, fromDomain,
postToBox, 73528)
if not inboxUrl:
if debug:
@ -297,7 +307,8 @@ def sendUndoAnnounceViaServer(baseDir: str, session,
domain: str, port: int,
httpPrefix: str, repeatObjectUrl: str,
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str) -> {}:
debug: bool, projectVersion: str,
signingPrivateKeyPem: str) -> {}:
"""Undo an announce message via c2s
"""
if not session:
@ -307,7 +318,7 @@ def sendUndoAnnounceViaServer(baseDir: str, session,
domainFull = getFullDomain(domain, port)
actor = localActorUrl(httpPrefix, nickname, domainFull)
handle = actor.replace('/users/', '/@')
handle = replaceUsersWithAt(actor)
statusNumber, published = getStatusNumber()
unAnnounceJson = {
@ -321,7 +332,8 @@ def sendUndoAnnounceViaServer(baseDir: str, session,
# lookup the inbox for the To handle
wfRequest = webfingerHandle(session, handle, httpPrefix,
cachedWebfingers,
domain, projectVersion, debug, False)
domain, projectVersion, debug, False,
signingPrivateKeyPem)
if not wfRequest:
if debug:
print('DEBUG: undo announce webfinger failed for ' + handle)
@ -334,13 +346,16 @@ def sendUndoAnnounceViaServer(baseDir: str, session,
postToBox = 'outbox'
# get the actor inbox for the To handle
originDomain = domain
(inboxUrl, pubKeyId, pubKey, fromPersonId,
sharedInbox, avatarUrl,
displayName) = getPersonBox(baseDir, session, wfRequest,
personCache,
projectVersion, httpPrefix,
nickname, domain,
postToBox, 73528)
displayName, _) = getPersonBox(signingPrivateKeyPem,
originDomain,
baseDir, session, wfRequest,
personCache,
projectVersion, httpPrefix,
nickname, domain,
postToBox, 73528)
if not inboxUrl:
if debug:

(13 binary image files changed; previews not shown. Sizes before → after: 52→50 KiB, 182→141 KiB, 95→95 KiB, 74→73 KiB, 55→49 KiB, 77→62 KiB, 64→55 KiB, 75→63 KiB, 172→147 KiB, 87→78 KiB, 95→74 KiB, 69→68 KiB, 184→166 KiB.)

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Security"

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Profile Metadata"
@ -82,7 +82,8 @@ def sendAvailabilityViaServer(baseDir: str, session,
httpPrefix: str,
status: str,
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str) -> {}:
debug: bool, projectVersion: str,
signingPrivateKeyPem: str) -> {}:
"""Sets the availability for a person via c2s
"""
if not session:
@ -107,7 +108,8 @@ def sendAvailabilityViaServer(baseDir: str, session,
# lookup the inbox for the To handle
wfRequest = webfingerHandle(session, handle, httpPrefix,
cachedWebfingers,
domain, projectVersion, debug, False)
domain, projectVersion, debug, False,
signingPrivateKeyPem)
if not wfRequest:
if debug:
print('DEBUG: availability webfinger failed for ' + handle)
@ -120,12 +122,14 @@ def sendAvailabilityViaServer(baseDir: str, session,
postToBox = 'outbox'
# get the actor inbox for the To handle
(inboxUrl, pubKeyId, pubKey,
fromPersonId, sharedInbox,
avatarUrl, displayName) = getPersonBox(baseDir, session, wfRequest,
personCache, projectVersion,
httpPrefix, nickname,
domain, postToBox, 57262)
originDomain = domain
(inboxUrl, pubKeyId, pubKey, fromPersonId, sharedInbox, avatarUrl,
displayName, _) = getPersonBox(signingPrivateKeyPem,
originDomain,
baseDir, session, wfRequest,
personCache, projectVersion,
httpPrefix, nickname,
domain, postToBox, 57262)
if not inboxUrl:
if debug:

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Core"
@ -461,52 +461,66 @@ def mutePost(baseDir: str, nickname: str, domain: str, port: int,
debug: bool) -> None:
""" Mutes the given post
"""
print('mutePost: postId ' + postId)
postFilename = locatePost(baseDir, nickname, domain, postId)
if not postFilename:
print('mutePost: file not found ' + postId)
return
postJsonObject = loadJson(postFilename)
if not postJsonObject:
print('mutePost: object not loaded ' + postId)
return
print('mutePost: ' + str(postJsonObject))
postJsonObj = postJsonObject
alsoUpdatePostId = None
if hasObjectDict(postJsonObject):
domainFull = getFullDomain(domain, port)
actor = localActorUrl(httpPrefix, nickname, domainFull)
postJsonObj = postJsonObject['object']
else:
if postJsonObject.get('object'):
if isinstance(postJsonObject['object'], str):
alsoUpdatePostId = removeIdEnding(postJsonObject['object'])
if postJsonObject['object'].get('conversation'):
muteConversation(baseDir, nickname, domain,
postJsonObject['object']['conversation'])
domainFull = getFullDomain(domain, port)
actor = localActorUrl(httpPrefix, nickname, domainFull)
# does this post have ignores on it from different actors?
if not postJsonObject['object'].get('ignores'):
if debug:
print('DEBUG: Adding initial mute to ' + postId)
ignoresJson = {
"@context": "https://www.w3.org/ns/activitystreams",
'id': postId,
'type': 'Collection',
"totalItems": 1,
'items': [{
'type': 'Ignore',
'actor': actor
}]
}
postJsonObject['object']['ignores'] = ignoresJson
else:
if not postJsonObject['object']['ignores'].get('items'):
postJsonObject['object']['ignores']['items'] = []
itemsList = postJsonObject['object']['ignores']['items']
for ignoresItem in itemsList:
if ignoresItem.get('actor'):
if ignoresItem['actor'] == actor:
return
newIgnore = {
if postJsonObj.get('conversation'):
muteConversation(baseDir, nickname, domain,
postJsonObj['conversation'])
# does this post have ignores on it from different actors?
if not postJsonObj.get('ignores'):
if debug:
print('DEBUG: Adding initial mute to ' + postId)
ignoresJson = {
"@context": "https://www.w3.org/ns/activitystreams",
'id': postId,
'type': 'Collection',
"totalItems": 1,
'items': [{
'type': 'Ignore',
'actor': actor
}
igIt = len(itemsList)
itemsList.append(newIgnore)
postJsonObject['object']['ignores']['totalItems'] = igIt
saveJson(postJsonObject, postFilename)
}]
}
postJsonObj['ignores'] = ignoresJson
else:
if not postJsonObj['ignores'].get('items'):
postJsonObj['ignores']['items'] = []
itemsList = postJsonObj['ignores']['items']
for ignoresItem in itemsList:
if ignoresItem.get('actor'):
if ignoresItem['actor'] == actor:
return
newIgnore = {
'type': 'Ignore',
'actor': actor
}
igIt = len(itemsList)
itemsList.append(newIgnore)
postJsonObj['ignores']['totalItems'] = igIt
postJsonObj['muted'] = True
if saveJson(postJsonObject, postFilename):
print('mutePost: saved ' + postFilename)
# remove cached post so that the muted version gets recreated
# without its content text and/or image
@ -514,7 +528,13 @@ def mutePost(baseDir: str, nickname: str, domain: str, port: int,
getCachedPostFilename(baseDir, nickname, domain, postJsonObject)
if cachedPostFilename:
if os.path.isfile(cachedPostFilename):
os.remove(cachedPostFilename)
try:
os.remove(cachedPostFilename)
print('MUTE: cached post removed ' + cachedPostFilename)
except BaseException:
pass
else:
print('MUTE: cached post not found ' + cachedPostFilename)
with open(postFilename + '.muted', 'w+') as muteFile:
muteFile.write('\n')
@ -526,14 +546,39 @@ def mutePost(baseDir: str, nickname: str, domain: str, port: int,
removeIdEnding(postJsonObject['id']).replace('/', '#')
if postId in recentPostsCache['index']:
print('MUTE: ' + postId + ' is in recent posts cache')
if recentPostsCache['json'].get(postId):
postJsonObject['muted'] = True
recentPostsCache['json'][postId] = json.dumps(postJsonObject)
if recentPostsCache.get('html'):
if recentPostsCache['html'].get(postId):
del recentPostsCache['html'][postId]
print('MUTE: ' + postId +
' marked as muted in recent posts memory cache')
if recentPostsCache.get('json'):
recentPostsCache['json'][postId] = json.dumps(postJsonObject)
print('MUTE: ' + postId +
' marked as muted in recent posts memory cache')
if recentPostsCache.get('html'):
if recentPostsCache['html'].get(postId):
del recentPostsCache['html'][postId]
print('MUTE: ' + postId + ' removed cached html')
if alsoUpdatePostId:
postFilename = locatePost(baseDir, nickname, domain, alsoUpdatePostId)
if os.path.isfile(postFilename):
postJsonObj = loadJson(postFilename)
cachedPostFilename = \
getCachedPostFilename(baseDir, nickname, domain,
postJsonObj)
if cachedPostFilename:
if os.path.isfile(cachedPostFilename):
try:
os.remove(cachedPostFilename)
print('MUTE: cached referenced post removed ' +
cachedPostFilename)
except BaseException:
pass
if recentPostsCache.get('json'):
if recentPostsCache['json'].get(alsoUpdatePostId):
del recentPostsCache['json'][alsoUpdatePostId]
print('MUTE: ' + alsoUpdatePostId + ' removed referenced json')
if recentPostsCache.get('html'):
if recentPostsCache['html'].get(alsoUpdatePostId):
del recentPostsCache['html'][alsoUpdatePostId]
print('MUTE: ' + alsoUpdatePostId + ' removed referenced html')
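To make the new branching clearer: the rewritten mutePost distinguishes a post whose 'object' is an embedded dict (muted in place) from an Announce-style post whose 'object' is only a string id, in which case alsoUpdatePostId causes the referenced post to be cleared from the caches as well. Two hypothetical post shapes, purely for illustration:

    # hypothetical post with an embedded object dict
    postWithDict = {
        'id': 'https://example.net/users/alice/statuses/1',
        'type': 'Create',
        'object': {
            'id': 'https://example.net/users/alice/statuses/1',
            'content': 'Hello',
            'conversation': 'https://example.net/contexts/1'
        }
    }
    # hypothetical announce whose object is only a string reference
    postWithStringObject = {
        'id': 'https://example.net/users/alice/statuses/2',
        'type': 'Announce',
        'object': 'https://other.example/users/bob/statuses/99'
    }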
def unmutePost(baseDir: str, nickname: str, domain: str, port: int,
@ -550,37 +595,48 @@ def unmutePost(baseDir: str, nickname: str, domain: str, port: int,
muteFilename = postFilename + '.muted'
if os.path.isfile(muteFilename):
os.remove(muteFilename)
try:
os.remove(muteFilename)
except BaseException:
pass
print('UNMUTE: ' + muteFilename + ' file removed')
postJsonObj = postJsonObject
alsoUpdatePostId = None
if hasObjectDict(postJsonObject):
if postJsonObject['object'].get('conversation'):
unmuteConversation(baseDir, nickname, domain,
postJsonObject['object']['conversation'])
postJsonObj = postJsonObject['object']
else:
if postJsonObject.get('object'):
if isinstance(postJsonObject['object'], str):
alsoUpdatePostId = removeIdEnding(postJsonObject['object'])
if postJsonObject['object'].get('ignores'):
domainFull = getFullDomain(domain, port)
actor = localActorUrl(httpPrefix, nickname, domainFull)
totalItems = 0
if postJsonObject['object']['ignores'].get('totalItems'):
totalItems = \
postJsonObject['object']['ignores']['totalItems']
itemsList = postJsonObject['object']['ignores']['items']
for ignoresItem in itemsList:
if ignoresItem.get('actor'):
if ignoresItem['actor'] == actor:
if debug:
print('DEBUG: mute was removed for ' + actor)
itemsList.remove(ignoresItem)
break
if totalItems == 1:
if debug:
print('DEBUG: mute was removed from post')
del postJsonObject['object']['ignores']
else:
igItLen = len(postJsonObject['object']['ignores']['items'])
postJsonObject['object']['ignores']['totalItems'] = igItLen
saveJson(postJsonObject, postFilename)
if postJsonObj.get('conversation'):
unmuteConversation(baseDir, nickname, domain,
postJsonObj['conversation'])
if postJsonObj.get('ignores'):
domainFull = getFullDomain(domain, port)
actor = localActorUrl(httpPrefix, nickname, domainFull)
totalItems = 0
if postJsonObj['ignores'].get('totalItems'):
totalItems = postJsonObj['ignores']['totalItems']
itemsList = postJsonObj['ignores']['items']
for ignoresItem in itemsList:
if ignoresItem.get('actor'):
if ignoresItem['actor'] == actor:
if debug:
print('DEBUG: mute was removed for ' + actor)
itemsList.remove(ignoresItem)
break
if totalItems == 1:
if debug:
print('DEBUG: mute was removed from post')
del postJsonObj['ignores']
else:
igItLen = len(postJsonObj['ignores']['items'])
postJsonObj['ignores']['totalItems'] = igItLen
postJsonObj['muted'] = False
saveJson(postJsonObject, postFilename)
# remove cached post so that the muted version gets recreated
# with its content text and/or image
@ -588,7 +644,10 @@ def unmutePost(baseDir: str, nickname: str, domain: str, port: int,
getCachedPostFilename(baseDir, nickname, domain, postJsonObject)
if cachedPostFilename:
if os.path.isfile(cachedPostFilename):
os.remove(cachedPostFilename)
try:
os.remove(cachedPostFilename)
except BaseException:
pass
# if the post is in the recent posts cache then mark it as unmuted
if recentPostsCache.get('index'):
@ -596,14 +655,40 @@ def unmutePost(baseDir: str, nickname: str, domain: str, port: int,
removeIdEnding(postJsonObject['id']).replace('/', '#')
if postId in recentPostsCache['index']:
print('UNMUTE: ' + postId + ' is in recent posts cache')
if recentPostsCache['json'].get(postId):
postJsonObject['muted'] = False
recentPostsCache['json'][postId] = json.dumps(postJsonObject)
if recentPostsCache.get('html'):
if recentPostsCache['html'].get(postId):
del recentPostsCache['html'][postId]
print('UNMUTE: ' + postId +
' marked as unmuted in recent posts cache')
if recentPostsCache.get('json'):
recentPostsCache['json'][postId] = json.dumps(postJsonObject)
print('UNMUTE: ' + postId +
' marked as unmuted in recent posts cache')
if recentPostsCache.get('html'):
if recentPostsCache['html'].get(postId):
del recentPostsCache['html'][postId]
print('UNMUTE: ' + postId + ' removed cached html')
if alsoUpdatePostId:
postFilename = locatePost(baseDir, nickname, domain, alsoUpdatePostId)
if os.path.isfile(postFilename):
postJsonObj = loadJson(postFilename)
cachedPostFilename = \
getCachedPostFilename(baseDir, nickname, domain,
postJsonObj)
if cachedPostFilename:
if os.path.isfile(cachedPostFilename):
try:
os.remove(cachedPostFilename)
print('UNMUTE: cached referenced post removed ' +
cachedPostFilename)
except BaseException:
pass
if recentPostsCache.get('json'):
if recentPostsCache['json'].get(alsoUpdatePostId):
del recentPostsCache['json'][alsoUpdatePostId]
print('UNMUTE: ' +
alsoUpdatePostId + ' removed referenced json')
if recentPostsCache.get('html'):
if recentPostsCache['html'].get(alsoUpdatePostId):
del recentPostsCache['html'][alsoUpdatePostId]
print('UNMUTE: ' +
alsoUpdatePostId + ' removed referenced html')
def outboxMute(baseDir: str, httpPrefix: str,
@ -740,7 +825,10 @@ def setBrochMode(baseDir: str, domainFull: str, enabled: bool) -> None:
if not enabled:
# remove instance allow list
if os.path.isfile(allowFilename):
os.remove(allowFilename)
try:
os.remove(allowFilename)
except BaseException:
pass
print('Broch mode turned off')
else:
if os.path.isfile(allowFilename):
@ -799,11 +887,14 @@ def brochModeLapses(baseDir: str, lapseDays: int = 7) -> bool:
currTime = datetime.datetime.utcnow()
daysSinceBroch = (currTime - modifiedDate).days
if daysSinceBroch >= lapseDays:
removed = False
try:
os.remove(allowFilename)
removed = True
except BaseException:
pass
if removed:
setConfigParam(baseDir, "brochMode", False)
print('Broch mode has elapsed')
return True
except BaseException:
pass
return False
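The same defensive pattern, wrapping os.remove in a try/except so that a concurrent deletion cannot raise, appears throughout this commit. A minimal helper capturing that pattern, offered only as an illustration (this function does not exist in the codebase, and the exception type is narrowed to OSError here):

    import os

    def _removeIfPresent(filename: str) -> bool:
        """Hypothetical helper: delete a file if it exists and
        report whether the deletion actually succeeded."""
        if not os.path.isfile(filename):
            return False
        try:
            os.remove(filename)
        except OSError:
            return False
        return True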

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "ActivityPub"

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Timeline"
@ -47,7 +47,10 @@ def undoBookmarksCollectionEntry(recentPostsCache: {},
domain, postJsonObject)
if cachedPostFilename:
if os.path.isfile(cachedPostFilename):
os.remove(cachedPostFilename)
try:
os.remove(cachedPostFilename)
except BaseException:
pass
removePostFromCache(postJsonObject, recentPostsCache)
# remove from the index
@ -152,7 +155,10 @@ def updateBookmarksCollection(recentPostsCache: {},
domain, postJsonObject)
if cachedPostFilename:
if os.path.isfile(cachedPostFilename):
os.remove(cachedPostFilename)
try:
os.remove(cachedPostFilename)
except BaseException:
pass
removePostFromCache(postJsonObject, recentPostsCache)
if not postJsonObject.get('object'):
@ -348,7 +354,8 @@ def sendBookmarkViaServer(baseDir: str, session,
domain: str, fromPort: int,
httpPrefix: str, bookmarkUrl: str,
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str) -> {}:
debug: bool, projectVersion: str,
signingPrivateKeyPem: str) -> {}:
"""Creates a bookmark via c2s
"""
if not session:
@ -377,7 +384,8 @@ def sendBookmarkViaServer(baseDir: str, session,
# lookup the inbox for the To handle
wfRequest = webfingerHandle(session, handle, httpPrefix,
cachedWebfingers,
domain, projectVersion, debug, False)
domain, projectVersion, debug, False,
signingPrivateKeyPem)
if not wfRequest:
if debug:
print('DEBUG: bookmark webfinger failed for ' + handle)
@ -390,12 +398,15 @@ def sendBookmarkViaServer(baseDir: str, session,
postToBox = 'outbox'
# get the actor inbox for the To handle
(inboxUrl, pubKeyId, pubKey, fromPersonId, sharedInbox,
avatarUrl, displayName) = getPersonBox(baseDir, session, wfRequest,
personCache,
projectVersion, httpPrefix,
nickname, domain,
postToBox, 52594)
originDomain = domain
(inboxUrl, pubKeyId, pubKey, fromPersonId, sharedInbox, avatarUrl,
displayName, _) = getPersonBox(signingPrivateKeyPem,
originDomain,
baseDir, session, wfRequest,
personCache,
projectVersion, httpPrefix,
nickname, domain,
postToBox, 58391)
if not inboxUrl:
if debug:
@ -433,7 +444,8 @@ def sendUndoBookmarkViaServer(baseDir: str, session,
domain: str, fromPort: int,
httpPrefix: str, bookmarkUrl: str,
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str) -> {}:
debug: bool, projectVersion: str,
signingPrivateKeyPem: str) -> {}:
"""Removes a bookmark via c2s
"""
if not session:
@ -462,7 +474,8 @@ def sendUndoBookmarkViaServer(baseDir: str, session,
# lookup the inbox for the To handle
wfRequest = webfingerHandle(session, handle, httpPrefix,
cachedWebfingers,
domain, projectVersion, debug, False)
domain, projectVersion, debug, False,
signingPrivateKeyPem)
if not wfRequest:
if debug:
print('DEBUG: unbookmark webfinger failed for ' + handle)
@ -475,12 +488,15 @@ def sendUndoBookmarkViaServer(baseDir: str, session,
postToBox = 'outbox'
# get the actor inbox for the To handle
(inboxUrl, pubKeyId, pubKey, fromPersonId, sharedInbox,
avatarUrl, displayName) = getPersonBox(baseDir, session, wfRequest,
personCache,
projectVersion, httpPrefix,
nickname, domain,
postToBox, 52594)
originDomain = domain
(inboxUrl, pubKeyId, pubKey, fromPersonId, sharedInbox, avatarUrl,
displayName, _) = getPersonBox(signingPrivateKeyPem,
originDomain,
baseDir, session, wfRequest,
personCache,
projectVersion, httpPrefix,
nickname, domain,
postToBox, 52594)
if not inboxUrl:
if debug:

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Profile Metadata"

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Core"
@ -139,7 +139,8 @@ def getWebfingerFromCache(handle: str, cachedWebfingers: {}) -> {}:
def getPersonPubKey(baseDir: str, session, personUrl: str,
personCache: {}, debug: bool,
projectVersion: str, httpPrefix: str,
domain: str, onionDomain: str) -> str:
domain: str, onionDomain: str,
signingPrivateKeyPem: str) -> str:
if not personUrl:
return None
personUrl = personUrl.replace('#main-key', '')
@ -165,7 +166,8 @@ def getPersonPubKey(baseDir: str, session, personUrl: str,
'Accept': 'application/activity+json; profile="' + profileStr + '"'
}
personJson = \
getJson(session, personUrl, asHeader, None, debug,
getJson(signingPrivateKeyPem,
session, personUrl, asHeader, None, debug,
projectVersion, httpPrefix, personDomain)
if not personJson:
return None
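getJson now takes the signing key as its first argument so that outgoing fetches can be HTTP-signed. A condensed sketch of the new call shape, mirroring the argument order above; the import location and the concrete values are assumptions for illustration, and session, debug and signingPrivateKeyPem come from the caller's context as in the hunk above (the key may be None when secure mode is off):

    from session import getJson  # assumed module for this helper

    profileStr = 'https://www.w3.org/ns/activitystreams'
    asHeader = {
        'Accept': 'application/activity+json; profile="' + profileStr + '"'
    }
    personJson = getJson(signingPrivateKeyPem, session,
                         'https://other.example/users/bob',
                         asHeader, None, debug,
                         '1.2.0', 'https', 'other.example')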

View File

@ -11,7 +11,7 @@ example.com {
header / X-Download-Options "noopen"
header / X-Frame-Options "DENY"
header / X-Permitted-Cross-Domain-Policies "none"
header / X-Robots-Tag "noindex,nofollow,nosnippet,noarchive"
header / X-Robots-Tag "noindex"
header / X-XSS-Protection "1; mode=block"
proxy / http://localhost:7156 {

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "RSS Feeds"
@ -93,7 +93,10 @@ def updateHashtagCategories(baseDir: str) -> None:
hashtagCategories = getHashtagCategories(baseDir)
if not hashtagCategories:
if os.path.isfile(categoryListFilename):
os.remove(categoryListFilename)
try:
os.remove(categoryListFilename)
except BaseException:
pass
return
categoryList = []

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Metadata"

View File

@ -52,7 +52,7 @@ If you're raising concerns about something or someone, there must be demonstrabl
This is not a big project and so there is no division of labor or special enforcement committee or bureaucratic process.
Complaints should be either reported in the Matrix chat room **#epicyon:matrix.freedombone.net** or sent to bob@freedombone.net, preferably via XMPP/Conversations with OMEMO enabled but you can also use the same address for email correspondence.
Complaints should be either reported in the Matrix chat room **#epicyon:matrix.libreserver.org** or sent to bob@libreserver.org, preferably via XMPP/Conversations with OMEMO enabled but you can also use the same address for email correspondence.
## In case of violations
@ -60,6 +60,5 @@ Violators of this code of conduct will:
* Be removed from any associated Matrix and/or XMPP chat rooms
* Will not have pending or future patches or pull requests merged
* If they have a user account on *code.freedombone.net* it will be removed
This applies regardless of past levels of commitment or technical abilities.

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Core"
@ -11,6 +11,7 @@ import os
import email.parser
import urllib.parse
from shutil import copyfile
from utils import dangerousSVG
from utils import removeDomainPort
from utils import isValidLanguage
from utils import getImageExtensions
@ -938,9 +939,15 @@ def saveMediaInFormPOST(mediaBytes, debug: bool,
for ex in extensionTypes:
possibleOtherFormat = filenameBase + '.' + ex
if os.path.isfile(possibleOtherFormat):
os.remove(possibleOtherFormat)
try:
os.remove(possibleOtherFormat)
except BaseException:
pass
if os.path.isfile(filenameBase):
os.remove(filenameBase)
try:
os.remove(filenameBase)
except BaseException:
pass
if debug:
print('DEBUG: No media found within POST')
@ -1006,7 +1013,17 @@ def saveMediaInFormPOST(mediaBytes, debug: bool,
detectedExtension, '.' +
ex)
if os.path.isfile(possibleOtherFormat):
os.remove(possibleOtherFormat)
try:
os.remove(possibleOtherFormat)
except BaseException:
pass
# don't allow scripts within svg files
if detectedExtension == 'svg':
svgStr = mediaBytes[startPos:]
svgStr = svgStr.decode()
if dangerousSVG(svgStr, False):
return None, None
with open(filename, 'wb') as fp:
fp.write(mediaBytes[startPos:])
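The dangerousSVG filter itself lives in utils.py and is not part of this hunk. Purely to illustrate the kind of markup such a check guards against, a hypothetical helper follows; the name and the specific string checks are illustrative and are not the project's implementation:

    def _svgLooksDangerous(svgStr: str) -> bool:
        """Hypothetical illustration: flag SVG markup containing
        embedded scripts or javascript event handlers."""
        lowered = svgStr.lower()
        if '<script' in lowered:
            return True
        if 'javascript:' in lowered:
            return True
        if 'onload=' in lowered or 'onclick=' in lowered:
            return True
        return False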

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Security"

View File

@ -3,13 +3,14 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Timeline"
import os
from utils import hasObjectDict
from utils import acctDir
from utils import removeIdEnding
def updateConversation(baseDir: str, nickname: str, domain: str,
@ -27,7 +28,7 @@ def updateConversation(baseDir: str, nickname: str, domain: str,
os.mkdir(conversationDir)
conversationId = postJsonObject['object']['conversation']
conversationId = conversationId.replace('/', '#')
postId = postJsonObject['object']['id']
postId = removeIdEnding(postJsonObject['object']['id'])
conversationFilename = conversationDir + '/' + conversationId
if not os.path.isfile(conversationFilename):
try:
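The change above stores the canonical post id in the conversation index rather than the raw object id. A minimal sketch, assuming removeIdEnding strips a trailing '/activity' segment (its actual behaviour is defined in utils.py):

    from utils import removeIdEnding

    postId = removeIdEnding('https://example.net/users/alice/statuses/123/activity')
    # under that assumption postId is 'https://example.net/users/alice/statuses/123'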

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Profile Metadata"

daemon.py (1,399 changed lines)

File diff suppressed because it is too large.

File diff suppressed because one or more lines are too long

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "ActivityPub"
@ -30,7 +30,8 @@ def sendDeleteViaServer(baseDir: str, session,
fromDomain: str, fromPort: int,
httpPrefix: str, deleteObjectUrl: str,
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str) -> {}:
debug: bool, projectVersion: str,
signingPrivateKeyPem: str) -> {}:
"""Creates a delete request message via c2s
"""
if not session:
@ -57,7 +58,8 @@ def sendDeleteViaServer(baseDir: str, session,
# lookup the inbox for the To handle
wfRequest = \
webfingerHandle(session, handle, httpPrefix, cachedWebfingers,
fromDomain, projectVersion, debug, False)
fromDomain, projectVersion, debug, False,
signingPrivateKeyPem)
if not wfRequest:
if debug:
print('DEBUG: delete webfinger failed for ' + handle)
@ -70,11 +72,13 @@ def sendDeleteViaServer(baseDir: str, session,
postToBox = 'outbox'
# get the actor inbox for the To handle
originDomain = fromDomain
(inboxUrl, pubKeyId, pubKey,
fromPersonId, sharedInbox, avatarUrl,
displayName) = getPersonBox(baseDir, session, wfRequest, personCache,
projectVersion, httpPrefix, fromNickname,
fromDomain, postToBox, 53036)
displayName, _) = getPersonBox(signingPrivateKeyPem, originDomain,
baseDir, session, wfRequest, personCache,
projectVersion, httpPrefix, fromNickname,
fromDomain, postToBox, 53036)
if not inboxUrl:
if debug:

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Client"
@ -418,7 +418,8 @@ def _desktopReplyToPost(session, postId: str,
debug: bool, subject: str,
screenreader: str, systemLanguage: str,
espeak, conversationId: str,
lowBandwidth: bool) -> None:
lowBandwidth: bool,
signingPrivateKeyPem: str) -> None:
"""Use the desktop client to send a reply to the most recent post
"""
if '://' not in postId:
@ -463,7 +464,7 @@ def _desktopReplyToPost(session, postId: str,
city = 'London, England'
sayStr = 'Sending reply'
_sayCommand(sayStr, sayStr, screenreader, systemLanguage, espeak)
if sendPostViaServer(__version__,
if sendPostViaServer(signingPrivateKeyPem, __version__,
baseDir, session, nickname, password,
domain, port,
toNickname, toDomain, toPort, ccUrl,
@ -486,7 +487,8 @@ def _desktopNewPost(session,
cachedWebfingers: {}, personCache: {},
debug: bool,
screenreader: str, systemLanguage: str,
espeak, lowBandwidth: bool) -> None:
espeak, lowBandwidth: bool,
signingPrivateKeyPem: str) -> None:
"""Use the desktop client to create a new post
"""
conversationId = None
@ -527,7 +529,7 @@ def _desktopNewPost(session,
subject = None
sayStr = 'Sending'
_sayCommand(sayStr, sayStr, screenreader, systemLanguage, espeak)
if sendPostViaServer(__version__,
if sendPostViaServer(signingPrivateKeyPem, __version__,
baseDir, session, nickname, password,
domain, port,
None, '#Public', port, ccUrl,
@ -661,7 +663,9 @@ def _readLocalBoxPost(session, nickname: str, domain: str,
systemLanguage: str,
screenreader: str, espeak,
translate: {}, yourActor: str,
domainFull: str, personCache: {}) -> {}:
domainFull: str, personCache: {},
signingPrivateKeyPem: str,
blockedCache: {}) -> {}:
"""Reads a post from the given timeline
Returns the post json
"""
@ -688,6 +692,7 @@ def _readLocalBoxPost(session, nickname: str, domain: str,
recentPostsCache = {}
allowLocalNetworkAccess = False
YTReplacementDomain = None
twitterReplacementDomain = None
postJsonObject2 = \
downloadAnnounce(session, baseDir,
httpPrefix,
@ -695,10 +700,13 @@ def _readLocalBoxPost(session, nickname: str, domain: str,
postJsonObject,
__version__, translate,
YTReplacementDomain,
twitterReplacementDomain,
allowLocalNetworkAccess,
recentPostsCache, False,
systemLanguage,
domainFull, personCache)
domainFull, personCache,
signingPrivateKeyPem,
blockedCache)
if postJsonObject2:
if hasObjectDict(postJsonObject2):
if postJsonObject2['object'].get('attributedTo') and \
@ -742,7 +750,7 @@ def _readLocalBoxPost(session, nickname: str, domain: str,
if isPGPEncrypted(content):
sayStr = 'Encrypted message. Please enter your passphrase.'
_sayCommand(sayStr, sayStr, screenreader, systemLanguage, espeak)
content = pgpDecrypt(domain, content, actor)
content = pgpDecrypt(domain, content, actor, signingPrivateKeyPem)
if isPGPEncrypted(content):
sayStr = 'Message could not be decrypted'
_sayCommand(sayStr, sayStr, screenreader, systemLanguage, espeak)
@ -823,7 +831,7 @@ def _desktopShowProfile(session, nickname: str, domain: str,
systemLanguage: str,
screenreader: str, espeak,
translate: {}, yourActor: str,
postJsonObject: {}) -> {}:
postJsonObject: {}, signingPrivateKeyPem: str) -> {}:
"""Shows the profile of the actor for the given post
Returns the actor json
"""
@ -854,7 +862,8 @@ def _desktopShowProfile(session, nickname: str, domain: str,
if 'http://' in actor:
isHttp = True
actorJson, asHeader = \
getActorJson(domain, actor, isHttp, False, False, True)
getActorJson(domain, actor, isHttp, False, False, True,
signingPrivateKeyPem)
_desktopShowActor(baseDir, actorJson, translate,
systemLanguage, screenreader, espeak)
@ -868,12 +877,14 @@ def _desktopShowProfileFromHandle(session, nickname: str, domain: str,
systemLanguage: str,
screenreader: str, espeak,
translate: {}, yourActor: str,
postJsonObject: {}) -> {}:
postJsonObject: {},
signingPrivateKeyPem: str) -> {}:
"""Shows the profile for a handle
Returns the actor json
"""
actorJson, asHeader = \
getActorJson(domain, handle, False, False, False, True)
getActorJson(domain, handle, False, False, False, True,
signingPrivateKeyPem)
_desktopShowActor(baseDir, actorJson, translate,
systemLanguage, screenreader, espeak)
@ -1112,7 +1123,8 @@ def _desktopNewDM(session, toHandle: str,
cachedWebfingers: {}, personCache: {},
debug: bool,
screenreader: str, systemLanguage: str,
espeak, lowBandwidth: bool) -> None:
espeak, lowBandwidth: bool,
signingPrivateKeyPem: str) -> None:
"""Use the desktop client to create a new direct message
which can include multiple destination handles
"""
@ -1133,7 +1145,8 @@ def _desktopNewDM(session, toHandle: str,
cachedWebfingers, personCache,
debug,
screenreader, systemLanguage,
espeak, lowBandwidth)
espeak, lowBandwidth,
signingPrivateKeyPem)
def _desktopNewDMbase(session, toHandle: str,
@ -1142,7 +1155,8 @@ def _desktopNewDMbase(session, toHandle: str,
cachedWebfingers: {}, personCache: {},
debug: bool,
screenreader: str, systemLanguage: str,
espeak, lowBandwidth: bool) -> None:
espeak, lowBandwidth: bool,
signingPrivateKeyPem: str) -> None:
"""Use the desktop client to create a new direct message
"""
conversationId = None
@ -1201,7 +1215,8 @@ def _desktopNewDMbase(session, toHandle: str,
for after in range(randint(1, 16)):
paddedMessage += ' '
cipherText = \
pgpEncryptToActor(domain, paddedMessage, toHandle)
pgpEncryptToActor(domain, paddedMessage, toHandle,
signingPrivateKeyPem)
if not cipherText:
sayStr = \
toHandle + ' has no PGP public key. ' + \
@ -1222,7 +1237,7 @@ def _desktopNewDMbase(session, toHandle: str,
sayStr = 'Sending'
_sayCommand(sayStr, sayStr, screenreader, systemLanguage, espeak)
if sendPostViaServer(__version__,
if sendPostViaServer(signingPrivateKeyPem, __version__,
baseDir, session, nickname, password,
domain, port,
toNickname, toDomain, toPort, ccUrl,
@ -1301,6 +1316,11 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
"""Runs the desktop and screen reader client,
which announces new inbox items
"""
# TODO: this should probably be retrieved somehow from the server
signingPrivateKeyPem = None
blockedCache = {}
indent = ' '
if showNewPosts:
indent = ''
@ -1400,7 +1420,8 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
nickname, password,
domain, port, httpPrefix,
cachedWebfingers, personCache,
debug, False)
debug, False,
signingPrivateKeyPem)
sayStr = indent + 'PGP public key uploaded'
_sayCommand(sayStr, sayStr, screenreader,
systemLanguage, espeak)
@ -1410,7 +1431,7 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
nickname, password,
domain, port, httpPrefix,
currTimeline, pageNumber,
debug)
debug, signingPrivateKeyPem)
followRequestsJson = \
getFollowRequestsViaServer(baseDir, session,
@ -1418,14 +1439,16 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
domain, port,
httpPrefix, 1,
cachedWebfingers, personCache,
debug, __version__)
debug, __version__,
signingPrivateKeyPem)
if not (currTimeline == 'inbox' and pageNumber == 1):
# monitor the inbox to generate notifications
inboxJson = c2sBoxJson(baseDir, session,
nickname, password,
domain, port, httpPrefix,
'inbox', 1, debug)
'inbox', 1, debug,
signingPrivateKeyPem)
else:
inboxJson = boxJson
newDMsExist = False
@ -1502,7 +1525,7 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
nickname, password,
domain, port, httpPrefix,
currTimeline, pageNumber,
debug)
debug, signingPrivateKeyPem)
if boxJson:
_desktopShowBox(indent, followRequestsJson,
yourActor, currTimeline, boxJson,
@ -1519,7 +1542,7 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
nickname, password,
domain, port, httpPrefix,
currTimeline, pageNumber,
debug)
debug, signingPrivateKeyPem)
if boxJson:
_desktopShowBox(indent, followRequestsJson,
yourActor, currTimeline, boxJson,
@ -1537,7 +1560,7 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
nickname, password,
domain, port, httpPrefix,
currTimeline, pageNumber,
debug)
debug, signingPrivateKeyPem)
if boxJson:
_desktopShowBox(indent, followRequestsJson,
yourActor, currTimeline, boxJson,
@ -1556,7 +1579,7 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
nickname, password,
domain, port, httpPrefix,
currTimeline, pageNumber,
debug)
debug, signingPrivateKeyPem)
if boxJson:
_desktopShowBox(indent, followRequestsJson,
yourActor, currTimeline, boxJson,
@ -1583,7 +1606,7 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
nickname, password,
domain, port, httpPrefix,
currTimeline, pageNumber,
debug)
debug, signingPrivateKeyPem)
if boxJson:
_desktopShowBox(indent, followRequestsJson,
yourActor, currTimeline, boxJson,
@ -1606,7 +1629,9 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
pageNumber, postIndex, boxJson,
systemLanguage, screenreader,
espeak, translate, yourActor,
domainFull, personCache)
domainFull, personCache,
signingPrivateKeyPem,
blockedCache)
print('')
sayStr = 'Press Enter to continue...'
sayStr2 = _highlightText(sayStr)
@ -1628,7 +1653,8 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
boxJson,
systemLanguage, screenreader,
espeak, translate, yourActor,
postJsonObject)
postJsonObject,
signingPrivateKeyPem)
else:
postIndexStr = '1'
else:
@ -1643,7 +1669,7 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
currTimeline, profileHandle,
systemLanguage, screenreader,
espeak, translate, yourActor,
None)
None, signingPrivateKeyPem)
sayStr = 'Press Enter to continue...'
sayStr2 = _highlightText(sayStr)
_sayCommand(sayStr2, sayStr,
@ -1661,7 +1687,7 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
pageNumber, postIndex, boxJson,
systemLanguage, screenreader,
espeak, translate, yourActor,
None)
None, signingPrivateKeyPem)
sayStr = 'Press Enter to continue...'
sayStr2 = _highlightText(sayStr)
_sayCommand(sayStr2, sayStr,
@ -1689,7 +1715,8 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
debug, subject,
screenreader, systemLanguage,
espeak, conversationId,
lowBandwidth)
lowBandwidth,
signingPrivateKeyPem)
refreshTimeline = True
print('')
elif (commandStr == 'post' or commandStr == 'p' or
@ -1723,7 +1750,8 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
cachedWebfingers, personCache,
debug,
screenreader, systemLanguage,
espeak, lowBandwidth)
espeak, lowBandwidth,
signingPrivateKeyPem)
refreshTimeline = True
else:
# public post
@ -1733,7 +1761,8 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
cachedWebfingers, personCache,
debug,
screenreader, systemLanguage,
espeak, lowBandwidth)
espeak, lowBandwidth,
signingPrivateKeyPem)
refreshTimeline = True
print('')
elif commandStr == 'like' or commandStr.startswith('like '):
@ -1759,7 +1788,8 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
domain, port, httpPrefix,
postJsonObject['id'],
cachedWebfingers, personCache,
False, __version__)
False, __version__,
signingPrivateKeyPem)
refreshTimeline = True
print('')
elif (commandStr == 'undo mute' or
@ -1797,7 +1827,8 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
domain, port,
httpPrefix, postJsonObject['id'],
cachedWebfingers, personCache,
False, __version__)
False, __version__,
signingPrivateKeyPem)
refreshTimeline = True
print('')
elif (commandStr == 'mute' or
@ -1826,7 +1857,8 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
domain, port,
httpPrefix, postJsonObject['id'],
cachedWebfingers, personCache,
False, __version__)
False, __version__,
signingPrivateKeyPem)
refreshTimeline = True
print('')
elif (commandStr == 'undo bookmark' or
@ -1867,7 +1899,8 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
postJsonObject['id'],
cachedWebfingers,
personCache,
False, __version__)
False, __version__,
signingPrivateKeyPem)
refreshTimeline = True
print('')
elif (commandStr == 'bookmark' or
@ -1896,7 +1929,8 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
domain, port, httpPrefix,
postJsonObject['id'],
cachedWebfingers, personCache,
False, __version__)
False, __version__,
signingPrivateKeyPem)
refreshTimeline = True
print('')
elif (commandStr.startswith('undo block ') or
@ -1931,7 +1965,8 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
blockActor,
cachedWebfingers,
personCache,
False, __version__)
False, __version__,
signingPrivateKeyPem)
refreshTimeline = True
print('')
elif commandStr.startswith('block '):
@ -1976,7 +2011,8 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
blockActor,
cachedWebfingers,
personCache,
False, __version__)
False, __version__,
signingPrivateKeyPem)
refreshTimeline = True
print('')
elif commandStr == 'unlike' or commandStr == 'undo like':
@ -2003,7 +2039,8 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
domain, port, httpPrefix,
postJsonObject['id'],
cachedWebfingers, personCache,
False, __version__)
False, __version__,
signingPrivateKeyPem)
refreshTimeline = True
print('')
elif (commandStr.startswith('announce') or
@ -2033,7 +2070,8 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
domain, port,
httpPrefix, postId,
cachedWebfingers, personCache,
True, __version__)
True, __version__,
signingPrivateKeyPem)
refreshTimeline = True
print('')
elif (commandStr.startswith('unannounce') or
@ -2067,7 +2105,8 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
httpPrefix, postId,
cachedWebfingers,
personCache,
True, __version__)
True, __version__,
signingPrivateKeyPem)
refreshTimeline = True
print('')
elif (commandStr == 'follow requests' or
@ -2083,7 +2122,8 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
domain, port,
httpPrefix, currPage,
cachedWebfingers, personCache,
debug, __version__)
debug, __version__,
signingPrivateKeyPem)
if followRequestsJson:
if isinstance(followRequestsJson, dict):
_desktopShowFollowRequests(followRequestsJson,
@ -2102,7 +2142,8 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
domain, port,
httpPrefix, currPage,
cachedWebfingers, personCache,
debug, __version__)
debug, __version__,
signingPrivateKeyPem)
if followingJson:
if isinstance(followingJson, dict):
_desktopShowFollowing(followingJson, translate,
@ -2122,7 +2163,8 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
domain, port,
httpPrefix, currPage,
cachedWebfingers, personCache,
debug, __version__)
debug, __version__,
signingPrivateKeyPem)
if followersJson:
if isinstance(followersJson, dict):
_desktopShowFollowing(followersJson, translate,
@ -2161,7 +2203,8 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
httpPrefix,
cachedWebfingers,
personCache,
debug, __version__)
debug, __version__,
signingPrivateKeyPem)
else:
if followHandle:
sayStr = followHandle + ' is not valid'
@ -2195,7 +2238,8 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
httpPrefix,
cachedWebfingers,
personCache,
debug, __version__)
debug, __version__,
signingPrivateKeyPem)
else:
sayStr = followHandle + ' is not valid'
_sayCommand(sayStr, sayStr,
@ -2224,7 +2268,8 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
cachedWebfingers,
personCache,
debug,
__version__)
__version__,
signingPrivateKeyPem)
else:
if approveHandle:
sayStr = approveHandle + ' is not valid'
@ -2256,7 +2301,8 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
cachedWebfingers,
personCache,
debug,
__version__)
__version__,
signingPrivateKeyPem)
else:
if denyHandle:
sayStr = denyHandle + ' is not valid'
@ -2331,6 +2377,7 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
recentPostsCache = {}
allowLocalNetworkAccess = False
YTReplacementDomain = None
twitterReplacementDomain = None
postJsonObject2 = \
downloadAnnounce(session, baseDir,
httpPrefix,
@ -2338,10 +2385,13 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
postJsonObject,
__version__, translate,
YTReplacementDomain,
twitterReplacementDomain,
allowLocalNetworkAccess,
recentPostsCache, False,
systemLanguage,
domainFull, personCache)
domainFull, personCache,
signingPrivateKeyPem,
blockedCache)
if postJsonObject2:
postJsonObject = postJsonObject2
if postJsonObject:
@ -2423,7 +2473,8 @@ def runDesktopClient(baseDir: str, proxyType: str, httpPrefix: str,
postJsonObject['id'],
cachedWebfingers,
personCache,
False, __version__)
False, __version__,
signingPrivateKeyPem)
refreshTimeline = True
print('')

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Security"
@ -44,7 +44,10 @@ def E2EEremoveDevice(baseDir: str, nickname: str, domain: str,
personDir = acctDir(baseDir, nickname, domain)
deviceFilename = personDir + '/devices/' + deviceId + '.json'
if os.path.isfile(deviceFilename):
os.remove(deviceFilename)
try:
os.remove(deviceFilename)
except BaseException:
pass
return True
return False

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Profile Metadata"

View File

@ -769,5 +769,6 @@
"void": "void",
"openbsd": "openbsd",
"freebsd": "freebsd",
"orgmode": "orgmode"
"orgmode": "orgmode",
"kde": "kde"
}

emoji/kde.png 100644 (new binary file, 17 KiB; preview not shown)

View File

@ -201,6 +201,13 @@ figure {
height: auto;
}
mark {
background-color: var(--main-bg-color);
color: var(--main-fg-color);
font-size: 130%;
font-weight: bold;
}
.accesskeys {
border: 0;
width: 100%;
@ -1383,6 +1390,26 @@ div.container {
margin-bottom: var(--button-bottom-margin);
margin-left: var(--button-left-margin);
}
.contactbutton {
border-radius: var(--button-corner-radius);
background-color: var(--button-background);
color: var(--button-text);
text-align: center;
font-size: var(--font-size-header);
font-family: var(--header-font);
padding: var(--button-height-padding);
width: 20%;
margin: var(--button-margin);
min-width: var(--button-width-chars);
transition: all 0.5s;
cursor: pointer;
border-top: var(--tab-border-width) solid var(--tab-border-color);
border-bottom: none;
border-left: var(--tab-border-width) solid var(--tab-border-color);
border-right: var(--tab-border-width) solid var(--tab-border-color);
margin-bottom: var(--button-bottom-margin);
margin-left: var(--button-left-margin);
}
.buttonDesktop {
border-radius: var(--button-corner-radius);
background-color: var(--button-background);
@ -1667,6 +1694,8 @@ div.container {
.columnIcons img {
float: right;
}
.pageslist {
}
}
@media screen and (min-width: 2200px) {
@ -1696,7 +1725,7 @@ div.container {
color: var(--title-color);
}
blockquote {
font-size: var(--quote-font-size-mobile);
font-size: var(--quote-font-size-mobile);
}
.accountsTable {
width: 100%;
@ -2017,6 +2046,25 @@ div.container {
border-right: var(--tab-border-width) solid var(--tab-border-color);
margin-bottom: var(--button-bottom-margin);
}
.contactbutton {
border-radius: var(--button-corner-radius);
background-color: var(--button-background);
color: var(--button-text);
text-align: center;
font-size: var(--font-size-button-mobile);
font-family: var(--header-font);
padding: var(--button-height-padding-mobile);
width: 30%;
min-width: var(--button-width-chars);
transition: all 0.5s;
cursor: pointer;
margin: var(--button-margin);
border-top: var(--tab-border-width) solid var(--tab-border-color);
border-bottom: none;
border-left: var(--tab-border-width) solid var(--tab-border-color);
border-right: var(--tab-border-width) solid var(--tab-border-color);
margin-bottom: var(--button-bottom-margin);
}
.frontPageMobileButtons{
display: block;
border: var(--border-width-header) solid var(--border-color);
@ -2323,4 +2371,6 @@ div.container {
float: right;
margin-right: 1vw;
}
.pageslist {
}
}

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Commandline Interface"
@ -25,6 +25,7 @@ from roles import setRole
from webfinger import webfingerHandle
from bookmarks import sendBookmarkViaServer
from bookmarks import sendUndoBookmarkViaServer
from posts import getInstanceActorKey
from posts import sendMuteViaServer
from posts import sendUndoMuteViaServer
from posts import c2sBoxJson
@ -169,6 +170,11 @@ parser.add_argument('--dormantMonths',
default=3,
help='How many months does a followed account need to ' +
'be unseen for before being considered dormant')
parser.add_argument('--defaultReplyIntervalHours',
dest='defaultReplyIntervalHours', type=int,
default=1000,
help='How many hours after publication of a post ' +
'are replies to it permitted')
parser.add_argument('--sendThreadsTimeoutMins',
dest='sendThreadsTimeoutMins', type=int,
default=30,
@ -217,6 +223,9 @@ parser.add_argument('--path', dest='baseDir',
parser.add_argument('--ytdomain', dest='YTReplacementDomain',
type=str, default=None,
help='Domain used to replace youtube.com')
parser.add_argument('--twitterdomain', dest='twitterReplacementDomain',
type=str, default=None,
help='Domain used to replace twitter.com')
parser.add_argument('--language', dest='language',
type=str, default=None,
help='Language code, eg. en/fr/de/es')
@ -406,10 +415,11 @@ parser.add_argument("--debug", type=str2bool, nargs='?',
parser.add_argument("--notificationSounds", type=str2bool, nargs='?',
const=True, default=True,
help="Play notification sounds")
parser.add_argument("--authenticatedFetch", type=str2bool, nargs='?',
parser.add_argument("--secureMode", type=str2bool, nargs='?',
const=True, default=False,
help="Enable authentication on GET requests" +
" for json (authenticated fetch)")
help="Requires all GET requests to be signed, " +
"so that the sender can be identifies and " +
"blocked if neccessary")
parser.add_argument("--instanceOnlySkillsSearch", type=str2bool, nargs='?',
const=True, default=False,
help="Skills searches only return " +
@ -633,12 +643,13 @@ if args.tests:
sys.exit()
if args.testsnetwork:
print('Network Tests')
testSharedItemsFederation()
testGroupFollow()
testPostMessageBetweenServers()
testFollowBetweenServers()
testClientToServer()
testUpdateActor()
baseDir = os.getcwd()
testSharedItemsFederation(baseDir)
testGroupFollow(baseDir)
testPostMessageBetweenServers(baseDir)
testFollowBetweenServers(baseDir)
testClientToServer(baseDir)
testUpdateActor(baseDir)
print('All tests succeeded')
sys.exit()
@ -662,6 +673,12 @@ if args.libretranslateApiKey:
setConfigParam(baseDir, 'libretranslateApiKey', args.libretranslateApiKey)
if args.posts:
if not args.domain:
originDomain = getConfigParam(baseDir, 'domain')
else:
originDomain = args.domain
if debug:
print('originDomain: ' + str(originDomain))
if '@' not in args.posts:
if '/users/' in args.posts:
postsNickname = getNicknameFromActor(args.posts)
@ -688,9 +705,11 @@ if args.posts:
proxyType = 'gnunet'
if not args.language:
args.language = 'en'
signingPrivateKeyPem = getInstanceActorKey(baseDir, originDomain)
getPublicPostsOfPerson(baseDir, nickname, domain, False, True,
proxyType, args.port, httpPrefix, debug,
__version__, args.language)
__version__, args.language,
signingPrivateKeyPem, originDomain)
sys.exit()
if args.postDomains:
@ -722,13 +741,22 @@ if args.postDomains:
domainList = []
if not args.language:
args.language = 'en'
signingPrivateKeyPem = None
if not args.domain:
originDomain = getConfigParam(baseDir, 'domain')
else:
originDomain = args.domain
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, originDomain)
domainList = getPublicPostDomains(None,
baseDir, nickname, domain,
originDomain,
proxyType, args.port,
httpPrefix, debug,
__version__,
wordFrequency, domainList,
args.language)
args.language,
signingPrivateKeyPem)
for postDomain in domainList:
print(postDomain)
sys.exit()
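The same five lines above (fall back to the configured domain, then load the instance actor key only when secure mode is enabled) recur before most of the c2s options below; a hypothetical helper capturing the pattern would look roughly like this (a sketch, not part of the codebase):

from posts import getInstanceActorKey
# getConfigParam is assumed to be importable as it is at the top of epicyon.py

def _domainAndSigningKey(baseDir: str, domain: str, secureMode: bool):
    # fall back to the configured domain if none was supplied on the command line
    if not domain:
        domain = getConfigParam(baseDir, 'domain')
    # only load the instance actor's private key when secure mode requires signed GETs
    signingPrivateKeyPem = None
    if secureMode:
        signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
    return domain, signingPrivateKeyPem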
@ -765,13 +793,17 @@ if args.postDomainsBlocked:
domainList = []
if not args.language:
args.language = 'en'
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
domainList = getPublicPostDomainsBlocked(None,
baseDir, nickname, domain,
proxyType, args.port,
httpPrefix, debug,
__version__,
wordFrequency, domainList,
args.language)
args.language,
signingPrivateKeyPem)
for postDomain in domainList:
print(postDomain)
sys.exit()
@ -806,12 +838,16 @@ if args.checkDomains:
maxBlockedDomains = 0
if not args.language:
args.language = 'en'
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
checkDomains(None,
baseDir, nickname, domain,
proxyType, args.port,
httpPrefix, debug,
__version__,
maxBlockedDomains, False, args.language)
maxBlockedDomains, False, args.language,
signingPrivateKeyPem)
sys.exit()
if args.socnet:
@ -825,10 +861,19 @@ if args.socnet:
proxyType = 'tor'
if not args.language:
args.language = 'en'
if not args.domain:
args.domain = getConfigParam(baseDir, 'domain')
domain = ''
if args.domain:
domain = args.domain
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
dotGraph = instancesGraph(baseDir, args.socnet,
proxyType, args.port,
httpPrefix, debug,
__version__, args.language)
__version__, args.language,
signingPrivateKeyPem)
try:
with open('socnet.dot', 'w+') as fp:
fp.write(dotGraph)
@ -838,6 +883,12 @@ if args.socnet:
sys.exit()
if args.postsraw:
if not args.domain:
originDomain = getConfigParam(baseDir, 'domain')
else:
originDomain = args.domain
if debug:
print('originDomain: ' + str(originDomain))
if '@' not in args.postsraw:
print('Syntax: --postsraw nickname@domain')
sys.exit()
@ -854,9 +905,11 @@ if args.postsraw:
proxyType = 'gnunet'
if not args.language:
args.language = 'en'
signingPrivateKeyPem = getInstanceActorKey(baseDir, originDomain)
getPublicPostsOfPerson(baseDir, nickname, domain, False, False,
proxyType, args.port, httpPrefix, debug,
__version__, args.language)
__version__, args.language,
signingPrivateKeyPem, originDomain)
sys.exit()
if args.json:
@ -865,8 +918,20 @@ if args.json:
asHeader = {
'Accept': 'application/ld+json; profile="' + profileStr + '"'
}
testJson = getJson(session, args.json, asHeader, None,
debug, __version__, httpPrefix, None)
if not args.domain:
args.domain = getConfigParam(baseDir, 'domain')
domain = ''
if args.domain:
domain = args.domain
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
if debug:
print('baseDir: ' + str(baseDir))
if signingPrivateKeyPem:
print('Obtained instance actor signing key')
else:
print('Did not obtain instance actor key for ' + domain)
testJson = getJson(signingPrivateKeyPem, session, args.json, asHeader,
None, debug, __version__, httpPrefix, domain)
pprint(testJson)
sys.exit()
@ -1075,6 +1140,11 @@ if args.approve:
postLog = []
cachedWebfingers = {}
personCache = {}
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
manualApproveFollowRequest(session, baseDir,
httpPrefix,
args.nickname, domain, port,
@ -1082,7 +1152,8 @@ if args.approve:
federationList,
sendThreads, postLog,
cachedWebfingers, personCache,
debug, __version__)
debug, __version__,
signingPrivateKeyPem)
sys.exit()
if args.deny:
@ -1097,6 +1168,11 @@ if args.deny:
postLog = []
cachedWebfingers = {}
personCache = {}
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
manualDenyFollowRequest(session, baseDir,
httpPrefix,
args.nickname, domain, port,
@ -1104,7 +1180,8 @@ if args.deny:
federationList,
sendThreads, postLog,
cachedWebfingers, personCache,
debug, __version__)
debug, __version__,
signingPrivateKeyPem)
sys.exit()
if args.followerspending:
@ -1184,9 +1261,14 @@ if args.message:
replyTo = args.replyto
followersOnly = False
isArticle = False
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
print('Sending post to ' + args.sendto)
sendPostViaServer(__version__,
sendPostViaServer(signingPrivateKeyPem, __version__,
baseDir, session, args.nickname, args.password,
domain, port,
toNickname, toDomain, toPort, ccUrl,
@ -1216,13 +1298,18 @@ if args.announce:
session = createSession(proxyType)
personCache = {}
cachedWebfingers = {}
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
print('Sending announce/repeat of ' + args.announce)
sendAnnounceViaServer(baseDir, session, args.nickname, args.password,
domain, port,
httpPrefix, args.announce,
cachedWebfingers, personCache,
True, __version__)
True, __version__, signingPrivateKeyPem)
for i in range(10):
# TODO detect send success/fail
time.sleep(1)
@ -1255,13 +1342,18 @@ if args.box:
args.port = 80
elif args.gnunet:
proxyType = 'gnunet'
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
session = createSession(proxyType)
boxJson = c2sBoxJson(baseDir, session,
args.nickname, args.password,
domain, port, httpPrefix,
args.box, args.pageNumber,
args.debug)
args.debug, signingPrivateKeyPem)
if boxJson:
pprint(boxJson)
else:
@ -1311,6 +1403,11 @@ if args.itemName:
session = createSession(proxyType)
personCache = {}
cachedWebfingers = {}
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
print('Sending shared item: ' + args.itemName)
sendShareViaServer(baseDir, session,
@ -1327,7 +1424,8 @@ if args.itemName:
args.duration,
cachedWebfingers, personCache,
debug, __version__,
args.itemPrice, args.itemCurrency)
args.itemPrice, args.itemCurrency,
signingPrivateKeyPem)
for i in range(10):
# TODO detect send success/fail
time.sleep(1)
@ -1348,6 +1446,11 @@ if args.undoItemName:
session = createSession(proxyType)
personCache = {}
cachedWebfingers = {}
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
print('Sending undo of shared item: ' + args.undoItemName)
sendUndoShareViaServer(baseDir, session,
@ -1356,7 +1459,7 @@ if args.undoItemName:
httpPrefix,
args.undoItemName,
cachedWebfingers, personCache,
debug, __version__)
debug, __version__, signingPrivateKeyPem)
for i in range(10):
# TODO detect send success/fail
time.sleep(1)
@ -1405,6 +1508,11 @@ if args.wantedItemName:
session = createSession(proxyType)
personCache = {}
cachedWebfingers = {}
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
print('Sending wanted item: ' + args.wantedItemName)
sendWantedViaServer(baseDir, session,
@ -1421,7 +1529,8 @@ if args.wantedItemName:
args.duration,
cachedWebfingers, personCache,
debug, __version__,
args.itemPrice, args.itemCurrency)
args.itemPrice, args.itemCurrency,
signingPrivateKeyPem)
for i in range(10):
# TODO detect send success/fail
time.sleep(1)
@ -1442,6 +1551,11 @@ if args.undoWantedItemName:
session = createSession(proxyType)
personCache = {}
cachedWebfingers = {}
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
print('Sending undo of wanted item: ' + args.undoWantedItemName)
sendUndoWantedViaServer(baseDir, session,
@ -1450,7 +1564,7 @@ if args.undoWantedItemName:
httpPrefix,
args.undoWantedItemName,
cachedWebfingers, personCache,
debug, __version__)
debug, __version__, signingPrivateKeyPem)
for i in range(10):
# TODO detect send success/fail
time.sleep(1)
@ -1471,6 +1585,11 @@ if args.like:
session = createSession(proxyType)
personCache = {}
cachedWebfingers = {}
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
print('Sending like of ' + args.like)
sendLikeViaServer(baseDir, session,
@ -1478,7 +1597,7 @@ if args.like:
domain, port,
httpPrefix, args.like,
cachedWebfingers, personCache,
True, __version__)
True, __version__, signingPrivateKeyPem)
for i in range(10):
# TODO detect send success/fail
time.sleep(1)
@ -1499,6 +1618,11 @@ if args.undolike:
session = createSession(proxyType)
personCache = {}
cachedWebfingers = {}
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
print('Sending undo like of ' + args.undolike)
sendUndoLikeViaServer(baseDir, session,
@ -1506,7 +1630,8 @@ if args.undolike:
domain, port,
httpPrefix, args.undolike,
cachedWebfingers, personCache,
True, __version__)
True, __version__,
signingPrivateKeyPem)
for i in range(10):
# TODO detect send success/fail
time.sleep(1)
@ -1527,6 +1652,11 @@ if args.bookmark:
session = createSession(proxyType)
personCache = {}
cachedWebfingers = {}
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
print('Sending bookmark of ' + args.bookmark)
sendBookmarkViaServer(baseDir, session,
@ -1534,7 +1664,8 @@ if args.bookmark:
domain, port,
httpPrefix, args.bookmark,
cachedWebfingers, personCache,
True, __version__)
True, __version__,
signingPrivateKeyPem)
for i in range(10):
# TODO detect send success/fail
time.sleep(1)
@ -1555,6 +1686,11 @@ if args.unbookmark:
session = createSession(proxyType)
personCache = {}
cachedWebfingers = {}
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
print('Sending undo bookmark of ' + args.unbookmark)
sendUndoBookmarkViaServer(baseDir, session,
@ -1562,7 +1698,7 @@ if args.unbookmark:
domain, port,
httpPrefix, args.unbookmark,
cachedWebfingers, personCache,
True, __version__)
True, __version__, signingPrivateKeyPem)
for i in range(10):
# TODO detect send success/fail
time.sleep(1)
@ -1583,6 +1719,11 @@ if args.delete:
session = createSession(proxyType)
personCache = {}
cachedWebfingers = {}
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
print('Sending delete request of ' + args.delete)
sendDeleteViaServer(baseDir, session,
@ -1590,7 +1731,7 @@ if args.delete:
domain, port,
httpPrefix, args.delete,
cachedWebfingers, personCache,
True, __version__)
True, __version__, signingPrivateKeyPem)
for i in range(10):
# TODO detect send success/fail
time.sleep(1)
@ -1623,6 +1764,11 @@ if args.follow:
followHttpPrefix = httpPrefix
if args.follow.startswith('https'):
followHttpPrefix = 'https'
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
sendFollowRequestViaServer(baseDir, session,
args.nickname, args.password,
@ -1630,7 +1776,7 @@ if args.follow:
followNickname, followDomain, followPort,
httpPrefix,
cachedWebfingers, personCache,
debug, __version__)
debug, __version__, signingPrivateKeyPem)
for t in range(20):
time.sleep(1)
# TODO some method to know if it worked
@ -1664,6 +1810,11 @@ if args.unfollow:
followHttpPrefix = httpPrefix
if args.follow.startswith('https'):
followHttpPrefix = 'https'
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
sendUnfollowRequestViaServer(baseDir, session,
args.nickname, args.password,
@ -1671,7 +1822,7 @@ if args.unfollow:
followNickname, followDomain, followPort,
httpPrefix,
cachedWebfingers, personCache,
debug, __version__)
debug, __version__, signingPrivateKeyPem)
for t in range(20):
time.sleep(1)
# TODO some method to know if it worked
@ -1694,6 +1845,11 @@ if args.followingList:
personCache = {}
cachedWebfingers = {}
followHttpPrefix = httpPrefix
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
followingJson = \
getFollowingViaServer(baseDir, session,
@ -1701,7 +1857,7 @@ if args.followingList:
domain, port,
httpPrefix, args.pageNumber,
cachedWebfingers, personCache,
debug, __version__)
debug, __version__, signingPrivateKeyPem)
if followingJson:
pprint(followingJson)
sys.exit()
@ -1722,6 +1878,11 @@ if args.followersList:
personCache = {}
cachedWebfingers = {}
followHttpPrefix = httpPrefix
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
followersJson = \
getFollowersViaServer(baseDir, session,
@ -1729,7 +1890,8 @@ if args.followersList:
domain, port,
httpPrefix, args.pageNumber,
cachedWebfingers, personCache,
debug, __version__)
debug, __version__,
signingPrivateKeyPem)
if followersJson:
pprint(followersJson)
sys.exit()
@ -1750,6 +1912,11 @@ if args.followRequestsList:
personCache = {}
cachedWebfingers = {}
followHttpPrefix = httpPrefix
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
followRequestsJson = \
getFollowRequestsViaServer(baseDir, session,
@ -1757,7 +1924,7 @@ if args.followRequestsList:
domain, port,
httpPrefix, args.pageNumber,
cachedWebfingers, personCache,
debug, __version__)
debug, __version__, signingPrivateKeyPem)
if followRequestsJson:
pprint(followRequestsJson)
sys.exit()
@ -1797,9 +1964,14 @@ if args.migrations:
httpPrefix = 'https'
port = 443
session = createSession(proxyType)
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
ctr = migrateAccounts(baseDir, session,
httpPrefix, cachedWebfingers,
True)
True, signingPrivateKeyPem)
if ctr == 0:
print('No followed accounts have moved')
else:
@ -1807,7 +1979,17 @@ if args.migrations:
sys.exit()
if args.actor:
getActorJson(args.domain, args.actor, args.http, args.gnunet, debug)
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
if debug:
print('baseDir: ' + str(baseDir))
if signingPrivateKeyPem:
print('Obtained instance actor signing key')
else:
print('Did not obtain instance actor key for ' + domain)
getActorJson(domain, args.actor, args.http, args.gnunet,
debug, False, signingPrivateKeyPem)
sys.exit()
if args.followers:
@ -1882,10 +2064,17 @@ if args.followers:
if nickname == 'inbox':
nickname = domain
hostDomain = None
if args.domain:
hostDomain = args.domain
handle = nickname + '@' + domain
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
wfRequest = webfingerHandle(session, handle,
httpPrefix, cachedWebfingers,
None, __version__, debug, False)
hostDomain, __version__, debug, False,
signingPrivateKeyPem)
if not wfRequest:
print('Unable to webfinger ' + handle)
sys.exit()
@ -1927,9 +2116,12 @@ if args.followers:
asHeader = {
'Accept': 'application/ld+json; profile="' + profileStr + '"'
}
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
followersList = \
downloadFollowCollection('followers', session,
downloadFollowCollection(signingPrivateKeyPem,
'followers', session,
httpPrefix, personUrl, 1, 3)
if followersList:
for actor in followersList:
@ -2179,6 +2371,11 @@ if args.skill:
session = createSession(proxyType)
personCache = {}
cachedWebfingers = {}
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
print('Sending ' + args.skill + ' skill level ' +
str(args.skillLevelPercent) + ' for ' + nickname)
@ -2188,7 +2385,7 @@ if args.skill:
httpPrefix,
args.skill, args.skillLevelPercent,
cachedWebfingers, personCache,
True, __version__)
True, __version__, signingPrivateKeyPem)
for i in range(10):
# TODO detect send success/fail
time.sleep(1)
@ -2209,6 +2406,11 @@ if args.availability:
session = createSession(proxyType)
personCache = {}
cachedWebfingers = {}
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
print('Sending availability status of ' + nickname +
' as ' + args.availability)
@ -2217,7 +2419,7 @@ if args.availability:
httpPrefix,
args.availability,
cachedWebfingers, personCache,
True, __version__)
True, __version__, signingPrivateKeyPem)
for i in range(10):
# TODO detect send success/fail
time.sleep(1)
@ -2318,13 +2520,18 @@ if args.block:
session = createSession(proxyType)
personCache = {}
cachedWebfingers = {}
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
print('Sending block of ' + args.block)
sendBlockViaServer(baseDir, session, nickname, args.password,
domain, port,
httpPrefix, args.block,
cachedWebfingers, personCache,
True, __version__)
True, __version__, signingPrivateKeyPem)
for i in range(10):
# TODO detect send success/fail
time.sleep(1)
@ -2345,13 +2552,18 @@ if args.mute:
session = createSession(proxyType)
personCache = {}
cachedWebfingers = {}
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
print('Sending mute of ' + args.mute)
sendMuteViaServer(baseDir, session, nickname, args.password,
domain, port,
httpPrefix, args.mute,
cachedWebfingers, personCache,
True, __version__)
True, __version__, signingPrivateKeyPem)
for i in range(10):
# TODO detect send success/fail
time.sleep(1)
@ -2372,13 +2584,18 @@ if args.unmute:
session = createSession(proxyType)
personCache = {}
cachedWebfingers = {}
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
print('Sending undo mute of ' + args.unmute)
sendUndoMuteViaServer(baseDir, session, nickname, args.password,
domain, port,
httpPrefix, args.unmute,
cachedWebfingers, personCache,
True, __version__)
True, __version__, signingPrivateKeyPem)
for i in range(10):
# TODO detect send success/fail
time.sleep(1)
@ -2411,13 +2628,18 @@ if args.unblock:
session = createSession(proxyType)
personCache = {}
cachedWebfingers = {}
if not domain:
domain = getConfigParam(baseDir, 'domain')
signingPrivateKeyPem = None
if args.secureMode:
signingPrivateKeyPem = getInstanceActorKey(baseDir, domain)
print('Sending undo block of ' + args.unblock)
sendUndoBlockViaServer(baseDir, session, nickname, args.password,
domain, port,
httpPrefix, args.unblock,
cachedWebfingers, personCache,
True, __version__)
True, __version__, signingPrivateKeyPem)
for i in range(10):
# TODO detect send success/fail
time.sleep(1)
@ -2803,6 +3025,15 @@ if YTDomain:
if '.' in YTDomain:
args.YTReplacementDomain = YTDomain
twitterDomain = getConfigParam(baseDir, 'twitterdomain')
if twitterDomain:
if '://' in twitterDomain:
twitterDomain = twitterDomain.split('://')[1]
if '/' in twitterDomain:
twitterDomain = twitterDomain.split('/')[0]
if '.' in twitterDomain:
args.twitterReplacementDomain = twitterDomain
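The clean-up above (strip any scheme, then any path) reduces a configured value such as 'https://nitter.example.net/some/path' to a bare domain; a standalone equivalent for illustration (the nitter hostname is made up):

def bareDomain(value: str) -> str:
    # strip the scheme and any path so that only the domain remains
    if '://' in value:
        value = value.split('://')[1]
    if '/' in value:
        value = value.split('/')[0]
    return value

print(bareDomain('https://nitter.example.net/some/path'))  # nitter.example.net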
if setTheme(baseDir, themeName, domain,
args.allowLocalNetworkAccess, args.language):
print('Theme set to ' + themeName)
@ -2833,7 +3064,8 @@ if args.defaultCurrency:
print('Default currency set to ' + args.defaultCurrency)
if __name__ == "__main__":
runDaemon(args.lowBandwidth, args.maxLikeCount,
runDaemon(args.defaultReplyIntervalHours,
args.lowBandwidth, args.maxLikeCount,
sharedItemsFederatedDomains,
userAgentsBlocked,
args.logLoginFailures,
@ -2869,9 +3101,10 @@ if __name__ == "__main__":
instanceId, args.client, baseDir,
domain, onionDomain, i2pDomain,
args.YTReplacementDomain,
args.twitterReplacementDomain,
port, proxyPort, httpPrefix,
federationList, args.maxMentions,
args.maxEmoji, args.authenticatedFetch,
args.maxEmoji, args.secureMode,
proxyType, args.maxReplies,
args.domainMaxPostsPerDay,
args.accountMaxPostsPerDay,

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "RSS Feeds"

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Moderation"

follow.py
View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "ActivityPub"
@ -211,6 +211,12 @@ def isFollowerOfPerson(baseDir: str, nickname: str, domain: str,
followerNickname: str, followerDomain: str) -> bool:
"""is the given nickname a follower of followerNickname?
"""
if not followerDomain:
print('No followerDomain')
return False
if not followerNickname:
print('No followerNickname for ' + followerDomain)
return False
domain = removeDomainPort(domain)
followersFile = acctDir(baseDir, nickname, domain) + '/followers.txt'
if not os.path.isfile(followersFile):
@ -308,7 +314,10 @@ def clearFollows(baseDir: str, nickname: str, domain: str,
os.mkdir(baseDir + '/accounts/' + handle)
filename = baseDir + '/accounts/' + handle + '/' + followFile
if os.path.isfile(filename):
os.remove(filename)
try:
os.remove(filename)
except BaseException:
pass
def clearFollowers(baseDir: str, nickname: str, domain: str) -> None:
@ -631,7 +640,8 @@ def receiveFollowRequest(session, baseDir: str, httpPrefix: str,
cachedWebfingers: {}, personCache: {},
messageJson: {}, federationList: [],
debug: bool, projectVersion: str,
maxFollowers: int, onionDomain: str) -> bool:
maxFollowers: int, onionDomain: str,
signingPrivateKeyPem: str) -> bool:
"""Receives a follow request within the POST section of HTTPServer
"""
if not messageJson['type'].startswith('Follow'):
@ -743,7 +753,8 @@ def receiveFollowRequest(session, baseDir: str, httpPrefix: str,
print('Obtaining the following actor: ' + messageJson['actor'])
if not getPersonPubKey(baseDir, session, messageJson['actor'],
personCache, debug, projectVersion,
httpPrefix, domainToFollow, onionDomain):
httpPrefix, domainToFollow, onionDomain,
signingPrivateKeyPem):
if debug:
print('Unable to obtain following actor: ' +
messageJson['actor'])
@ -779,7 +790,8 @@ def receiveFollowRequest(session, baseDir: str, httpPrefix: str,
print('Obtaining the following actor: ' + messageJson['actor'])
if not getPersonPubKey(baseDir, session, messageJson['actor'],
personCache, debug, projectVersion,
httpPrefix, domainToFollow, onionDomain):
httpPrefix, domainToFollow, onionDomain,
signingPrivateKeyPem):
if debug:
print('Unable to obtain following actor: ' +
messageJson['actor'])
@ -824,7 +836,8 @@ def receiveFollowRequest(session, baseDir: str, httpPrefix: str,
messageJson['actor'], federationList,
messageJson, sendThreads, postLog,
cachedWebfingers, personCache,
debug, projectVersion, True)
debug, projectVersion, True,
signingPrivateKeyPem)
def followedAccountAccepts(session, baseDir: str, httpPrefix: str,
@ -835,7 +848,8 @@ def followedAccountAccepts(session, baseDir: str, httpPrefix: str,
followJson: {}, sendThreads: [], postLog: [],
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str,
removeFollowActivity: bool):
removeFollowActivity: bool,
signingPrivateKeyPem: str):
"""The person receiving a follow request accepts the new follower
and sends back an Accept activity
"""
@ -884,7 +898,8 @@ def followedAccountAccepts(session, baseDir: str, httpPrefix: str,
federationList,
sendThreads, postLog, cachedWebfingers,
personCache, debug, projectVersion, None,
groupAccount)
groupAccount, signingPrivateKeyPem,
7856837)
def followedAccountRejects(session, baseDir: str, httpPrefix: str,
@ -894,7 +909,8 @@ def followedAccountRejects(session, baseDir: str, httpPrefix: str,
federationList: [],
sendThreads: [], postLog: [],
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str):
debug: bool, projectVersion: str,
signingPrivateKeyPem: str):
"""The person receiving a follow request rejects the new follower
and sends back a Reject activity
"""
@ -949,7 +965,8 @@ def followedAccountRejects(session, baseDir: str, httpPrefix: str,
federationList,
sendThreads, postLog, cachedWebfingers,
personCache, debug, projectVersion, None,
groupAccount)
groupAccount, signingPrivateKeyPem,
6393063)
def sendFollowRequest(session, baseDir: str,
@ -960,9 +977,12 @@ def sendFollowRequest(session, baseDir: str,
clientToServer: bool, federationList: [],
sendThreads: [], postLog: [], cachedWebfingers: {},
personCache: {}, debug: bool,
projectVersion: str) -> {}:
projectVersion: str, signingPrivateKeyPem: str) -> {}:
"""Gets the json object for sending a follow request
"""
if not signingPrivateKeyPem:
print('WARN: follow request without signing key')
if not domainPermitted(followDomain, federationList):
print('You are not permitted to follow the domain ' + followDomain)
return None
@ -1016,7 +1036,8 @@ def sendFollowRequest(session, baseDir: str,
httpPrefix, True, clientToServer,
federationList,
sendThreads, postLog, cachedWebfingers, personCache,
debug, projectVersion, None, groupAccount)
debug, projectVersion, None, groupAccount,
signingPrivateKeyPem, 8234389)
return newFollowJson
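For reference, the Follow activity which ends up being signed and posted is an ActivityPub object of roughly this shape (the field values and id scheme here are illustrative, not copied from the function):

exampleFollowJson = {
    '@context': 'https://www.w3.org/ns/activitystreams',
    'id': 'https://example.social/users/alice/statuses/1633180820',
    'type': 'Follow',
    'actor': 'https://example.social/users/alice',
    'object': 'https://other.instance/users/bob',
    'to': ['https://other.instance/users/bob']
}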
@ -1028,7 +1049,8 @@ def sendFollowRequestViaServer(baseDir: str, session,
followPort: int,
httpPrefix: str,
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str) -> {}:
debug: bool, projectVersion: str,
signingPrivateKeyPem: str) -> {}:
"""Creates a follow request via c2s
"""
if not session:
@ -1057,7 +1079,8 @@ def sendFollowRequestViaServer(baseDir: str, session,
# lookup the inbox for the To handle
wfRequest = \
webfingerHandle(session, handle, httpPrefix, cachedWebfingers,
fromDomain, projectVersion, debug, False)
fromDomain, projectVersion, debug, False,
signingPrivateKeyPem)
if not wfRequest:
if debug:
print('DEBUG: follow request webfinger failed for ' + handle)
@ -1070,11 +1093,13 @@ def sendFollowRequestViaServer(baseDir: str, session,
postToBox = 'outbox'
# get the actor inbox for the To handle
originDomain = fromDomain
(inboxUrl, pubKeyId, pubKey,
fromPersonId, sharedInbox, avatarUrl,
displayName) = getPersonBox(baseDir, session, wfRequest, personCache,
projectVersion, httpPrefix, fromNickname,
fromDomain, postToBox, 52025)
displayName, _) = getPersonBox(signingPrivateKeyPem, originDomain,
baseDir, session, wfRequest, personCache,
projectVersion, httpPrefix, fromNickname,
fromDomain, postToBox, 52025)
if not inboxUrl:
if debug:
@ -1114,7 +1139,8 @@ def sendUnfollowRequestViaServer(baseDir: str, session,
followPort: int,
httpPrefix: str,
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str) -> {}:
debug: bool, projectVersion: str,
signingPrivateKeyPem: str) -> {}:
"""Creates a unfollow request via c2s
"""
if not session:
@ -1147,7 +1173,8 @@ def sendUnfollowRequestViaServer(baseDir: str, session,
# lookup the inbox for the To handle
wfRequest = \
webfingerHandle(session, handle, httpPrefix, cachedWebfingers,
fromDomain, projectVersion, debug, False)
fromDomain, projectVersion, debug, False,
signingPrivateKeyPem)
if not wfRequest:
if debug:
print('DEBUG: unfollow webfinger failed for ' + handle)
@ -1160,14 +1187,16 @@ def sendUnfollowRequestViaServer(baseDir: str, session,
postToBox = 'outbox'
# get the actor inbox for the To handle
(inboxUrl, pubKeyId, pubKey,
fromPersonId, sharedInbox,
avatarUrl, displayName) = getPersonBox(baseDir, session,
wfRequest, personCache,
projectVersion, httpPrefix,
fromNickname,
fromDomain, postToBox,
76536)
originDomain = fromDomain
(inboxUrl, pubKeyId, pubKey, fromPersonId, sharedInbox, avatarUrl,
displayName, _) = getPersonBox(signingPrivateKeyPem,
originDomain,
baseDir, session,
wfRequest, personCache,
projectVersion, httpPrefix,
fromNickname,
fromDomain, postToBox,
76536)
if not inboxUrl:
if debug:
@ -1205,7 +1234,8 @@ def getFollowingViaServer(baseDir: str, session,
domain: str, port: int,
httpPrefix: str, pageNumber: int,
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str) -> {}:
debug: bool, projectVersion: str,
signingPrivateKeyPem: str) -> {}:
"""Gets a page from the following collection as json
"""
if not session:
@ -1227,9 +1257,8 @@ def getFollowingViaServer(baseDir: str, session,
pageNumber = 1
url = followActor + '/following?page=' + str(pageNumber)
followingJson = \
getJson(session, url, headers, {}, debug,
__version__, httpPrefix,
domain, 10, True)
getJson(signingPrivateKeyPem, session, url, headers, {}, debug,
__version__, httpPrefix, domain, 10, True)
if not followingJson:
if debug:
print('DEBUG: GET following list failed for c2s to ' + url)
@ -1246,7 +1275,8 @@ def getFollowersViaServer(baseDir: str, session,
domain: str, port: int,
httpPrefix: str, pageNumber: int,
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str) -> {}:
debug: bool, projectVersion: str,
signingPrivateKeyPem: str) -> {}:
"""Gets a page from the followers collection as json
"""
if not session:
@ -1268,7 +1298,7 @@ def getFollowersViaServer(baseDir: str, session,
pageNumber = 1
url = followActor + '/followers?page=' + str(pageNumber)
followersJson = \
getJson(session, url, headers, {}, debug,
getJson(signingPrivateKeyPem, session, url, headers, {}, debug,
__version__, httpPrefix, domain, 10, True)
if not followersJson:
if debug:
@ -1286,7 +1316,8 @@ def getFollowRequestsViaServer(baseDir: str, session,
domain: str, port: int,
httpPrefix: str, pageNumber: int,
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str) -> {}:
debug: bool, projectVersion: str,
signingPrivateKeyPem: str) -> {}:
"""Gets a page from the follow requests collection as json
"""
if not session:
@ -1308,7 +1339,7 @@ def getFollowRequestsViaServer(baseDir: str, session,
pageNumber = 1
url = followActor + '/followrequests?page=' + str(pageNumber)
followersJson = \
getJson(session, url, headers, {}, debug,
getJson(signingPrivateKeyPem, session, url, headers, {}, debug,
__version__, httpPrefix, domain, 10, True)
if not followersJson:
if debug:
@ -1326,7 +1357,8 @@ def approveFollowRequestViaServer(baseDir: str, session,
domain: str, port: int,
httpPrefix: str, approveHandle: int,
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str) -> str:
debug: bool, projectVersion: str,
signingPrivateKeyPem: str) -> str:
"""Approves a follow request
This is not exactly via c2s though. It simulates pressing the Approve
button on the web interface
@ -1348,7 +1380,7 @@ def approveFollowRequestViaServer(baseDir: str, session,
url = actor + '/followapprove=' + approveHandle
approveHtml = \
getJson(session, url, headers, {}, debug,
getJson(signingPrivateKeyPem, session, url, headers, {}, debug,
__version__, httpPrefix, domain, 10, True)
if not approveHtml:
if debug:
@ -1366,7 +1398,8 @@ def denyFollowRequestViaServer(baseDir: str, session,
domain: str, port: int,
httpPrefix: str, denyHandle: int,
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str) -> str:
debug: bool, projectVersion: str,
signingPrivateKeyPem: str) -> str:
"""Denies a follow request
This is not exactly via c2s though. It simulates pressing the Deny
button on the web interface
@ -1388,7 +1421,7 @@ def denyFollowRequestViaServer(baseDir: str, session,
url = actor + '/followdeny=' + denyHandle
denyHtml = \
getJson(session, url, headers, {}, debug,
getJson(signingPrivateKeyPem, session, url, headers, {}, debug,
__version__, httpPrefix, domain, 10, True)
if not denyHtml:
if debug:

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Calendar"

View File

@ -62,8 +62,6 @@ Create a web server configuration:
And paste the following:
proxy_cache_path /var/www/cache levels=1:2 keys_zone=my_cache:10m max_size=10g
inactive=60m use_temp_path=off;
server {
listen 80;
listen [::]:80;
@ -118,8 +116,6 @@ And paste the following:
location / {
proxy_http_version 1.1;
client_max_body_size 31M;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forward-For $proxy_add_x_forwarded_for;
@ -135,10 +131,6 @@ And paste the following:
proxy_redirect off;
proxy_request_buffering off;
proxy_buffering off;
location ~ ^/accounts/(avatars|headers)/(.*).(png|jpg|gif|webp|svg) {
expires 1d;
proxy_pass http://localhost:7156;
}
proxy_pass http://localhost:7156;
}
}
@ -146,7 +138,6 @@ And paste the following:
Enable the site:
ln -s /etc/nginx/sites-available/YOUR_DOMAIN /etc/nginx/sites-enabled/
mkdir /var/www/cache
Forward port 443 from your internet router to your server. If you have dynamic DNS make sure it's configured. Add a TLS certificate:

git.py
View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Profile Metadata"

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Core"

View File

@ -4,7 +4,7 @@ __credits__ = ['lamia']
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Security"
@ -24,6 +24,7 @@ from time import gmtime, strftime
import datetime
from utils import getFullDomain
from utils import getSHA256
from utils import getSHA512
from utils import localActorUrl
@ -39,7 +40,8 @@ def signPostHeaders(dateStr: str, privateKeyPem: str,
toDomain: str, toPort: int,
path: str,
httpPrefix: str,
messageBodyJsonStr: str) -> str:
messageBodyJsonStr: str,
contentType: str) -> str:
"""Returns a raw signature string that can be plugged into a header and
used to verify the authenticity of an HTTP transmission.
"""
@ -49,13 +51,18 @@ def signPostHeaders(dateStr: str, privateKeyPem: str,
if not dateStr:
dateStr = strftime("%a, %d %b %Y %H:%M:%S %Z", gmtime())
keyID = localActorUrl(httpPrefix, nickname, domain) + '#main-key'
if nickname != domain and nickname.lower() != 'actor':
keyID = localActorUrl(httpPrefix, nickname, domain)
else:
# instance actor
keyID = httpPrefix + '://' + domain + '/actor'
keyID += '#main-key'
if not messageBodyJsonStr:
headers = {
'(request-target)': f'post {path}',
'(request-target)': f'get {path}',
'host': toDomain,
'date': dateStr,
'content-type': 'application/json'
'accept': contentType
}
else:
bodyDigest = messageContentDigest(messageBodyJsonStr)
@ -78,7 +85,8 @@ def signPostHeaders(dateStr: str, privateKeyPem: str,
signedHeaderText = ''
for headerKey in signedHeaderKeys:
signedHeaderText += f'{headerKey}: {headers[headerKey]}\n'
signedHeaderText = signedHeaderText.strip()
# strip the trailing linefeed
signedHeaderText = signedHeaderText.rstrip('\n')
# signedHeaderText.encode('ascii') matches
headerDigest = getSHA256(signedHeaderText.encode('ascii'))
# print('headerDigest2: ' + str(headerDigest))
@ -155,11 +163,18 @@ def signPostHeadersNew(dateStr: str, privateKeyPem: str,
for headerKey in signedHeaderKeys:
signedHeaderText += f'{headerKey}: {headers[headerKey]}\n'
signedHeaderText = signedHeaderText.strip()
headerDigest = getSHA256(signedHeaderText.encode('ascii'))
# Sign the digest. Potentially other signing algorithms can be added here.
signature = ''
if algorithm == 'rsa-sha256':
if algorithm == 'rsa-sha512':
headerDigest = getSHA512(signedHeaderText.encode('ascii'))
rawSignature = key.sign(headerDigest,
padding.PKCS1v15(),
hazutils.Prehashed(hashes.SHA512()))
signature = base64.b64encode(rawSignature).decode('ascii')
else:
# default sha256
headerDigest = getSHA256(signedHeaderText.encode('ascii'))
rawSignature = key.sign(headerDigest,
padding.PKCS1v15(),
hazutils.Prehashed(hashes.SHA256()))
@ -184,27 +199,35 @@ def signPostHeadersNew(dateStr: str, privateKeyPem: str,
return signatureIndexHeader, signatureHeader
def createSignedHeader(privateKeyPem: str, nickname: str,
def createSignedHeader(dateStr: str, privateKeyPem: str, nickname: str,
domain: str, port: int,
toDomain: str, toPort: int,
path: str, httpPrefix: str, withDigest: bool,
messageBodyJsonStr: str) -> {}:
messageBodyJsonStr: str,
contentType: str) -> {}:
"""Note that the domain is the destination, not the sender
"""
contentType = 'application/activity+json'
headerDomain = getFullDomain(toDomain, toPort)
dateStr = strftime("%a, %d %b %Y %H:%M:%S %Z", gmtime())
# if no date is given then create one
if not dateStr:
dateStr = strftime("%a, %d %b %Y %H:%M:%S %Z", gmtime())
# Content-Type or Accept header
if not contentType:
contentType = 'application/activity+json'
if not withDigest:
headers = {
'(request-target)': f'post {path}',
'(request-target)': f'get {path}',
'host': headerDomain,
'date': dateStr
'date': dateStr,
'accept': contentType
}
signatureHeader = \
signPostHeaders(dateStr, privateKeyPem, nickname,
domain, port, toDomain, toPort,
path, httpPrefix, None)
path, httpPrefix, None, contentType)
else:
bodyDigest = messageContentDigest(messageBodyJsonStr)
contentLength = len(messageBodyJsonStr)
@ -220,7 +243,8 @@ def createSignedHeader(privateKeyPem: str, nickname: str,
signPostHeaders(dateStr, privateKeyPem, nickname,
domain, port,
toDomain, toPort,
path, httpPrefix, messageBodyJsonStr)
path, httpPrefix, messageBodyJsonStr,
contentType)
headers['signature'] = signatureHeader
return headers
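Underneath these helpers the signature itself is plain PKCS#1 v1.5 over a prehashed digest of the header lines. A self-contained sketch using the cryptography library directly, with a throwaway key and made-up header values, independent of the Epicyon helpers:

import base64
import hashlib
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa
from cryptography.hazmat.primitives.asymmetric import utils as hazutils

# throwaway RSA key, only for demonstration
key = rsa.generate_private_key(public_exponent=65537, key_size=2048)

# header lines to be signed, in the order listed by the signature header
signedHeaderText = (
    '(request-target): get /users/alice\n'
    'host: example.social\n'
    'date: Sat, 02 Oct 2021 14:20:20 GMT\n'
    'accept: application/activity+json')

headerDigest = hashlib.sha256(signedHeaderText.encode('ascii')).digest()
rawSignature = key.sign(headerDigest,
                        padding.PKCS1v15(),
                        hazutils.Prehashed(hashes.SHA256()))
signature = base64.b64encode(rawSignature).decode('ascii')

# the receiver verifies with the matching public key and the same digest
key.public_key().verify(rawSignature, headerDigest,
                        padding.PKCS1v15(),
                        hazutils.Prehashed(hashes.SHA256()))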
@ -302,9 +326,13 @@ def verifyPostHeaders(httpPrefix: str, publicKeyPem: str, headers: dict,
for k, v in [i.split('=', 1) for i in signatureHeader.split(',')]
}
if debug:
print('signatureDict: ' + str(signatureDict))
# Unpack the signed headers and set values based on current headers and
# body (if a digest was included)
signedHeaderList = []
algorithm = 'rsa-sha256'
for signedHeader in signatureDict[requestTargetKey].split(fieldSep2):
signedHeader = signedHeader.strip()
if debug:
@ -323,6 +351,9 @@ def verifyPostHeaders(httpPrefix: str, publicKeyPem: str, headers: dict,
# if ')' in appendStr:
# appendStr = appendStr.split(')')[0]
signedHeaderList.append(appendStr)
elif signedHeader == 'algorithm':
if headers.get(signedHeader):
algorithm = headers[signedHeader]
elif signedHeader == 'digest':
if messageBodyDigest:
bodyDigest = messageBodyDigest
@ -333,19 +364,17 @@ def verifyPostHeaders(httpPrefix: str, publicKeyPem: str, headers: dict,
if headers.get(signedHeader):
appendStr = f'content-length: {headers[signedHeader]}'
signedHeaderList.append(appendStr)
elif headers.get('Content-Length'):
contentLength = headers['Content-Length']
signedHeaderList.append(f'content-length: {contentLength}')
elif headers.get('Content-length'):
contentLength = headers['Content-length']
appendStr = f'content-length: {contentLength}'
signedHeaderList.append(appendStr)
else:
if headers.get('Content-Length'):
contentLength = headers['Content-Length']
signedHeaderList.append(f'content-length: {contentLength}')
else:
if headers.get('Content-length'):
contentLength = headers['Content-length']
appendStr = f'content-length: {contentLength}'
signedHeaderList.append(appendStr)
else:
if debug:
print('DEBUG: verifyPostHeaders ' + signedHeader +
' not found in ' + str(headers))
if debug:
print('DEBUG: verifyPostHeaders ' + signedHeader +
' not found in ' + str(headers))
else:
if headers.get(signedHeader):
if signedHeader == 'date' and not noRecencyCheck:
@ -395,11 +424,10 @@ def verifyPostHeaders(httpPrefix: str, publicKeyPem: str, headers: dict,
signedHeaderList.append(
f'{signedHeader}: {headers[signedHeaderCap]}')
if debug:
print('DEBUG: signedHeaderList: ' + str(signedHeaderList))
# Now we have our header data digest
signedHeaderText = '\n'.join(signedHeaderList)
headerDigest = getSHA256(signedHeaderText.encode('ascii'))
if debug:
print('signedHeaderText:\n' + signedHeaderText + 'END')
# Get the signature, verify with public key, return result
signature = None
@ -415,15 +443,29 @@ def verifyPostHeaders(httpPrefix: str, publicKeyPem: str, headers: dict,
else:
# Original Mastodon signature
signature = base64.b64decode(signatureDict['signature'])
if debug:
print('signature: ' + algorithm + ' ' +
signatureDict['signature'])
# If extra signing algorithms need to be added then do it here
if algorithm == 'rsa-sha256':
headerDigest = getSHA256(signedHeaderText.encode('ascii'))
paddingStr = padding.PKCS1v15()
alg = hazutils.Prehashed(hashes.SHA256())
elif algorithm == 'rsa-sha512':
headerDigest = getSHA512(signedHeaderText.encode('ascii'))
paddingStr = padding.PKCS1v15()
alg = hazutils.Prehashed(hashes.SHA512())
else:
print('Unknown http signature algorithm: ' + algorithm)
paddingStr = padding.PKCS1v15()
alg = hazutils.Prehashed(hashes.SHA256())
headerDigest = ''
try:
pubkey.verify(
signature,
headerDigest,
padding.PKCS1v15(),
hazutils.Prehashed(hashes.SHA256()))
pubkey.verify(signature, headerDigest, paddingStr, alg)
return True
except BaseException:
if debug:
print('DEBUG: verifyPostHeaders pkcs1_15 verify failure')
return False
return False

View File

@ -5,7 +5,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.1.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
import base64, hashlib, sys

Binary file not shown (image, 131 KiB)

inbox.py

File diff suppressed because it is too large

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Profile Metadata"

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Core"

like.py
View File

@ -3,10 +3,12 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "ActivityPub"
import os
from pprint import pprint
from utils import removeDomainPort
from utils import hasObjectDict
from utils import hasUsersPath
@ -16,10 +18,13 @@ from utils import urlPermitted
from utils import getNicknameFromActor
from utils import getDomainFromActor
from utils import locatePost
from utils import updateLikesCollection
from utils import undoLikesCollectionEntry
from utils import hasGroupType
from utils import localActorUrl
from utils import loadJson
from utils import saveJson
from utils import removePostFromCache
from utils import getCachedPostFilename
from posts import sendSignedJson
from session import postJson
from webfinger import webfingerHandle
@ -62,7 +67,8 @@ def _like(recentPostsCache: {},
clientToServer: bool,
sendThreads: [], postLog: [],
personCache: {}, cachedWebfingers: {},
debug: bool, projectVersion: str) -> {}:
debug: bool, projectVersion: str,
signingPrivateKeyPem: str) -> {}:
"""Creates a like
actor is the person doing the liking
'to' might be a specific person (actor) whose post was liked
@ -122,7 +128,8 @@ def _like(recentPostsCache: {},
'https://www.w3.org/ns/activitystreams#Public',
httpPrefix, True, clientToServer, federationList,
sendThreads, postLog, cachedWebfingers, personCache,
debug, projectVersion, None, groupAccount)
debug, projectVersion, None, groupAccount,
signingPrivateKeyPem, 7367374)
return newLikeJson
@ -135,7 +142,8 @@ def likePost(recentPostsCache: {},
likeStatusNumber: int, clientToServer: bool,
sendThreads: [], postLog: [],
personCache: {}, cachedWebfingers: {},
debug: bool, projectVersion: str) -> {}:
debug: bool, projectVersion: str,
signingPrivateKeyPem: str) -> {}:
"""Likes a given status post. This is only used by unit tests
"""
likeDomain = getFullDomain(likeDomain, likePort)
@ -147,7 +155,7 @@ def likePost(recentPostsCache: {},
session, baseDir, federationList, nickname, domain, port,
ccList, httpPrefix, objectUrl, actorLiked, clientToServer,
sendThreads, postLog, personCache, cachedWebfingers,
debug, projectVersion)
debug, projectVersion, signingPrivateKeyPem)
def sendLikeViaServer(baseDir: str, session,
@ -155,7 +163,8 @@ def sendLikeViaServer(baseDir: str, session,
fromDomain: str, fromPort: int,
httpPrefix: str, likeUrl: str,
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str) -> {}:
debug: bool, projectVersion: str,
signingPrivateKeyPem: str) -> {}:
"""Creates a like via c2s
"""
if not session:
@ -178,7 +187,8 @@ def sendLikeViaServer(baseDir: str, session,
# lookup the inbox for the To handle
wfRequest = webfingerHandle(session, handle, httpPrefix,
cachedWebfingers,
fromDomain, projectVersion, debug, False)
fromDomain, projectVersion, debug, False,
signingPrivateKeyPem)
if not wfRequest:
if debug:
print('DEBUG: like webfinger failed for ' + handle)
@ -191,12 +201,15 @@ def sendLikeViaServer(baseDir: str, session,
postToBox = 'outbox'
# get the actor inbox for the To handle
(inboxUrl, pubKeyId, pubKey, fromPersonId, sharedInbox,
avatarUrl, displayName) = getPersonBox(baseDir, session, wfRequest,
personCache,
projectVersion, httpPrefix,
fromNickname, fromDomain,
postToBox, 72873)
originDomain = fromDomain
(inboxUrl, pubKeyId, pubKey, fromPersonId, sharedInbox, avatarUrl,
displayName, _) = getPersonBox(signingPrivateKeyPem,
originDomain,
baseDir, session, wfRequest,
personCache,
projectVersion, httpPrefix,
fromNickname, fromDomain,
postToBox, 72873)
if not inboxUrl:
if debug:
@ -233,7 +246,8 @@ def sendUndoLikeViaServer(baseDir: str, session,
fromDomain: str, fromPort: int,
httpPrefix: str, likeUrl: str,
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str) -> {}:
debug: bool, projectVersion: str,
signingPrivateKeyPem: str) -> {}:
"""Undo a like via c2s
"""
if not session:
@ -260,7 +274,8 @@ def sendUndoLikeViaServer(baseDir: str, session,
# lookup the inbox for the To handle
wfRequest = webfingerHandle(session, handle, httpPrefix,
cachedWebfingers,
fromDomain, projectVersion, debug, False)
fromDomain, projectVersion, debug, False,
signingPrivateKeyPem)
if not wfRequest:
if debug:
print('DEBUG: unlike webfinger failed for ' + handle)
@ -274,12 +289,15 @@ def sendUndoLikeViaServer(baseDir: str, session,
postToBox = 'outbox'
# get the actor inbox for the To handle
(inboxUrl, pubKeyId, pubKey, fromPersonId, sharedInbox,
avatarUrl, displayName) = getPersonBox(baseDir, session, wfRequest,
personCache, projectVersion,
httpPrefix, fromNickname,
fromDomain, postToBox,
72625)
originDomain = fromDomain
(inboxUrl, pubKeyId, pubKey, fromPersonId, sharedInbox, avatarUrl,
displayName, _) = getPersonBox(signingPrivateKeyPem,
originDomain,
baseDir, session, wfRequest,
personCache, projectVersion,
httpPrefix, fromNickname,
fromDomain, postToBox,
72625)
if not inboxUrl:
if debug:
@ -398,3 +416,68 @@ def outboxUndoLike(recentPostsCache: {},
domain, debug)
if debug:
print('DEBUG: post undo liked via c2s - ' + postFilename)
def updateLikesCollection(recentPostsCache: {},
baseDir: str, postFilename: str,
objectUrl: str, actor: str,
nickname: str, domain: str, debug: bool) -> None:
"""Updates the likes collection within a post
"""
postJsonObject = loadJson(postFilename)
if not postJsonObject:
return
# remove any cached version of this post so that the
# like icon is changed
removePostFromCache(postJsonObject, recentPostsCache)
cachedPostFilename = getCachedPostFilename(baseDir, nickname,
domain, postJsonObject)
if cachedPostFilename:
if os.path.isfile(cachedPostFilename):
try:
os.remove(cachedPostFilename)
except BaseException:
pass
if not hasObjectDict(postJsonObject):
if debug:
pprint(postJsonObject)
print('DEBUG: post ' + objectUrl + ' has no object')
return
if not objectUrl.endswith('/likes'):
objectUrl = objectUrl + '/likes'
if not postJsonObject['object'].get('likes'):
if debug:
print('DEBUG: Adding initial like to ' + objectUrl)
likesJson = {
"@context": "https://www.w3.org/ns/activitystreams",
'id': objectUrl,
'type': 'Collection',
"totalItems": 1,
'items': [{
'type': 'Like',
'actor': actor
}]
}
postJsonObject['object']['likes'] = likesJson
else:
if not postJsonObject['object']['likes'].get('items'):
postJsonObject['object']['likes']['items'] = []
for likeItem in postJsonObject['object']['likes']['items']:
if likeItem.get('actor'):
if likeItem['actor'] == actor:
# already liked
return
newLike = {
'type': 'Like',
'actor': actor
}
postJsonObject['object']['likes']['items'].append(newLike)
itlen = len(postJsonObject['object']['likes']['items'])
postJsonObject['object']['likes']['totalItems'] = itlen
if debug:
print('DEBUG: saving post with likes added')
pprint(postJsonObject)
saveJson(postJsonObject, postFilename)
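After a single like the post's object therefore carries a collection of this shape (the id and actor values are illustrative):

exampleLikes = {
    "@context": "https://www.w3.org/ns/activitystreams",
    "id": "https://example.social/users/alice/statuses/123456/likes",
    "type": "Collection",
    "totalItems": 1,
    "items": [{
        "type": "Like",
        "actor": "https://other.instance/users/bob"
    }]
}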

View File

@ -5,7 +5,7 @@ __credits__ = ['Based on ' +
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Security"

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "ActivityPub"
@ -26,7 +26,8 @@ def manualDenyFollowRequest(session, baseDir: str,
sendThreads: [], postLog: [],
cachedWebfingers: {}, personCache: {},
debug: bool,
projectVersion: str) -> None:
projectVersion: str,
signingPrivateKeyPem: str) -> None:
"""Manually deny a follow request
"""
accountsDir = acctDir(baseDir, nickname, domain)
@ -60,7 +61,8 @@ def manualDenyFollowRequest(session, baseDir: str,
federationList,
sendThreads, postLog,
cachedWebfingers, personCache,
debug, projectVersion)
debug, projectVersion,
signingPrivateKeyPem)
print('Follow request from ' + denyHandle + ' was denied.')
@ -87,7 +89,8 @@ def manualApproveFollowRequest(session, baseDir: str,
sendThreads: [], postLog: [],
cachedWebfingers: {}, personCache: {},
debug: bool,
projectVersion: str) -> None:
projectVersion: str,
signingPrivateKeyPem: str) -> None:
"""Manually approve a follow request
"""
handle = nickname + '@' + domain
@ -176,7 +179,8 @@ def manualApproveFollowRequest(session, baseDir: str,
cachedWebfingers,
personCache,
debug,
projectVersion, False)
projectVersion, False,
signingPrivateKeyPem)
updateApprovedFollowers = True
else:
# this isn't the approved follow so it will remain
@ -218,6 +222,12 @@ def manualApproveFollowRequest(session, baseDir: str,
# remove the .follow file
if followActivityfilename:
if os.path.isfile(followActivityfilename):
os.remove(followActivityfilename)
try:
os.remove(followActivityfilename)
except BaseException:
pass
else:
os.remove(approveFollowsFilename + '.new')
try:
os.remove(approveFollowsFilename + '.new')
except BaseException:
pass

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Web Interface"

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "API"

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Profile Metadata"

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Timeline"
@ -28,10 +28,10 @@ from shutil import move
from city import spoofGeolocation
def replaceYouTube(postJsonObject: {}, replacementDomain: str,
systemLanguage: str) -> None:
"""Replace YouTube with a replacement domain
This denies Google some, but not all, tracking data
def _replaceSiloDomain(postJsonObject: {},
siloDomain: str, replacementDomain: str,
systemLanguage: str) -> None:
"""Replace a silo domain with a replacement domain
"""
if not replacementDomain:
return
@ -40,14 +40,32 @@ def replaceYouTube(postJsonObject: {}, replacementDomain: str,
if not postJsonObject['object'].get('content'):
return
contentStr = getBaseContentFromPost(postJsonObject, systemLanguage)
if 'www.youtube.com' not in contentStr:
if siloDomain not in contentStr:
return
contentStr = contentStr.replace('www.youtube.com', replacementDomain)
contentStr = contentStr.replace(siloDomain, replacementDomain)
postJsonObject['object']['content'] = contentStr
if postJsonObject['object'].get('contentMap'):
postJsonObject['object']['contentMap'][systemLanguage] = contentStr
def replaceYouTube(postJsonObject: {}, replacementDomain: str,
systemLanguage: str) -> None:
"""Replace YouTube with a replacement domain
This denies Google some, but not all, tracking data
"""
_replaceSiloDomain(postJsonObject, 'www.youtube.com',
replacementDomain, systemLanguage)
def replaceTwitter(postJsonObject: {}, replacementDomain: str,
systemLanguage: str) -> None:
"""Replace Twitter with a replacement domain
This allows you to view twitter posts without having a twitter account
"""
_replaceSiloDomain(postJsonObject, 'twitter.com',
replacementDomain, systemLanguage)
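A minimal usage sketch, assuming the post object passes the surrounding checks (the post content and the nitter hostname are made up):

# replaceTwitter as defined above
postJsonObject = {
    'object': {
        'content': 'See https://twitter.com/example/status/123',
        'contentMap': {'en': 'See https://twitter.com/example/status/123'}
    }
}
replaceTwitter(postJsonObject, 'nitter.example.net', 'en')
# the content now references https://nitter.example.net/example/status/123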
def _removeMetaData(imageFilename: str, outputFilename: str) -> None:
"""Attempts to do this with pure python didn't work well,
so it's better to use a dedicated tool if one is installed

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Metadata"

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Core"
@ -23,7 +23,8 @@ from person import getActorJson
def _moveFollowingHandlesForAccount(baseDir: str, nickname: str, domain: str,
session,
httpPrefix: str, cachedWebfingers: {},
debug: bool) -> int:
debug: bool,
signingPrivateKeyPem: str) -> int:
"""Goes through all follows for an account and updates any that have moved
"""
ctr = 0
@ -38,14 +39,14 @@ def _moveFollowingHandlesForAccount(baseDir: str, nickname: str, domain: str,
_updateMovedHandle(baseDir, nickname, domain,
followHandle, session,
httpPrefix, cachedWebfingers,
debug)
debug, signingPrivateKeyPem)
return ctr
def _updateMovedHandle(baseDir: str, nickname: str, domain: str,
handle: str, session,
httpPrefix: str, cachedWebfingers: {},
debug: bool) -> int:
debug: bool, signingPrivateKeyPem: str) -> int:
"""Check if an account has moved, and if so then alter following.txt
for each account.
Returns 1 if moved, 0 otherwise
@ -59,7 +60,8 @@ def _updateMovedHandle(baseDir: str, nickname: str, domain: str,
handle = handle[1:]
wfRequest = webfingerHandle(session, handle,
httpPrefix, cachedWebfingers,
None, __version__, debug, False)
domain, __version__, debug, False,
signingPrivateKeyPem)
if not wfRequest:
print('updateMovedHandle unable to webfinger ' + handle)
return ctr
@ -83,7 +85,8 @@ def _updateMovedHandle(baseDir: str, nickname: str, domain: str,
if httpPrefix == 'gnunet':
gnunet = True
personJson = \
getActorJson(domain, personUrl, httpPrefix, gnunet, debug)
getActorJson(domain, personUrl, httpPrefix, gnunet, debug, False,
signingPrivateKeyPem)
if not personJson:
return ctr
if not personJson.get('movedTo'):
@ -172,7 +175,7 @@ def _updateMovedHandle(baseDir: str, nickname: str, domain: str,
def migrateAccounts(baseDir: str, session,
httpPrefix: str, cachedWebfingers: {},
debug: bool) -> int:
debug: bool, signingPrivateKeyPem: str) -> int:
"""If followed accounts change then this modifies the
following lists for each account accordingly.
Returns the number of accounts migrated
@ -188,6 +191,7 @@ def migrateAccounts(baseDir: str, session,
ctr += \
_moveFollowingHandlesForAccount(baseDir, nickname, domain,
session, httpPrefix,
cachedWebfingers, debug)
cachedWebfingers, debug,
signingPrivateKeyPem)
break
return ctr
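
The signingPrivateKeyPem parameter threaded through the migration functions above is used for the webfinger and actor fetches; the migration decision itself only inspects the actor's movedTo field and then rewrites the handle in following.txt. A rough illustration of those two steps, with hypothetical helper names rather than Epicyon's own:

def getMovedToSketch(actorJson: {}) -> str:
    # return the new actor URL if the account announces a move, else ''
    if not actorJson or not actorJson.get('movedTo'):
        return ''
    movedTo = actorJson['movedTo']
    return movedTo if isinstance(movedTo, str) else ''

def rewriteFollowingSketch(followingLines: [], oldHandle: str,
                           newHandle: str) -> []:
    # swap a moved handle for its new one in a following.txt style list
    return [newHandle if line.strip() == oldHandle else line.strip()
            for line in followingLines]

actor = {'id': 'https://old.example/users/alice',
         'movedTo': 'https://new.example/users/alice'}
print(getMovedToSketch(actor))
print(rewriteFollowingSketch(['alice@old.example', 'bob@other.example'],
                             'alice@old.example', 'alice@new.example'))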

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Web Interface Columns"
@ -526,6 +526,7 @@ def _convertRSStoActivityPub(baseDir: str, httpPrefix: str,
"""Converts rss items in a newswire into posts
"""
if not newswire:
print('No newswire to convert')
return
basePath = baseDir + '/accounts/news@' + domain + '/outbox'
@ -542,9 +543,18 @@ def _convertRSStoActivityPub(baseDir: str, httpPrefix: str,
dateStr = dateStr.replace(' ', 'T')
dateStr = dateStr.replace('+00:00', 'Z')
else:
dateStrWithOffset = \
datetime.datetime.strptime(dateStr, "%Y-%m-%d %H:%M:%S%z")
dateStr = dateStrWithOffset.strftime("%Y-%m-%dT%H:%M:%SZ")
try:
dateStrWithOffset = \
datetime.datetime.strptime(dateStr, "%Y-%m-%d %H:%M:%S%z")
except BaseException:
print('Newswire strptime failed ' + str(dateStr))
continue
try:
dateStr = dateStrWithOffset.strftime("%Y-%m-%dT%H:%M:%SZ")
except BaseException:
print('Newswire dateStrWithOffset failed ' +
str(dateStrWithOffset))
continue
statusNumber, published = getStatusNumber(dateStr)
newPostId = \
@ -702,7 +712,10 @@ def _convertRSStoActivityPub(baseDir: str, httpPrefix: str,
blog['object']['arrived'])
else:
if os.path.isfile(filename + '.arrived'):
os.remove(filename + '.arrived')
try:
os.remove(filename + '.arrived')
except BaseException:
pass
# setting the url here links to the activitypub object
# stored locally
@ -750,6 +763,7 @@ def runNewswireDaemon(baseDir: str, httpd,
print('Newswire daemon session established')
# try to update the feeds
print('Updating newswire feeds')
newNewswire = \
getDictFromNewswire(httpd.session, baseDir, domain,
httpd.maxNewswirePostsPerSource,
@ -761,16 +775,22 @@ def runNewswireDaemon(baseDir: str, httpd,
httpd.systemLanguage)
if not httpd.newswire:
print('Newswire feeds not updated')
if os.path.isfile(newswireStateFilename):
print('Loading newswire from file')
httpd.newswire = loadJson(newswireStateFilename)
print('Merging with previous newswire')
_mergeWithPreviousNewswire(httpd.newswire, newNewswire)
httpd.newswire = newNewswire
if newNewswire:
saveJson(httpd.newswire, newswireStateFilename)
print('Newswire updated')
else:
print('No new newswire')
print('Converting newswire to activitypub format')
_convertRSStoActivityPub(baseDir,
httpPrefix, domain, port,
newNewswire, translate,
@ -792,6 +812,7 @@ def runNewswireDaemon(baseDir: str, httpd,
archiveDir = baseDir + '/archive'
archiveSubdir = \
archiveDir + '/accounts/news@' + domain + '/outbox'
print('Archiving news posts')
archivePostsForPerson(httpPrefix, 'news',
domain, baseDir, 'outbox',
archiveSubdir,
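
The try/except guards added above make the newswire conversion tolerant of feed items whose dates do not match the expected '%Y-%m-%d %H:%M:%S%z' layout: a bad date now skips the item instead of crashing the daemon. A self-contained sketch of that defensive conversion (the function name is assumed for illustration; Python 3.7+ accepts the colon in the UTC offset):

import datetime

def rssDateToStatusDateSketch(dateStr: str) -> str:
    # convert '2021-10-02 14:20:20+00:00' style dates to ISO 'Z' form,
    # returning '' when the string does not parse
    try:
        withOffset = \
            datetime.datetime.strptime(dateStr, "%Y-%m-%d %H:%M:%S%z")
        return withOffset.strftime("%Y-%m-%dT%H:%M:%SZ")
    except (ValueError, TypeError):
        return ''

print(rssDateToStatusDateSketch('2021-10-02 14:20:20+00:00'))  # 2021-10-02T14:20:20Z
print(rssDateToStatusDateSketch('not a date'))                 # ''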

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Web Interface Columns"
@ -192,9 +192,9 @@ def parseFeedDate(pubDate: str) -> str:
formats = ("%a, %d %b %Y %H:%M:%S %z",
"%a, %d %b %Y %H:%M:%S EST",
"%a, %d %b %Y %H:%M:%S UT",
"%a, %d %b %Y %H:%M:%S GMT",
"%Y-%m-%dT%H:%M:%SZ",
"%Y-%m-%dT%H:%M:%S%z")
publishedDate = None
for dateFormat in formats:
if ',' in pubDate and ',' not in dateFormat:
@ -207,6 +207,8 @@ def parseFeedDate(pubDate: str) -> str:
continue
if 'EST' not in pubDate and 'EST' in dateFormat:
continue
if 'GMT' not in pubDate and 'GMT' in dateFormat:
continue
if 'EST' in pubDate and 'EST' not in dateFormat:
continue
if 'UT' not in pubDate and 'UT' in dateFormat:
@ -218,8 +220,6 @@ def parseFeedDate(pubDate: str) -> str:
publishedDate = \
datetime.strptime(pubDate, dateFormat)
except BaseException:
print('WARN: unrecognized date format: ' +
pubDate + ' ' + dateFormat)
continue
if publishedDate:
@ -238,6 +238,8 @@ def parseFeedDate(pubDate: str) -> str:
pubDateStr = str(publishedDate)
if not pubDateStr.endswith('+00:00'):
pubDateStr += '+00:00'
else:
print('WARN: unrecognized date format: ' + pubDate)
return pubDateStr
@ -1028,7 +1030,10 @@ def _addBlogsToNewswire(baseDir: str, domain: str, newswire: {},
else:
# remove the file if there is nothing to moderate
if os.path.isfile(newswireModerationFilename):
os.remove(newswireModerationFilename)
try:
os.remove(newswireModerationFilename)
except BaseException:
pass
def getDictFromNewswire(session, baseDir: str, domain: str,
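
parseFeedDate works by trying each candidate strptime format in turn, with the token checks above (GMT, EST, UT) filtering out formats that obviously cannot match, and the unrecognized-format warning now printed once at the end rather than on every attempt. A trimmed-down sketch of the same loop, assuming only a few of the formats:

from datetime import datetime

FEED_DATE_FORMATS = ("%a, %d %b %Y %H:%M:%S %z",
                     "%a, %d %b %Y %H:%M:%S GMT",
                     "%Y-%m-%dT%H:%M:%SZ")

def parseFeedDateSketch(pubDate: str) -> str:
    # try each known format until one parses; normalize to a '+00:00' suffix
    for dateFormat in FEED_DATE_FORMATS:
        try:
            publishedDate = datetime.strptime(pubDate, dateFormat)
        except ValueError:
            continue
        pubDateStr = str(publishedDate)
        if not pubDateStr.endswith('+00:00'):
            pubDateStr += '+00:00'
        return pubDateStr
    print('WARN: unrecognized date format: ' + pubDate)
    return ''

print(parseFeedDateSketch('Sat, 02 Oct 2021 14:20:20 GMT'))
# 2021-10-02 14:20:20+00:00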

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Calendar"

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -0,0 +1,95 @@
<?xml version="1.0"?>
<Ontology xmlns="http://www.w3.org/2002/07/owl#"
xml:base="http://static.datafoodconsortium.org/ontologies/DFC_FullModel.owl"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:xml="http://www.w3.org/XML/1998/namespace"
xmlns:xsd="http://www.w3.org/2001/XMLSchema#"
xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#"
ontologyIRI="http://static.datafoodconsortium.org/ontologies/DFC_FullModel.owl">
<Prefix name="" IRI="http://static.datafoodconsortium.org/ontologies/DFC_FullModel.owl#"/>
<Prefix name="cc" IRI="http://creativecommons.org/ns#"/>
<Prefix name="dc" IRI="http://purl.org/dc/terms/"/>
<Prefix name="owl" IRI="http://www.w3.org/2002/07/owl#"/>
<Prefix name="rdf" IRI="http://www.w3.org/1999/02/22-rdf-syntax-ns#"/>
<Prefix name="xml" IRI="http://www.w3.org/XML/1998/namespace"/>
<Prefix name="xsd" IRI="http://www.w3.org/2001/XMLSchema#"/>
<Prefix name="xsp" IRI="http://www.owl-ontologies.com/2005/08/07/xsp.owl#"/>
<Prefix name="foaf" IRI="http://xmlns.com/foaf/0.1/"/>
<Prefix name="rdfs" IRI="http://www.w3.org/2000/01/rdf-schema#"/>
<Prefix name="swrl" IRI="http://www.w3.org/2003/11/swrl#"/>
<Prefix name="vann" IRI="http://purl.org/vocab/vann/"/>
<Prefix name="swrlb" IRI="http://www.w3.org/2003/11/swrlb#"/>
<Prefix name="protege" IRI="http://protege.stanford.edu/plugins/owl/protege#"/>
<Import>http://static.datafoodconsortium.org/ontologies/DFC_BusinessOntology.owl</Import>
<Import>http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl</Import>
<Import>http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl</Import>
<Annotation>
<AnnotationProperty abbreviatedIRI="cc:license"/>
<IRI>https://www.gnu.org/licenses/agpl-3.0.en.html</IRI>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="dc:contributor"/>
<IRI>http://static.datafoodconsortium.org/data/publication.rdf#rachelA</IRI>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="dc:contributor"/>
<IRI>http://static.datafoodconsortium.org/data/publication.rdf#simonL</IRI>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="dc:creator"/>
<IRI>http://static.datafoodconsortium.org/data/publication.rdf#bernardC</IRI>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="dc:description"/>
<Literal xml:lang="en">A common vocabulary for digital food platforms</Literal>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="dc:issued"/>
<Literal datatypeIRI="http://www.w3.org/2001/XMLSchema#date">2018-05-28</Literal>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="dc:modified"/>
<Literal datatypeIRI="http://www.w3.org/2001/XMLSchema#date">2019-10-21</Literal>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="dc:publisher"/>
<IRI>http://static.datafoodconsortium.org/data/publication.rdf#dataFoodConsortium</IRI>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="dc:title"/>
<Literal xml:lang="en">Data Food Consortium</Literal>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="vann:preferredNamespacePrefix"/>
<Literal>dfc</Literal>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="rdfs:comment"/>
<Literal xml:lang="en">A common vocabulary for digital food platforms</Literal>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="owl:versionInfo"/>
<Literal datatypeIRI="http://www.w3.org/2001/XMLSchema#decimal">4.0</Literal>
</Annotation>
<ClassAssertion>
<Class abbreviatedIRI="foaf:Person"/>
<NamedIndividual IRI="http://static.datafoodconsortium.org/data/publication.rdf#bernardC"/>
</ClassAssertion>
<ClassAssertion>
<Class abbreviatedIRI="foaf:Organization"/>
<NamedIndividual IRI="http://static.datafoodconsortium.org/data/publication.rdf#dataFoodConsortium"/>
</ClassAssertion>
<ClassAssertion>
<Class abbreviatedIRI="foaf:Person"/>
<NamedIndividual IRI="http://static.datafoodconsortium.org/data/publication.rdf#rachelA"/>
</ClassAssertion>
<ClassAssertion>
<Class abbreviatedIRI="foaf:Person"/>
<NamedIndividual IRI="http://static.datafoodconsortium.org/data/publication.rdf#simonL"/>
</ClassAssertion>
</Ontology>
<!-- Generated by the OWL API (version 4.5.9.2019-02-01T07:24:44Z) https://github.com/owlcs/owlapi -->

View File

@ -0,0 +1,84 @@
<?xml version="1.0"?>
<rdf:RDF xmlns="http://static.datafoodconsortium.org/ontologies/DFC_FullModel.owl#"
xml:base="http://static.datafoodconsortium.org/ontologies/DFC_FullModel.owl"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:dc="http://purl.org/dc/terms/"
xmlns:owl="http://www.w3.org/2002/07/owl#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:xml="http://www.w3.org/XML/1998/namespace"
xmlns:xsd="http://www.w3.org/2001/XMLSchema#"
xmlns:xsp="http://www.owl-ontologies.com/2005/08/07/xsp.owl#"
xmlns:foaf="http://xmlns.com/foaf/0.1/"
xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#"
xmlns:swrl="http://www.w3.org/2003/11/swrl#"
xmlns:vann="http://purl.org/vocab/vann/"
xmlns:swrlb="http://www.w3.org/2003/11/swrlb#"
xmlns:protege="http://protege.stanford.edu/plugins/owl/protege#">
<owl:Ontology rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_FullModel.owl">
<rdf:type rdf:resource="http://purl.org/vocommons/voaf#Vocabulary"/>
<vann:preferredNamespacePrefix>dfc</vann:preferredNamespacePrefix>
<vann:preferredNamespaceUri>http://static.datafoodconsortium.org/ontologies/DFC_FullModel.owl#</vann:preferredNamespaceUri>
<owl:imports rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_BusinessOntology.owl"/>
<owl:imports rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl"/>
<owl:imports rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl"/>
<cc:license rdf:resource="https://www.gnu.org/licenses/agpl-3.0.en.html"/>
<dc:contributor rdf:resource="http://static.datafoodconsortium.org/data/publication.rdf#rachelA"/>
<dc:contributor rdf:resource="http://static.datafoodconsortium.org/data/publication.rdf#simonL"/>
<dc:creator rdf:resource="http://static.datafoodconsortium.org/data/publication.rdf#bernardC"/>
<dc:description xml:lang="en">A common vocabulary for digital food platforms</dc:description>
<dc:issued rdf:datatype="http://www.w3.org/2001/XMLSchema#date">2018-05-28</dc:issued>
<dc:modified rdf:datatype="http://www.w3.org/2001/XMLSchema#date">2019-10-21</dc:modified>
<dc:publisher rdf:resource="http://static.datafoodconsortium.org/data/publication.rdf#dataFoodConsortium"/>
<dc:title xml:lang="en">Data Food Consortium</dc:title>
<rdfs:comment xml:lang="en">A common vocabulary for digital food platforms</rdfs:comment>
<owl:versionInfo rdf:datatype="http://www.w3.org/2001/XMLSchema#decimal">4.0</owl:versionInfo>
</owl:Ontology>
<!--
///////////////////////////////////////////////////////////////////////////////////////
//
// Individuals
//
///////////////////////////////////////////////////////////////////////////////////////
-->
<!-- http://static.datafoodconsortium.org/data/publication.rdf#bernardC -->
<rdf:Description rdf:about="http://static.datafoodconsortium.org/data/publication.rdf#bernardC">
<rdf:type rdf:resource="http://xmlns.com/foaf/0.1/Person"/>
</rdf:Description>
<!-- http://static.datafoodconsortium.org/data/publication.rdf#dataFoodConsortium -->
<rdf:Description rdf:about="http://static.datafoodconsortium.org/data/publication.rdf#dataFoodConsortium">
<rdf:type rdf:resource="http://xmlns.com/foaf/0.1/Organization"/>
</rdf:Description>
<!-- http://static.datafoodconsortium.org/data/publication.rdf#rachelA -->
<rdf:Description rdf:about="http://static.datafoodconsortium.org/data/publication.rdf#rachelA">
<rdf:type rdf:resource="http://xmlns.com/foaf/0.1/Person"/>
</rdf:Description>
<!-- http://static.datafoodconsortium.org/data/publication.rdf#simonL -->
<rdf:Description rdf:about="http://static.datafoodconsortium.org/data/publication.rdf#simonL">
<rdf:type rdf:resource="http://xmlns.com/foaf/0.1/Person"/>
</rdf:Description>
</rdf:RDF>
<!-- Generated by the OWL API (version 4.5.9.2019-02-01T07:24:44Z) https://github.com/owlcs/owlapi -->

View File

@ -0,0 +1,549 @@
<?xml version="1.0"?>
<Ontology xmlns="http://www.w3.org/2002/07/owl#"
xml:base="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:xml="http://www.w3.org/XML/1998/namespace"
xmlns:xsd="http://www.w3.org/2001/XMLSchema#"
xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#"
ontologyIRI="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl">
<Prefix name="" IRI="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl"/>
<Prefix name="cc" IRI="http://creativecommons.org/ns#"/>
<Prefix name="dc" IRI="http://purl.org/dc/terms/"/>
<Prefix name="owl" IRI="http://www.w3.org/2002/07/owl#"/>
<Prefix name="rdf" IRI="http://www.w3.org/1999/02/22-rdf-syntax-ns#"/>
<Prefix name="xml" IRI="http://www.w3.org/XML/1998/namespace"/>
<Prefix name="xsd" IRI="http://www.w3.org/2001/XMLSchema#"/>
<Prefix name="xsp" IRI="http://www.owl-ontologies.com/2005/08/07/xsp.owl#"/>
<Prefix name="foaf" IRI="http://xmlns.com/foaf/0.1/"/>
<Prefix name="rdfs" IRI="http://www.w3.org/2000/01/rdf-schema#"/>
<Prefix name="swrl" IRI="http://www.w3.org/2003/11/swrl#"/>
<Prefix name="vann" IRI="http://purl.org/vocab/vann/"/>
<Prefix name="swrlb" IRI="http://www.w3.org/2003/11/swrlb#"/>
<Prefix name="protege" IRI="http://protege.stanford.edu/plugins/owl/protege#"/>
<Import>http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl</Import>
<Annotation>
<AnnotationProperty abbreviatedIRI="cc:license"/>
<IRI>https://www.gnu.org/licenses/agpl-3.0.en.html</IRI>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="dc:contributor"/>
<IRI>http://static.datafoodconsortium.org/data/publication.rdf#rachelA</IRI>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="dc:contributor"/>
<IRI>http://static.datafoodconsortium.org/data/publication.rdf#simonL</IRI>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="dc:creator"/>
<IRI>http://static.datafoodconsortium.org/data/publication.rdf#bernardC</IRI>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="dc:description"/>
<Literal xml:lang="en">A common vocabulary for digital food platforms (Product Glossary Part)</Literal>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="dc:issued"/>
<Literal datatypeIRI="http://www.w3.org/2001/XMLSchema#date">2018-05-28</Literal>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="dc:modified"/>
<Literal datatypeIRI="http://www.w3.org/2001/XMLSchema#date">2019-10-21</Literal>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="dc:publisher"/>
<IRI>http://static.datafoodconsortium.org/data/publication.rdf#dataFoodConsortium</IRI>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="dc:title"/>
<Literal xml:lang="en">Data Food Consortium Product</Literal>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="vann:preferredNamespacePrefix"/>
<Literal>dfc-p</Literal>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="rdfs:comment"/>
<Literal xml:lang="en">A common vocabulary for digital food platforms (Product Glossary Part)</Literal>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="owl:versionInfo"/>
<Literal datatypeIRI="http://www.w3.org/2001/XMLSchema#decimal">4.0</Literal>
</Annotation>
<Declaration>
<Class IRI="#Certification"/>
</Declaration>
<Declaration>
<Class IRI="#DFC_ProductGlossary_Facet"/>
</Declaration>
<Declaration>
<Class IRI="#DFC_ProductGlossary_Measure"/>
</Declaration>
<Declaration>
<Class IRI="#DFC_ProductGlossary_Type"/>
</Declaration>
<Declaration>
<Class IRI="#Dimension"/>
</Declaration>
<Declaration>
<Class IRI="#GlobalGenericOrigin"/>
</Declaration>
<Declaration>
<Class IRI="#NatureOrigin"/>
</Declaration>
<Declaration>
<Class IRI="#PartOrigin"/>
</Declaration>
<Declaration>
<Class IRI="#Process"/>
</Declaration>
<Declaration>
<Class IRI="#ProductType"/>
</Declaration>
<Declaration>
<Class IRI="#TerritorialOrigin"/>
</Declaration>
<Declaration>
<Class IRI="#Unit"/>
</Declaration>
<Declaration>
<ObjectProperty IRI="#DFC_ProductGlossary_ObjectProperty"/>
</Declaration>
<Declaration>
<ObjectProperty IRI="#generalizes"/>
</Declaration>
<Declaration>
<ObjectProperty IRI="#measuredBy"/>
</Declaration>
<Declaration>
<ObjectProperty IRI="#measures"/>
</Declaration>
<Declaration>
<ObjectProperty IRI="#specializes"/>
</Declaration>
<Declaration>
<NamedIndividual IRI="http://static.datafoodconsortium.org/data/publication.rdf#bernardC"/>
</Declaration>
<Declaration>
<NamedIndividual IRI="http://static.datafoodconsortium.org/data/publication.rdf#rachelA"/>
</Declaration>
<Declaration>
<AnnotationProperty abbreviatedIRI="dc:contributor"/>
</Declaration>
<SubClassOf>
<Class IRI="#Certification"/>
<Class IRI="#DFC_ProductGlossary_Facet"/>
</SubClassOf>
<SubClassOf>
<Class IRI="#Certification"/>
<ObjectAllValuesFrom>
<ObjectProperty IRI="#generalizes"/>
<Class IRI="#Certification"/>
</ObjectAllValuesFrom>
</SubClassOf>
<SubClassOf>
<Class IRI="#Certification"/>
<ObjectAllValuesFrom>
<ObjectProperty IRI="#specializes"/>
<Class IRI="#Certification"/>
</ObjectAllValuesFrom>
</SubClassOf>
<SubClassOf>
<Class IRI="#DFC_ProductGlossary_Facet"/>
<Class IRI="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#RepresentedThing"/>
</SubClassOf>
<SubClassOf>
<Class IRI="#DFC_ProductGlossary_Facet"/>
<ObjectAllValuesFrom>
<ObjectProperty IRI="#generalizes"/>
<Class IRI="#DFC_ProductGlossary_Facet"/>
</ObjectAllValuesFrom>
</SubClassOf>
<SubClassOf>
<Class IRI="#DFC_ProductGlossary_Facet"/>
<ObjectAllValuesFrom>
<ObjectProperty IRI="#specializes"/>
<Class IRI="#DFC_ProductGlossary_Facet"/>
</ObjectAllValuesFrom>
</SubClassOf>
<SubClassOf>
<Class IRI="#DFC_ProductGlossary_Facet"/>
<ObjectExactCardinality cardinality="1">
<ObjectProperty IRI="#specializes"/>
</ObjectExactCardinality>
</SubClassOf>
<SubClassOf>
<Class IRI="#DFC_ProductGlossary_Measure"/>
<Class IRI="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#RepresentedThing"/>
</SubClassOf>
<SubClassOf>
<Class IRI="#DFC_ProductGlossary_Type"/>
<Class IRI="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#RepresentedThing"/>
</SubClassOf>
<SubClassOf>
<Class IRI="#DFC_ProductGlossary_Type"/>
<ObjectAllValuesFrom>
<ObjectProperty IRI="#generalizes"/>
<Class IRI="#DFC_ProductGlossary_Type"/>
</ObjectAllValuesFrom>
</SubClassOf>
<SubClassOf>
<Class IRI="#DFC_ProductGlossary_Type"/>
<ObjectAllValuesFrom>
<ObjectProperty IRI="#specializes"/>
<Class IRI="#DFC_ProductGlossary_Type"/>
</ObjectAllValuesFrom>
</SubClassOf>
<SubClassOf>
<Class IRI="#DFC_ProductGlossary_Type"/>
<ObjectExactCardinality cardinality="1">
<ObjectProperty IRI="#specializes"/>
</ObjectExactCardinality>
</SubClassOf>
<SubClassOf>
<Class IRI="#Dimension"/>
<Class IRI="#DFC_ProductGlossary_Measure"/>
</SubClassOf>
<SubClassOf>
<Class IRI="#Dimension"/>
<ObjectAllValuesFrom>
<ObjectProperty IRI="#measuredBy"/>
<Class IRI="#Unit"/>
</ObjectAllValuesFrom>
</SubClassOf>
<SubClassOf>
<Class IRI="#GlobalGenericOrigin"/>
<Class IRI="#DFC_ProductGlossary_Facet"/>
</SubClassOf>
<SubClassOf>
<Class IRI="#GlobalGenericOrigin"/>
<ObjectSomeValuesFrom>
<ObjectProperty IRI="#specializes"/>
<Class IRI="#GlobalGenericOrigin"/>
</ObjectSomeValuesFrom>
</SubClassOf>
<SubClassOf>
<Class IRI="#GlobalGenericOrigin"/>
<ObjectAllValuesFrom>
<ObjectProperty IRI="#generalizes"/>
<Class IRI="#GlobalGenericOrigin"/>
</ObjectAllValuesFrom>
</SubClassOf>
<SubClassOf>
<Class IRI="#NatureOrigin"/>
<Class IRI="#DFC_ProductGlossary_Facet"/>
</SubClassOf>
<SubClassOf>
<Class IRI="#NatureOrigin"/>
<ObjectAllValuesFrom>
<ObjectProperty IRI="#generalizes"/>
<Class IRI="#NatureOrigin"/>
</ObjectAllValuesFrom>
</SubClassOf>
<SubClassOf>
<Class IRI="#NatureOrigin"/>
<ObjectAllValuesFrom>
<ObjectProperty IRI="#specializes"/>
<Class IRI="#NatureOrigin"/>
</ObjectAllValuesFrom>
</SubClassOf>
<SubClassOf>
<Class IRI="#PartOrigin"/>
<Class IRI="#DFC_ProductGlossary_Facet"/>
</SubClassOf>
<SubClassOf>
<Class IRI="#PartOrigin"/>
<ObjectAllValuesFrom>
<ObjectProperty IRI="#generalizes"/>
<Class IRI="#PartOrigin"/>
</ObjectAllValuesFrom>
</SubClassOf>
<SubClassOf>
<Class IRI="#PartOrigin"/>
<ObjectAllValuesFrom>
<ObjectProperty IRI="#specializes"/>
<Class IRI="#PartOrigin"/>
</ObjectAllValuesFrom>
</SubClassOf>
<SubClassOf>
<Class IRI="#Process"/>
<Class IRI="#DFC_ProductGlossary_Facet"/>
</SubClassOf>
<SubClassOf>
<Class IRI="#Process"/>
<ObjectAllValuesFrom>
<ObjectProperty IRI="#generalizes"/>
<Class IRI="#Process"/>
</ObjectAllValuesFrom>
</SubClassOf>
<SubClassOf>
<Class IRI="#Process"/>
<ObjectAllValuesFrom>
<ObjectProperty IRI="#specializes"/>
<Class IRI="#Process"/>
</ObjectAllValuesFrom>
</SubClassOf>
<SubClassOf>
<Class IRI="#ProductType"/>
<Class IRI="#DFC_ProductGlossary_Type"/>
</SubClassOf>
<SubClassOf>
<Class IRI="#ProductType"/>
<ObjectAllValuesFrom>
<ObjectProperty IRI="#generalizes"/>
<Class IRI="#ProductType"/>
</ObjectAllValuesFrom>
</SubClassOf>
<SubClassOf>
<Class IRI="#ProductType"/>
<ObjectAllValuesFrom>
<ObjectProperty IRI="#specializes"/>
<Class IRI="#ProductType"/>
</ObjectAllValuesFrom>
</SubClassOf>
<SubClassOf>
<Class IRI="#TerritorialOrigin"/>
<Class IRI="#DFC_ProductGlossary_Facet"/>
</SubClassOf>
<SubClassOf>
<Class IRI="#TerritorialOrigin"/>
<ObjectAllValuesFrom>
<ObjectProperty IRI="#generalizes"/>
<Class IRI="#TerritorialOrigin"/>
</ObjectAllValuesFrom>
</SubClassOf>
<SubClassOf>
<Class IRI="#TerritorialOrigin"/>
<ObjectAllValuesFrom>
<ObjectProperty IRI="#specializes"/>
<Class IRI="#TerritorialOrigin"/>
</ObjectAllValuesFrom>
</SubClassOf>
<SubClassOf>
<Class IRI="#Unit"/>
<Class IRI="#DFC_ProductGlossary_Measure"/>
</SubClassOf>
<SubClassOf>
<Class IRI="#Unit"/>
<ObjectAllValuesFrom>
<ObjectProperty IRI="#measures"/>
<Class IRI="#Dimension"/>
</ObjectAllValuesFrom>
</SubClassOf>
<SubClassOf>
<Class IRI="#Unit"/>
<ObjectExactCardinality cardinality="1">
<ObjectProperty IRI="#measures"/>
</ObjectExactCardinality>
</SubClassOf>
<ClassAssertion>
<Class abbreviatedIRI="foaf:Person"/>
<NamedIndividual IRI="http://static.datafoodconsortium.org/data/publication.rdf#bernardC"/>
</ClassAssertion>
<ClassAssertion>
<Class abbreviatedIRI="foaf:Organization"/>
<NamedIndividual IRI="http://static.datafoodconsortium.org/data/publication.rdf#dataFoodConsortium"/>
</ClassAssertion>
<ClassAssertion>
<Class abbreviatedIRI="foaf:Person"/>
<NamedIndividual IRI="http://static.datafoodconsortium.org/data/publication.rdf#rachelA"/>
</ClassAssertion>
<ClassAssertion>
<Class abbreviatedIRI="foaf:Person"/>
<NamedIndividual IRI="http://static.datafoodconsortium.org/data/publication.rdf#simonL"/>
</ClassAssertion>
<SubObjectPropertyOf>
<ObjectProperty IRI="#generalizes"/>
<ObjectProperty IRI="#DFC_ProductGlossary_ObjectProperty"/>
</SubObjectPropertyOf>
<SubObjectPropertyOf>
<ObjectProperty IRI="#measuredBy"/>
<ObjectProperty IRI="#DFC_ProductGlossary_ObjectProperty"/>
</SubObjectPropertyOf>
<SubObjectPropertyOf>
<ObjectProperty IRI="#measures"/>
<ObjectProperty IRI="#DFC_ProductGlossary_ObjectProperty"/>
</SubObjectPropertyOf>
<SubObjectPropertyOf>
<ObjectProperty IRI="#specializes"/>
<ObjectProperty IRI="#DFC_ProductGlossary_ObjectProperty"/>
</SubObjectPropertyOf>
<InverseObjectProperties>
<ObjectProperty IRI="#generalizes"/>
<ObjectProperty IRI="#specializes"/>
</InverseObjectProperties>
<InverseObjectProperties>
<ObjectProperty IRI="#measuredBy"/>
<ObjectProperty IRI="#measures"/>
</InverseObjectProperties>
<ObjectPropertyDomain>
<ObjectProperty IRI="#generalizes"/>
<Class IRI="#DFC_ProductGlossary_Facet"/>
</ObjectPropertyDomain>
<ObjectPropertyDomain>
<ObjectProperty IRI="#measuredBy"/>
<Class IRI="#Dimension"/>
</ObjectPropertyDomain>
<ObjectPropertyDomain>
<ObjectProperty IRI="#measures"/>
<Class IRI="#Unit"/>
</ObjectPropertyDomain>
<ObjectPropertyDomain>
<ObjectProperty IRI="#specializes"/>
<Class IRI="#DFC_ProductGlossary_Facet"/>
</ObjectPropertyDomain>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#Certification</IRI>
<Literal xml:lang="en">certification</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#Certification</IRI>
<Literal xml:lang="fr">certification</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#DFC_ProductGlossary_Facet</IRI>
<Literal xml:lang="en">Subject of the facets thesaurus</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#DFC_ProductGlossary_Facet</IRI>
<Literal xml:lang="fr">Sujet du Thésaurus à Facettes</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#DFC_ProductGlossary_Measure</IRI>
<Literal xml:lang="fr">thesaurus des unités de mesure</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#DFC_ProductGlossary_Measure</IRI>
<Literal xml:lang="en">unit of measures thesaurus</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#Dimension</IRI>
<Literal xml:lang="en">dimension</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#Dimension</IRI>
<Literal xml:lang="fr">dimension</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#GlobalGenericOrigin</IRI>
<Literal xml:lang="en">Global generic origin</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#GlobalGenericOrigin</IRI>
<Literal xml:lang="fr">Origines génériques globales</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#NatureOrigin</IRI>
<Literal xml:lang="en">natural &quot;living&quot; origin</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#NatureOrigin</IRI>
<Literal xml:lang="fr">source &quot;vivante&quot; d&apos;origine</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#PartOrigin</IRI>
<Literal xml:lang="en">part of natural &quot;living&quot; origin concerned</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#PartOrigin</IRI>
<Literal xml:lang="fr">partie de la source &quot;vivante&quot; d&apos;origine concernée</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#Process</IRI>
<Literal xml:lang="en">process applied</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#Process</IRI>
<Literal xml:lang="fr">procédé appliqué</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#ProductType</IRI>
<Literal xml:lang="en">product type (general taxonomy)</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#ProductType</IRI>
<Literal xml:lang="fr">type de produit (classification générale)</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#TerritorialOrigin</IRI>
<Literal xml:lang="fr">origine territoriale</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#TerritorialOrigin</IRI>
<Literal xml:lang="en">territorial origin</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#Unit</IRI>
<Literal xml:lang="en">unit</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#Unit</IRI>
<Literal xml:lang="fr">unité</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#generalizes</IRI>
<Literal xml:lang="en">generalizes</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#generalizes</IRI>
<Literal xml:lang="fr">généralise</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#measuredBy</IRI>
<Literal xml:lang="en">measured by</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#measuredBy</IRI>
<Literal xml:lang="fr">mesuré en</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#measures</IRI>
<Literal xml:lang="en">measures</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#measures</IRI>
<Literal xml:lang="fr">mesure</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#specializes</IRI>
<Literal xml:lang="en">specializes</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#specializes</IRI>
<Literal xml:lang="fr">spécialise</Literal>
</AnnotationAssertion>
</Ontology>
<!-- Generated by the OWL API (version 4.5.9.2019-02-01T07:24:44Z) https://github.com/owlcs/owlapi -->

View File

@ -0,0 +1,428 @@
<?xml version="1.0"?>
<rdf:RDF xmlns="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl"
xml:base="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:dc="http://purl.org/dc/terms/"
xmlns:owl="http://www.w3.org/2002/07/owl#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:xml="http://www.w3.org/XML/1998/namespace"
xmlns:xsd="http://www.w3.org/2001/XMLSchema#"
xmlns:xsp="http://www.owl-ontologies.com/2005/08/07/xsp.owl#"
xmlns:foaf="http://xmlns.com/foaf/0.1/"
xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#"
xmlns:swrl="http://www.w3.org/2003/11/swrl#"
xmlns:vann="http://purl.org/vocab/vann/"
xmlns:swrlb="http://www.w3.org/2003/11/swrlb#"
xmlns:protege="http://protege.stanford.edu/plugins/owl/protege#">
<owl:Ontology rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl">
<rdf:type rdf:resource="http://purl.org/vocommons/voaf#Vocabulary"/>
<vann:preferredNamespacePrefix>dfc-p</vann:preferredNamespacePrefix>
<vann:preferredNamespaceUri>http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#</vann:preferredNamespaceUri>
<owl:imports rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl"/>
<cc:license rdf:resource="https://www.gnu.org/licenses/agpl-3.0.en.html"/>
<dc:contributor rdf:resource="http://static.datafoodconsortium.org/data/publication.rdf#rachelA"/>
<dc:contributor rdf:resource="http://static.datafoodconsortium.org/data/publication.rdf#simonL"/>
<dc:creator rdf:resource="http://static.datafoodconsortium.org/data/publication.rdf#bernardC"/>
<dc:description xml:lang="en">A common vocabulary for digital food platforms (Product Glossary Part)</dc:description>
<dc:issued rdf:datatype="http://www.w3.org/2001/XMLSchema#date">2018-05-28</dc:issued>
<dc:modified rdf:datatype="http://www.w3.org/2001/XMLSchema#date">2019-10-21</dc:modified>
<dc:publisher rdf:resource="http://static.datafoodconsortium.org/data/publication.rdf#dataFoodConsortium"/>
<dc:title xml:lang="en">Data Food Consortium Product</dc:title>
<rdfs:comment xml:lang="en">A common vocabulary for digital food platforms (Product Glossary Part)</rdfs:comment>
<owl:versionInfo rdf:datatype="http://www.w3.org/2001/XMLSchema#decimal">4.0</owl:versionInfo>
</owl:Ontology>
<!--
///////////////////////////////////////////////////////////////////////////////////////
//
// Annotation properties
//
///////////////////////////////////////////////////////////////////////////////////////
-->
<!-- http://purl.org/dc/terms/contributor -->
<owl:AnnotationProperty rdf:about="http://purl.org/dc/terms/contributor"/>
<!--
///////////////////////////////////////////////////////////////////////////////////////
//
// Object Properties
//
///////////////////////////////////////////////////////////////////////////////////////
-->
<!-- http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_ObjectProperty -->
<owl:ObjectProperty rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_ObjectProperty"/>
<!-- http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#generalizes -->
<owl:ObjectProperty rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#generalizes">
<rdfs:subPropertyOf rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_ObjectProperty"/>
<owl:inverseOf rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#specializes"/>
<rdfs:domain rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_Facet"/>
<rdfs:label xml:lang="en">generalizes</rdfs:label>
<rdfs:label xml:lang="fr">généralise</rdfs:label>
</owl:ObjectProperty>
<!-- http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#measuredBy -->
<owl:ObjectProperty rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#measuredBy">
<rdfs:subPropertyOf rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_ObjectProperty"/>
<owl:inverseOf rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#measures"/>
<rdfs:domain rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#Dimension"/>
<rdfs:label xml:lang="en">measured by</rdfs:label>
<rdfs:label xml:lang="fr">mesuré en</rdfs:label>
</owl:ObjectProperty>
<!-- http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#measures -->
<owl:ObjectProperty rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#measures">
<rdfs:subPropertyOf rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_ObjectProperty"/>
<rdfs:domain rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#Unit"/>
<rdfs:label xml:lang="en">measures</rdfs:label>
<rdfs:label xml:lang="fr">mesure</rdfs:label>
</owl:ObjectProperty>
<!-- http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#specializes -->
<owl:ObjectProperty rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#specializes">
<rdfs:subPropertyOf rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_ObjectProperty"/>
<rdfs:domain rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_Facet"/>
<rdfs:label xml:lang="en">specializes</rdfs:label>
<rdfs:label xml:lang="fr">spécialise</rdfs:label>
</owl:ObjectProperty>
<!--
///////////////////////////////////////////////////////////////////////////////////////
//
// Classes
//
///////////////////////////////////////////////////////////////////////////////////////
-->
<!-- http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#Certification -->
<owl:Class rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#Certification">
<rdfs:subClassOf rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_Facet"/>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#generalizes"/>
<owl:allValuesFrom rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#Certification"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#specializes"/>
<owl:allValuesFrom rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#Certification"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:label xml:lang="en">certification</rdfs:label>
<rdfs:label xml:lang="fr">certification</rdfs:label>
</owl:Class>
<!-- http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_Facet -->
<owl:Class rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_Facet">
<rdfs:subClassOf rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#RepresentedThing"/>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#generalizes"/>
<owl:allValuesFrom rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_Facet"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#specializes"/>
<owl:allValuesFrom rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_Facet"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#specializes"/>
<owl:cardinality rdf:datatype="http://www.w3.org/2001/XMLSchema#nonNegativeInteger">1</owl:cardinality>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:label xml:lang="en">Subject of the facets thesaurus</rdfs:label>
<rdfs:label xml:lang="fr">Sujet du Thésaurus à Facettes</rdfs:label>
</owl:Class>
<!-- http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_Measure -->
<owl:Class rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_Measure">
<rdfs:subClassOf rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#RepresentedThing"/>
<rdfs:label xml:lang="fr">thesaurus des unités de mesure</rdfs:label>
<rdfs:label xml:lang="en">unit of measures thesaurus</rdfs:label>
</owl:Class>
<!-- http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_Type -->
<owl:Class rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_Type">
<rdfs:subClassOf rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#RepresentedThing"/>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#generalizes"/>
<owl:allValuesFrom rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_Type"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#specializes"/>
<owl:allValuesFrom rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_Type"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#specializes"/>
<owl:cardinality rdf:datatype="http://www.w3.org/2001/XMLSchema#nonNegativeInteger">1</owl:cardinality>
</owl:Restriction>
</rdfs:subClassOf>
</owl:Class>
<!-- http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#Dimension -->
<owl:Class rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#Dimension">
<rdfs:subClassOf rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_Measure"/>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#measuredBy"/>
<owl:allValuesFrom rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#Unit"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:label xml:lang="en">dimension</rdfs:label>
<rdfs:label xml:lang="fr">dimension</rdfs:label>
</owl:Class>
<!-- http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#GlobalGenericOrigin -->
<owl:Class rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#GlobalGenericOrigin">
<rdfs:subClassOf rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_Facet"/>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#specializes"/>
<owl:someValuesFrom rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#GlobalGenericOrigin"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#generalizes"/>
<owl:allValuesFrom rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#GlobalGenericOrigin"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:label xml:lang="en">Global generic origin</rdfs:label>
<rdfs:label xml:lang="fr">Origines génériques globales</rdfs:label>
</owl:Class>
<!-- http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#NatureOrigin -->
<owl:Class rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#NatureOrigin">
<rdfs:subClassOf rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_Facet"/>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#generalizes"/>
<owl:allValuesFrom rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#NatureOrigin"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#specializes"/>
<owl:allValuesFrom rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#NatureOrigin"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:label xml:lang="en">natural &quot;living&quot; origin</rdfs:label>
<rdfs:label xml:lang="fr">source &quot;vivante&quot; d&apos;origine</rdfs:label>
</owl:Class>
<!-- http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#PartOrigin -->
<owl:Class rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#PartOrigin">
<rdfs:subClassOf rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_Facet"/>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#generalizes"/>
<owl:allValuesFrom rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#PartOrigin"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#specializes"/>
<owl:allValuesFrom rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#PartOrigin"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:label xml:lang="en">part of natural &quot;living&quot; origin concerned</rdfs:label>
<rdfs:label xml:lang="fr">partie de la source &quot;vivante&quot; d&apos;origine concernée</rdfs:label>
</owl:Class>
<!-- http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#Process -->
<owl:Class rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#Process">
<rdfs:subClassOf rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_Facet"/>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#generalizes"/>
<owl:allValuesFrom rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#Process"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#specializes"/>
<owl:allValuesFrom rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#Process"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:label xml:lang="en">process applied</rdfs:label>
<rdfs:label xml:lang="fr">procédé appliqué</rdfs:label>
</owl:Class>
<!-- http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#ProductType -->
<owl:Class rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#ProductType">
<rdfs:subClassOf rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_Type"/>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#generalizes"/>
<owl:allValuesFrom rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#ProductType"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#specializes"/>
<owl:allValuesFrom rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#ProductType"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:label xml:lang="en">product type (general taxonomy)</rdfs:label>
<rdfs:label xml:lang="fr">type de produit (classification générale)</rdfs:label>
</owl:Class>
<!-- http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#TerritorialOrigin -->
<owl:Class rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#TerritorialOrigin">
<rdfs:subClassOf rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_Facet"/>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#generalizes"/>
<owl:allValuesFrom rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#TerritorialOrigin"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#specializes"/>
<owl:allValuesFrom rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#TerritorialOrigin"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:label xml:lang="fr">origine territoriale</rdfs:label>
<rdfs:label xml:lang="en">territorial origin</rdfs:label>
</owl:Class>
<!-- http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#Unit -->
<owl:Class rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#Unit">
<rdfs:subClassOf rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#DFC_ProductGlossary_Measure"/>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#measures"/>
<owl:allValuesFrom rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#Dimension"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#measures"/>
<owl:cardinality rdf:datatype="http://www.w3.org/2001/XMLSchema#nonNegativeInteger">1</owl:cardinality>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:label xml:lang="en">unit</rdfs:label>
<rdfs:label xml:lang="fr">unité</rdfs:label>
</owl:Class>
<!--
///////////////////////////////////////////////////////////////////////////////////////
//
// Individuals
//
///////////////////////////////////////////////////////////////////////////////////////
-->
<!-- http://static.datafoodconsortium.org/data/publication.rdf#bernardC -->
<owl:NamedIndividual rdf:about="http://static.datafoodconsortium.org/data/publication.rdf#bernardC">
<rdf:type rdf:resource="http://xmlns.com/foaf/0.1/Person"/>
</owl:NamedIndividual>
<!-- http://static.datafoodconsortium.org/data/publication.rdf#dataFoodConsortium -->
<rdf:Description rdf:about="http://static.datafoodconsortium.org/data/publication.rdf#dataFoodConsortium">
<rdf:type rdf:resource="http://xmlns.com/foaf/0.1/Organization"/>
</rdf:Description>
<!-- http://static.datafoodconsortium.org/data/publication.rdf#rachelA -->
<owl:NamedIndividual rdf:about="http://static.datafoodconsortium.org/data/publication.rdf#rachelA">
<rdf:type rdf:resource="http://xmlns.com/foaf/0.1/Person"/>
</owl:NamedIndividual>
<!-- http://static.datafoodconsortium.org/data/publication.rdf#simonL -->
<rdf:Description rdf:about="http://static.datafoodconsortium.org/data/publication.rdf#simonL">
<rdf:type rdf:resource="http://xmlns.com/foaf/0.1/Person"/>
</rdf:Description>
</rdf:RDF>
<!-- Generated by the OWL API (version 4.5.9.2019-02-01T07:24:44Z) https://github.com/owlcs/owlapi -->
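
The ontology documents added in this commit are plain RDF/XML and OWL/XML files. To inspect one locally, a generic RDF library such as rdflib (not an Epicyon dependency; the file path below is an assumption for illustration) can load the RDF/XML serialisation and list the declared labels:

from rdflib import Graph
from rdflib.namespace import RDFS

g = Graph()
# assumed local path to the product glossary file shown above
g.parse('ontology/DFC_ProductGlossary.rdf', format='xml')
for subject, label in g.subject_objects(RDFS.label):
    print(subject, '->', label)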

View File

@ -0,0 +1,269 @@
<?xml version="1.0"?>
<Ontology xmlns="http://www.w3.org/2002/07/owl#"
xml:base="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:xml="http://www.w3.org/XML/1998/namespace"
xmlns:xsd="http://www.w3.org/2001/XMLSchema#"
xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#"
ontologyIRI="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl">
<Prefix name="" IRI="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl"/>
<Prefix name="cc" IRI="http://creativecommons.org/ns#"/>
<Prefix name="dc" IRI="http://purl.org/dc/terms/"/>
<Prefix name="owl" IRI="http://www.w3.org/2002/07/owl#"/>
<Prefix name="rdf" IRI="http://www.w3.org/1999/02/22-rdf-syntax-ns#"/>
<Prefix name="xml" IRI="http://www.w3.org/XML/1998/namespace"/>
<Prefix name="xsd" IRI="http://www.w3.org/2001/XMLSchema#"/>
<Prefix name="xsp" IRI="http://www.owl-ontologies.com/2005/08/07/xsp.owl#"/>
<Prefix name="foaf" IRI="http://xmlns.com/foaf/0.1/"/>
<Prefix name="rdfs" IRI="http://www.w3.org/2000/01/rdf-schema#"/>
<Prefix name="swrl" IRI="http://www.w3.org/2003/11/swrl#"/>
<Prefix name="vann" IRI="http://purl.org/vocab/vann/"/>
<Prefix name="swrlb" IRI="http://www.w3.org/2003/11/swrlb#"/>
<Prefix name="protege" IRI="http://protege.stanford.edu/plugins/owl/protege#"/>
<Annotation>
<AnnotationProperty abbreviatedIRI="cc:license"/>
<IRI>https://www.gnu.org/licenses/agpl-3.0.en.html</IRI>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="dc:creator"/>
<IRI>http://static.datafoodconsortium.org/data/publication.rdf#simonL</IRI>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="dc:description"/>
<Literal xml:lang="en">A common vocabulary for digital food platforms (Technical Part)</Literal>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="dc:issued"/>
<Literal datatypeIRI="http://www.w3.org/2001/XMLSchema#date">2018-05-28</Literal>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="dc:modified"/>
<Literal datatypeIRI="http://www.w3.org/2001/XMLSchema#date">2019-10-21</Literal>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="dc:publisher"/>
<IRI>http://static.datafoodconsortium.org/data/publication.rdf#dataFoodConsortium</IRI>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="dc:title"/>
<Literal xml:lang="en">Data Food Consortium Technical</Literal>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="vann:preferredNamespacePrefix"/>
<Literal>dfc-t</Literal>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="rdfs:comment"/>
<Literal xml:lang="en">A common vocabulary for digital food platforms (Technical Part)</Literal>
</Annotation>
<Annotation>
<AnnotationProperty abbreviatedIRI="owl:versionInfo"/>
<Literal datatypeIRI="http://www.w3.org/2001/XMLSchema#decimal">4.0</Literal>
</Annotation>
<Declaration>
<Class IRI="#DFC_DitributedRepresentation"/>
</Declaration>
<Declaration>
<Class IRI="#Platform"/>
</Declaration>
<Declaration>
<Class IRI="#RepresentationPivot"/>
</Declaration>
<Declaration>
<Class IRI="#RepresentedThing"/>
</Declaration>
<Declaration>
<Class abbreviatedIRI="foaf:Organization"/>
</Declaration>
<Declaration>
<Class abbreviatedIRI="foaf:Person"/>
</Declaration>
<Declaration>
<ObjectProperty IRI="http://static.datafoodconsortium.org/ontologies/DFC_BusinessOntology.owl#DFC_TechnicalOntology_ObjectProperty"/>
</Declaration>
<Declaration>
<ObjectProperty IRI="#hasPivot"/>
</Declaration>
<Declaration>
<ObjectProperty IRI="#hostedBy"/>
</Declaration>
<Declaration>
<ObjectProperty IRI="#represent"/>
</Declaration>
<Declaration>
<NamedIndividual IRI="http://static.datafoodconsortium.org/data/publication.rdf#dataFoodConsortium"/>
</Declaration>
<Declaration>
<NamedIndividual IRI="http://static.datafoodconsortium.org/data/publication.rdf#simonL"/>
</Declaration>
<Declaration>
<AnnotationProperty abbreviatedIRI="cc:license"/>
</Declaration>
<Declaration>
<AnnotationProperty abbreviatedIRI="dc:creator"/>
</Declaration>
<Declaration>
<AnnotationProperty abbreviatedIRI="dc:description"/>
</Declaration>
<Declaration>
<AnnotationProperty abbreviatedIRI="dc:issued"/>
</Declaration>
<Declaration>
<AnnotationProperty abbreviatedIRI="dc:modified"/>
</Declaration>
<Declaration>
<AnnotationProperty abbreviatedIRI="dc:publisher"/>
</Declaration>
<Declaration>
<AnnotationProperty abbreviatedIRI="dc:title"/>
</Declaration>
<Declaration>
<AnnotationProperty abbreviatedIRI="vann:preferredNamespacePrefix"/>
</Declaration>
<SubClassOf>
<Class IRI="#Platform"/>
<Class IRI="#DFC_DitributedRepresentation"/>
</SubClassOf>
<SubClassOf>
<Class IRI="#Platform"/>
<ObjectAllValuesFrom>
<ObjectProperty IRI="#hostedBy"/>
<Class abbreviatedIRI="owl:Thing"/>
</ObjectAllValuesFrom>
</SubClassOf>
<SubClassOf>
<Class IRI="#RepresentationPivot"/>
<Class IRI="#DFC_DitributedRepresentation"/>
</SubClassOf>
<SubClassOf>
<Class IRI="#RepresentationPivot"/>
<ObjectAllValuesFrom>
<ObjectProperty IRI="#represent"/>
<Class IRI="#RepresentedThing"/>
</ObjectAllValuesFrom>
</SubClassOf>
<SubClassOf>
<Class IRI="#RepresentationPivot"/>
<ObjectMinCardinality cardinality="1">
<ObjectProperty IRI="#represent"/>
<Class IRI="#RepresentedThing"/>
</ObjectMinCardinality>
</SubClassOf>
<SubClassOf>
<Class IRI="#RepresentedThing"/>
<Class IRI="#DFC_DitributedRepresentation"/>
</SubClassOf>
<SubClassOf>
<Class IRI="#RepresentedThing"/>
<ObjectAllValuesFrom>
<ObjectProperty IRI="#hasPivot"/>
<Class IRI="#RepresentationPivot"/>
</ObjectAllValuesFrom>
</SubClassOf>
<SubClassOf>
<Class IRI="#RepresentedThing"/>
<ObjectAllValuesFrom>
<ObjectProperty IRI="#hostedBy"/>
<Class IRI="#Platform"/>
</ObjectAllValuesFrom>
</SubClassOf>
<SubClassOf>
<Class IRI="#RepresentedThing"/>
<ObjectExactCardinality cardinality="1">
<ObjectProperty IRI="#hasPivot"/>
<Class IRI="#RepresentationPivot"/>
</ObjectExactCardinality>
</SubClassOf>
<SubClassOf>
<Class IRI="#RepresentedThing"/>
<ObjectExactCardinality cardinality="1">
<ObjectProperty IRI="#hostedBy"/>
<Class IRI="#Platform"/>
</ObjectExactCardinality>
</SubClassOf>
<DisjointClasses>
<Class IRI="#Platform"/>
<Class IRI="#RepresentationPivot"/>
<Class IRI="#RepresentedThing"/>
</DisjointClasses>
<ClassAssertion>
<Class abbreviatedIRI="foaf:Organization"/>
<NamedIndividual IRI="http://static.datafoodconsortium.org/data/publication.rdf#dataFoodConsortium"/>
</ClassAssertion>
<ClassAssertion>
<Class abbreviatedIRI="foaf:Person"/>
<NamedIndividual IRI="http://static.datafoodconsortium.org/data/publication.rdf#simonL"/>
</ClassAssertion>
<SubObjectPropertyOf>
<ObjectProperty IRI="#hasPivot"/>
<ObjectProperty abbreviatedIRI="owl:topObjectProperty"/>
</SubObjectPropertyOf>
<SubObjectPropertyOf>
<ObjectProperty IRI="#hostedBy"/>
<ObjectProperty abbreviatedIRI="owl:topObjectProperty"/>
</SubObjectPropertyOf>
<SubObjectPropertyOf>
<ObjectProperty IRI="#represent"/>
<ObjectProperty abbreviatedIRI="owl:topObjectProperty"/>
</SubObjectPropertyOf>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#DFC_DitributedRepresentation</IRI>
<Literal xml:lang="fr">Concepts de réconciliation de représentation distribuée</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#DFC_DitributedRepresentation</IRI>
<Literal xml:lang="en">ditributed représentation reconcialition concepts</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:comment"/>
<IRI>#Platform</IRI>
<Literal xml:lang="fr">Organisation qui heberge la donnée</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#Platform</IRI>
<Literal xml:lang="fr">Plateforme</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:comment"/>
<IRI>#RepresentationPivot</IRI>
<Literal xml:lang="fr">Permet de designer tous les RepresentatedThing qui sont équivalents et d&apos;etre désigné par un RepresentedThing pour connaitre ses équivalence par transitivité</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#RepresentationPivot</IRI>
<Literal xml:lang="fr">Pivot de représentation</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:comment"/>
<IRI>#RepresentedThing</IRI>
<Literal xml:lang="fr">Chose représentée sur une platefome posadant des equivalences sur d&apos;autres plateformes</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#RepresentedThing</IRI>
<Literal xml:lang="fr">Chose représentée</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:comment"/>
<IRI>#hasPivot</IRI>
<Literal xml:lang="fr">possède un point pivot</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#hostedBy</IRI>
<Literal xml:lang="fr">hébergé par</Literal>
</AnnotationAssertion>
<AnnotationAssertion>
<AnnotationProperty abbreviatedIRI="rdfs:label"/>
<IRI>#represent</IRI>
<Literal xml:lang="fr">représente</Literal>
</AnnotationAssertion>
</Ontology>
<!-- Generated by the OWL API (version 4.5.9.2019-02-01T07:24:44Z) https://github.com/owlcs/owlapi -->

View File

@ -0,0 +1,291 @@
<?xml version="1.0"?>
<rdf:RDF xmlns="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl"
xml:base="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:dc="http://purl.org/dc/terms/"
xmlns:owl="http://www.w3.org/2002/07/owl#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:xml="http://www.w3.org/XML/1998/namespace"
xmlns:xsd="http://www.w3.org/2001/XMLSchema#"
xmlns:xsp="http://www.owl-ontologies.com/2005/08/07/xsp.owl#"
xmlns:foaf="http://xmlns.com/foaf/0.1/"
xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#"
xmlns:swrl="http://www.w3.org/2003/11/swrl#"
xmlns:vann="http://purl.org/vocab/vann/"
xmlns:swrlb="http://www.w3.org/2003/11/swrlb#"
xmlns:protege="http://protege.stanford.edu/plugins/owl/protege#">
<owl:Ontology rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl">
<rdf:type rdf:resource="http://purl.org/vocommons/voaf#Vocabulary"/>
<vann:preferredNamespacePrefix>dfc-t</vann:preferredNamespacePrefix>
<vann:preferredNamespaceUri>http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#</vann:preferredNamespaceUri>
<cc:license rdf:resource="https://www.gnu.org/licenses/agpl-3.0.en.html"/>
<dc:creator rdf:resource="http://static.datafoodconsortium.org/data/publication.rdf#simonL"/>
<dc:description xml:lang="en">A common vocabulary for digital food platforms (Technical Part)</dc:description>
<dc:issued rdf:datatype="http://www.w3.org/2001/XMLSchema#date">2018-05-28</dc:issued>
<dc:modified rdf:datatype="http://www.w3.org/2001/XMLSchema#date">2019-10-21</dc:modified>
<dc:publisher rdf:resource="http://static.datafoodconsortium.org/data/publication.rdf#dataFoodConsortium"/>
<dc:title xml:lang="en">Data Food Consortium Technical</dc:title>
<rdfs:comment xml:lang="en">A common vocabulary for digital food platforms (Technical Part)</rdfs:comment>
<owl:versionInfo rdf:datatype="http://www.w3.org/2001/XMLSchema#decimal">4.0</owl:versionInfo>
</owl:Ontology>
<!--
///////////////////////////////////////////////////////////////////////////////////////
//
// Annotation properties
//
///////////////////////////////////////////////////////////////////////////////////////
-->
<!-- http://creativecommons.org/ns#license -->
<owl:AnnotationProperty rdf:about="http://creativecommons.org/ns#license"/>
<!-- http://purl.org/dc/terms/creator -->
<owl:AnnotationProperty rdf:about="http://purl.org/dc/terms/creator"/>
<!-- http://purl.org/dc/terms/description -->
<owl:AnnotationProperty rdf:about="http://purl.org/dc/terms/description"/>
<!-- http://purl.org/dc/terms/issued -->
<owl:AnnotationProperty rdf:about="http://purl.org/dc/terms/issued"/>
<!-- http://purl.org/dc/terms/modified -->
<owl:AnnotationProperty rdf:about="http://purl.org/dc/terms/modified"/>
<!-- http://purl.org/dc/terms/publisher -->
<owl:AnnotationProperty rdf:about="http://purl.org/dc/terms/publisher"/>
<!-- http://purl.org/dc/terms/title -->
<owl:AnnotationProperty rdf:about="http://purl.org/dc/terms/title"/>
<!-- http://purl.org/vocab/vann/preferredNamespacePrefix -->
<owl:AnnotationProperty rdf:about="http://purl.org/vocab/vann/preferredNamespacePrefix"/>
<!--
///////////////////////////////////////////////////////////////////////////////////////
//
// Object Properties
//
///////////////////////////////////////////////////////////////////////////////////////
-->
<!-- http://static.datafoodconsortium.org/ontologies/DFC_BusinessOntology.owl#DFC_TechnicalOntology_ObjectProperty -->
<owl:ObjectProperty rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_BusinessOntology.owl#DFC_TechnicalOntology_ObjectProperty"/>
<!-- http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#hasPivot -->
<owl:ObjectProperty rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#hasPivot">
<rdfs:subPropertyOf rdf:resource="http://www.w3.org/2002/07/owl#topObjectProperty"/>
<rdfs:comment xml:lang="fr">possède un point pivot</rdfs:comment>
</owl:ObjectProperty>
<!-- http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#hostedBy -->
<owl:ObjectProperty rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#hostedBy">
<rdfs:subPropertyOf rdf:resource="http://www.w3.org/2002/07/owl#topObjectProperty"/>
<rdfs:label xml:lang="fr">hébergé par</rdfs:label>
</owl:ObjectProperty>
<!-- http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#represent -->
<owl:ObjectProperty rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#represent">
<rdfs:subPropertyOf rdf:resource="http://www.w3.org/2002/07/owl#topObjectProperty"/>
<rdfs:label xml:lang="fr">représente</rdfs:label>
</owl:ObjectProperty>
<!--
///////////////////////////////////////////////////////////////////////////////////////
//
// Classes
//
///////////////////////////////////////////////////////////////////////////////////////
-->
<!-- http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#DFC_DitributedRepresentation -->
<owl:Class rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#DFC_DitributedRepresentation">
<rdfs:label xml:lang="fr">Concepts de réconciliation de représentation distribuée</rdfs:label>
<rdfs:label xml:lang="en">ditributed représentation reconcialition concepts</rdfs:label>
</owl:Class>
<!-- http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#Platform -->
<owl:Class rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#Platform">
<rdfs:subClassOf rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#DFC_DitributedRepresentation"/>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#hostedBy"/>
<owl:allValuesFrom rdf:resource="http://www.w3.org/2002/07/owl#Thing"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:comment xml:lang="fr">Organisation qui heberge la donnée</rdfs:comment>
<rdfs:label xml:lang="fr">Plateforme</rdfs:label>
</owl:Class>
<!-- http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#RepresentationPivot -->
<owl:Class rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#RepresentationPivot">
<rdfs:subClassOf rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#DFC_DitributedRepresentation"/>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#represent"/>
<owl:allValuesFrom rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#RepresentedThing"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#represent"/>
<owl:minQualifiedCardinality rdf:datatype="http://www.w3.org/2001/XMLSchema#nonNegativeInteger">1</owl:minQualifiedCardinality>
<owl:onClass rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#RepresentedThing"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:comment xml:lang="fr">Permet de designer tous les RepresentatedThing qui sont équivalents et d&apos;etre désigné par un RepresentedThing pour connaitre ses équivalence par transitivité</rdfs:comment>
<rdfs:label xml:lang="fr">Pivot de représentation</rdfs:label>
</owl:Class>
<!-- http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#RepresentedThing -->
<owl:Class rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#RepresentedThing">
<rdfs:subClassOf rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#DFC_DitributedRepresentation"/>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#hasPivot"/>
<owl:allValuesFrom rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#RepresentationPivot"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#hostedBy"/>
<owl:allValuesFrom rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#Platform"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#hasPivot"/>
<owl:qualifiedCardinality rdf:datatype="http://www.w3.org/2001/XMLSchema#nonNegativeInteger">1</owl:qualifiedCardinality>
<owl:onClass rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#RepresentationPivot"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:subClassOf>
<owl:Restriction>
<owl:onProperty rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#hostedBy"/>
<owl:qualifiedCardinality rdf:datatype="http://www.w3.org/2001/XMLSchema#nonNegativeInteger">1</owl:qualifiedCardinality>
<owl:onClass rdf:resource="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#Platform"/>
</owl:Restriction>
</rdfs:subClassOf>
<rdfs:comment xml:lang="fr">Chose représentée sur une platefome posadant des equivalences sur d&apos;autres plateformes</rdfs:comment>
<rdfs:label xml:lang="fr">Chose représentée</rdfs:label>
</owl:Class>
<!-- http://xmlns.com/foaf/0.1/Organization -->
<owl:Class rdf:about="http://xmlns.com/foaf/0.1/Organization"/>
<!-- http://xmlns.com/foaf/0.1/Person -->
<owl:Class rdf:about="http://xmlns.com/foaf/0.1/Person"/>
<!--
///////////////////////////////////////////////////////////////////////////////////////
//
// Individuals
//
///////////////////////////////////////////////////////////////////////////////////////
-->
<!-- http://static.datafoodconsortium.org/data/publication.rdf#dataFoodConsortium -->
<owl:NamedIndividual rdf:about="http://static.datafoodconsortium.org/data/publication.rdf#dataFoodConsortium">
<rdf:type rdf:resource="http://xmlns.com/foaf/0.1/Organization"/>
</owl:NamedIndividual>
<!-- http://static.datafoodconsortium.org/data/publication.rdf#simonL -->
<owl:NamedIndividual rdf:about="http://static.datafoodconsortium.org/data/publication.rdf#simonL">
<rdf:type rdf:resource="http://xmlns.com/foaf/0.1/Person"/>
</owl:NamedIndividual>
<!--
///////////////////////////////////////////////////////////////////////////////////////
//
// General axioms
//
///////////////////////////////////////////////////////////////////////////////////////
-->
<rdf:Description>
<rdf:type rdf:resource="http://www.w3.org/2002/07/owl#AllDisjointClasses"/>
<owl:members rdf:parseType="Collection">
<rdf:Description rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#Platform"/>
<rdf:Description rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#RepresentationPivot"/>
<rdf:Description rdf:about="http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#RepresentedThing"/>
</owl:members>
</rdf:Description>
</rdf:RDF>
<!-- Generated by the OWL API (version 4.5.9.2019-02-01T07:24:44Z) https://github.com/owlcs/owlapi -->

View File

@ -0,0 +1,989 @@
{
"@context": {
"rdfs": "http://www.w3.org/2000/01/rdf-schema#",
"dfc-b": "http://static.datafoodconsortium.org/ontologies/DFC_BusinessOntology.owl#",
"dfc-p": "http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#",
"dfc-t": "http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#",
"dfc-u": "http://static.datafoodconsortium.org/data/units.json#",
"dfc-p:specialize": {
"@type": "@id"
}
},
"@graph": [
{
"@id": "http://static.datafoodconsortium.org/data/accommodationTypes.json#EntirePlace",
"rdfs:label": [
{
"@value": "Entire Place",
"@language": "en"
},
{
"@value": "Entire Place",
"@language": "ar"
},
{
"@value": "Entire Place",
"@language": "ku"
},
{
"@value": "Entire Place",
"@language": "es"
},
{
"@value": "Posto intero",
"@language": "it"
},
{
"@value": "Gesamter Ort",
"@language": "de"
},
{
"@value": "Entire Place",
"@language": "sw"
},
{
"@value": "Lugar completo",
"@language": "pt"
},
{
"@value": "Entire Place",
"@language": "oc"
},
{
"@value": "Все место",
"@language": "ru"
},
{
"@value": "Entire Place",
"@language": "cy"
},
{
"@value": "集合場所",
"@language": "ja"
},
{
"@value": "Áit Eintire",
"@language": "ga"
},
{
"@value": "जगह",
"@language": "hi"
},
{
"@value": "入口",
"@language": "zh"
},
{
"@value": "Entire Place",
"@language": "fr"
},
{
"@value": "Entire Place",
"@language": "ca"
}
],
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/accommodationTypes.json#EntirePlace",
"@type": "dfc-p:ProductType"
},
{
"@id": "http://static.datafoodconsortium.org/data/accommodationTypes.json#PrivateRoom",
"rdfs:label": [
{
"@value": "Private Room",
"@language": "en"
},
{
"@value": "الغرفة الخاصة",
"@language": "ar"
},
{
"@value": "Private Room",
"@language": "ku"
},
{
"@value": "Habitación privada",
"@language": "es"
},
{
"@value": "Stanza privata",
"@language": "it"
},
{
"@value": "Privatzimmer",
"@language": "de"
},
{
"@value": "Private Room",
"@language": "sw"
},
{
"@value": "Quarto privado",
"@language": "pt"
},
{
"@value": "Private Room",
"@language": "oc"
},
{
"@value": "Частная комната",
"@language": "ru"
},
{
"@value": "Private Room",
"@language": "cy"
},
{
"@value": "プライベートルーム",
"@language": "ja"
},
{
"@value": "Seomra na nDaoine",
"@language": "ga"
},
{
"@value": "निजी कक्ष",
"@language": "hi"
},
{
"@value": "私人会议室",
"@language": "zh"
},
{
"@value": "Salle privée",
"@language": "fr"
},
{
"@value": "Private Room",
"@language": "ca"
}
],
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/accommodationTypes.json#PrivateRoom",
"@type": "dfc-p:ProductType"
},
{
"@id": "http://static.datafoodconsortium.org/data/accommodationTypes.json#HotelRoom",
"rdfs:label": [
{
"@value": "Hotel Room",
"@language": "en"
},
{
"@value": "فندق",
"@language": "ar"
},
{
"@value": "Hotel Room",
"@language": "ku"
},
{
"@value": "Hotel Room",
"@language": "es"
},
{
"@value": "Camera dell'hotel",
"@language": "it"
},
{
"@value": "Hotelzimmer",
"@language": "de"
},
{
"@value": "Hotel Room",
"@language": "sw"
},
{
"@value": "Quarto de Hotel",
"@language": "pt"
},
{
"@value": "Hotel Room",
"@language": "oc"
},
{
"@value": "Номер в отеле",
"@language": "ru"
},
{
"@value": "Hotel Room",
"@language": "cy"
},
{
"@value": "ホテル ルーム",
"@language": "ja"
},
{
"@value": "Seomra Óstán",
"@language": "ga"
},
{
"@value": "होटल",
"@language": "hi"
},
{
"@value": "旅馆",
"@language": "zh"
},
{
"@value": "Hotel Room",
"@language": "fr"
},
{
"@value": "Hotel Room",
"@language": "ca"
}
],
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/accommodationTypes.json#HotelRoom",
"@type": "dfc-p:ProductType"
},
{
"@id": "http://static.datafoodconsortium.org/data/accommodationTypes.json#SharedRoom",
"rdfs:label": [
{
"@value": "Shared Room",
"@language": "en"
},
{
"@value": "الغرفة المشتركة",
"@language": "ar"
},
{
"@value": "Shared Room",
"@language": "ku"
},
{
"@value": "Habitación compartida",
"@language": "es"
},
{
"@value": "Camera condivisa",
"@language": "it"
},
{
"@value": "Zimmer",
"@language": "de"
},
{
"@value": "Shared Room",
"@language": "sw"
},
{
"@value": "Quarto compartilhado",
"@language": "pt"
},
{
"@value": "Shared Room",
"@language": "oc"
},
{
"@value": "Общая комната",
"@language": "ru"
},
{
"@value": "Shared Room",
"@language": "cy"
},
{
"@value": "シェアルーム",
"@language": "ja"
},
{
"@value": "Seomra Comhroinnte",
"@language": "ga"
},
{
"@value": "साझा कक्ष",
"@language": "hi"
},
{
"@value": "共有会议室",
"@language": "zh"
},
{
"@value": "Salle partagée",
"@language": "fr"
},
{
"@value": "Shared Room",
"@language": "ca"
}
],
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/accommodationTypes.json#SharedRoom",
"@type": "dfc-p:ProductType"
},
{
"@id": "http://static.datafoodconsortium.org/data/accommodationTypes.json#Sofa",
"rdfs:label": [
{
"@value": "Sofa",
"@language": "en"
},
{
"@value": "Sofa",
"@language": "ar"
},
{
"@value": "Sofa",
"@language": "ku"
},
{
"@value": "Sofa",
"@language": "es"
},
{
"@value": "Divano",
"@language": "it"
},
{
"@value": "Sofa",
"@language": "de"
},
{
"@value": "Sofa",
"@language": "sw"
},
{
"@value": "Sofá",
"@language": "pt"
},
{
"@value": "Sofa",
"@language": "oc"
},
{
"@value": "Диван",
"@language": "ru"
},
{
"@value": "Sofa",
"@language": "cy"
},
{
"@value": "ソファ",
"@language": "ja"
},
{
"@value": "Toir agus Crainn",
"@language": "ga"
},
{
"@value": "सोफा",
"@language": "hi"
},
{
"@value": "Sofa",
"@language": "zh"
},
{
"@value": "Sofa",
"@language": "fr"
},
{
"@value": "Sofa",
"@language": "ca"
}
],
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/accommodationTypes.json#Sofa",
"@type": "dfc-p:ProductType"
},
{
"@id": "http://static.datafoodconsortium.org/data/accommodationTypes.json#Boat",
"rdfs:label": [
{
"@value": "Boat",
"@language": "en"
},
{
"@value": "Boat",
"@language": "ar"
},
{
"@value": "Boat",
"@language": "ku"
},
{
"@value": "El barco",
"@language": "es"
},
{
"@value": "Barca",
"@language": "it"
},
{
"@value": "Boote",
"@language": "de"
},
{
"@value": "Boat",
"@language": "sw"
},
{
"@value": "Barco",
"@language": "pt"
},
{
"@value": "Boat",
"@language": "oc"
},
{
"@value": "Лодка",
"@language": "ru"
},
{
"@value": "Boat",
"@language": "cy"
},
{
"@value": "ボート",
"@language": "ja"
},
{
"@value": "taiseachas aeir: fliuch",
"@language": "ga"
},
{
"@value": "नाव",
"@language": "hi"
},
{
"@value": "B. 博塔",
"@language": "zh"
},
{
"@value": "Boat",
"@language": "fr"
},
{
"@value": "Boat",
"@language": "ca"
}
],
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/accommodationTypes.json#Boat",
"@type": "dfc-p:ProductType"
},
{
"@id": "http://static.datafoodconsortium.org/data/accommodationTypes.json#Barge",
"rdfs:label": [
{
"@value": "Barge",
"@language": "en"
},
{
"@value": "Barge",
"@language": "ar"
},
{
"@value": "Barge",
"@language": "ku"
},
{
"@value": "Barge",
"@language": "es"
},
{
"@value": "Barge",
"@language": "it"
},
{
"@value": "Barrel",
"@language": "de"
},
{
"@value": "Barge",
"@language": "sw"
},
{
"@value": "Barco",
"@language": "pt"
},
{
"@value": "Barge",
"@language": "oc"
},
{
"@value": "Барж",
"@language": "ru"
},
{
"@value": "Barge",
"@language": "cy"
},
{
"@value": "バージ",
"@language": "ja"
},
{
"@value": "Toir agus Crainn",
"@language": "ga"
},
{
"@value": "बार्ज",
"@language": "hi"
},
{
"@value": "律师协会",
"@language": "zh"
},
{
"@value": "Barge",
"@language": "fr"
},
{
"@value": "Barge",
"@language": "ca"
}
],
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/accommodationTypes.json#Boat",
"@type": "dfc-p:ProductType"
},
{
"@id": "http://static.datafoodconsortium.org/data/accommodationTypes.json#Tent",
"rdfs:label": [
{
"@value": "Tent",
"@language": "en"
},
{
"@value": "الخيمة",
"@language": "ar"
},
{
"@value": "Tent",
"@language": "ku"
},
{
"@value": "Tent",
"@language": "es"
},
{
"@value": "Tenda",
"@language": "it"
},
{
"@value": "Zelt",
"@language": "de"
},
{
"@value": "Tent",
"@language": "sw"
},
{
"@value": "Tenda",
"@language": "pt"
},
{
"@value": "Tent",
"@language": "oc"
},
{
"@value": "Тент",
"@language": "ru"
},
{
"@value": "Tent",
"@language": "cy"
},
{
"@value": "テント",
"@language": "ja"
},
{
"@value": "Tent",
"@language": "ga"
},
{
"@value": "टेंट",
"@language": "hi"
},
{
"@value": "答辩",
"@language": "zh"
},
{
"@value": "Tent",
"@language": "fr"
},
{
"@value": "Tent",
"@language": "ca"
}
],
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/accommodationTypes.json#Tent",
"@type": "dfc-p:ProductType"
},
{
"@id": "http://static.datafoodconsortium.org/data/accommodationTypes.json#Caravan",
"rdfs:label": [
{
"@value": "Caravan",
"@language": "en"
},
{
"@value": "Caravan",
"@language": "ar"
},
{
"@value": "Caravan",
"@language": "ku"
},
{
"@value": "Caravan",
"@language": "es"
},
{
"@value": "Caravan",
"@language": "it"
},
{
"@value": "Wohnwagen",
"@language": "de"
},
{
"@value": "Caravan",
"@language": "sw"
},
{
"@value": "Caravana",
"@language": "pt"
},
{
"@value": "Caravan",
"@language": "oc"
},
{
"@value": "Караван",
"@language": "ru"
},
{
"@value": "Caravan",
"@language": "cy"
},
{
"@value": "キャラバン",
"@language": "ja"
},
{
"@value": "Amharc ar gach eolas",
"@language": "ga"
},
{
"@value": "कारवां",
"@language": "hi"
},
{
"@value": "车队",
"@language": "zh"
},
{
"@value": "Caravan",
"@language": "fr"
},
{
"@value": "Caravan",
"@language": "ca"
}
],
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/accommodationTypes.json#Caravan",
"@type": "dfc-p:ProductType"
},
{
"@id": "http://static.datafoodconsortium.org/data/accommodationTypes.json#Hostel",
"rdfs:label": [
{
"@value": "Hostel",
"@language": "en"
},
{
"@value": "Hostel",
"@language": "ar"
},
{
"@value": "Hostel",
"@language": "ku"
},
{
"@value": "Hostel",
"@language": "es"
},
{
"@value": "Ostello",
"@language": "it"
},
{
"@value": "Hostel",
"@language": "de"
},
{
"@value": "Hostel",
"@language": "sw"
},
{
"@value": "Albergue",
"@language": "pt"
},
{
"@value": "Hostel",
"@language": "oc"
},
{
"@value": "Хостел",
"@language": "ru"
},
{
"@value": "Hostel",
"@language": "cy"
},
{
"@value": "ホステル",
"@language": "ja"
},
{
"@value": "brú",
"@language": "ga"
},
{
"@value": "छात्रावास",
"@language": "hi"
},
{
"@value": "人质",
"@language": "zh"
},
{
"@value": "Hostel",
"@language": "fr"
},
{
"@value": "Hostel",
"@language": "ca"
}
],
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/accommodationTypes.json#Hostel",
"@type": "dfc-p:ProductType"
},
{
"@id": "http://static.datafoodconsortium.org/data/accommodationTypes.json#Yurt",
"rdfs:label": [
{
"@value": "Yurt",
"@language": "en"
},
{
"@value": "يوت",
"@language": "ar"
},
{
"@value": "Yurt",
"@language": "ku"
},
{
"@value": "Yurt",
"@language": "es"
},
{
"@value": "Yurt",
"@language": "it"
},
{
"@value": "Rind",
"@language": "de"
},
{
"@value": "Yurt",
"@language": "sw"
},
{
"@value": "Yurt.",
"@language": "pt"
},
{
"@value": "Yurt",
"@language": "oc"
},
{
"@value": "Юрт",
"@language": "ru"
},
{
"@value": "Yurt",
"@language": "cy"
},
{
"@value": "ユルト",
"@language": "ja"
},
{
"@value": "taiseachas aeir: fliuch",
"@language": "ga"
},
{
"@value": "युर्ट",
"@language": "hi"
},
{
"@value": "导 言",
"@language": "zh"
},
{
"@value": "Yurt",
"@language": "fr"
},
{
"@value": "Yurt",
"@language": "ca"
}
],
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/accommodationTypes.json#Yurt",
"@type": "dfc-p:ProductType"
},
{
"@id": "http://static.datafoodconsortium.org/data/accommodationTypes.json#Tipi",
"rdfs:label": [
{
"@value": "Tipi",
"@language": "en"
},
{
"@value": "Tipi",
"@language": "ar"
},
{
"@value": "Tipi",
"@language": "ku"
},
{
"@value": "Tipi",
"@language": "es"
},
{
"@value": "Tipi di",
"@language": "it"
},
{
"@value": "Tipi",
"@language": "de"
},
{
"@value": "Tipi",
"@language": "sw"
},
{
"@value": "Sugestões",
"@language": "pt"
},
{
"@value": "Tipi",
"@language": "oc"
},
{
"@value": "Советы",
"@language": "ru"
},
{
"@value": "Tipi",
"@language": "cy"
},
{
"@value": "ログイン",
"@language": "ja"
},
{
"@value": "An tSeapáin",
"@language": "ga"
},
{
"@value": "टीका",
"@language": "hi"
},
{
"@value": "注",
"@language": "zh"
},
{
"@value": "Tipi",
"@language": "fr"
},
{
"@value": "Tipi",
"@language": "ca"
}
],
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/accommodationTypes.json#Tipi",
"@type": "dfc-p:ProductType"
},
{
"@id": "http://static.datafoodconsortium.org/data/accommodationTypes.json#RV",
"rdfs:label": [
{
"@value": "RV",
"@language": "en"
},
{
"@value": "RV",
"@language": "ar"
},
{
"@value": "RV",
"@language": "ku"
},
{
"@value": "RV",
"@language": "es"
},
{
"@value": "RV",
"@language": "it"
},
{
"@value": "RV",
"@language": "de"
},
{
"@value": "RV",
"@language": "sw"
},
{
"@value": "RV",
"@language": "pt"
},
{
"@value": "RV",
"@language": "oc"
},
{
"@value": "РВ",
"@language": "ru"
},
{
"@value": "RV",
"@language": "cy"
},
{
"@value": "RVの特長",
"@language": "ja"
},
{
"@value": "RV",
"@language": "ga"
},
{
"@value": "आरवी",
"@language": "hi"
},
{
"@value": "RV",
"@language": "zh"
},
{
"@value": "RV",
"@language": "fr"
},
{
"@value": "RV",
"@language": "ca"
}
],
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/accommodationTypes.json#RV",
"@type": "dfc-p:ProductType"
}
]
}

View File

@ -2,16 +2,16 @@
"@context": {
"rdfs": "http://www.w3.org/2000/01/rdf-schema#",
"dfc-b": "http://static.datafoodconsortium.org/ontologies/DFC_BusinessOntology.owl#",
"dfc-p": "http://static.datafoodconsortium.org/ontologies/DFC_ProductOntology.owl#",
"dfc-p": "http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#",
"dfc-t": "http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#",
"dfc-u": "http://static.datafoodconsortium.org/data/units.rdf#",
"dfc-u": "http://static.datafoodconsortium.org/data/units.json#",
"dfc-p:specialize": {
"@type": "@id"
}
},
"@graph": [
{
"@id": "https://clothes/data/clothesTypes.rdf#shirt",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#shirt",
"rdfs:label": [
{
"@value": "La chemise",
@ -82,11 +82,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/clothesTypes.rdf#shirt",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/clothesTypes.json#shirt",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#belt",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#belt",
"rdfs:label": [
{
"@value": "Ceinture",
@ -157,11 +157,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#belt",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#belt",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#childrens-clothing",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#childrens-clothing",
"rdfs:label": [
{
"@value": "Vêtements pour enfants",
@ -232,11 +232,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#childrens-clothing",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#childrens-clothing",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#coat",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#coat",
"rdfs:label": [
{
"@value": "Manteau",
@ -307,11 +307,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#coat",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#coat",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#dress",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#dress",
"rdfs:label": [
{
"@value": "Robe",
@ -382,11 +382,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#womens",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#womens",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#shoes",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#shoes",
"rdfs:label": [
{
"@value": "Des chaussures",
@ -457,11 +457,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#footwear",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#footwear",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#boots",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#boots",
"rdfs:label": [
{
"@value": "Bottes",
@ -532,11 +532,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#footwear",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#footwear",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#gown",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#gown",
"rdfs:label": [
{
"@value": "Robe",
@ -607,11 +607,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#gown",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#gown",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#hat",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#hat",
"rdfs:label": [
{
"@value": "Chapeau",
@ -682,11 +682,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#hat",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#hat",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#hosiery",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#hosiery",
"rdfs:label": [
{
"@value": "Hosiery",
@ -757,11 +757,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#hosiery",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#hosiery",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#jacket",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#jacket",
"rdfs:label": [
{
"@value": "Veste",
@ -832,11 +832,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#jacket",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#jacket",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#jeans",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#jeans",
"rdfs:label": [
{
"@value": "Jeans",
@ -907,11 +907,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#jeans",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#jeans",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#mask",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#mask",
"rdfs:label": [
{
"@value": "Masquer",
@ -982,11 +982,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#mask",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#mask",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#neckwear",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#neckwear",
"rdfs:label": [
{
"@value": "Vêtements de cou",
@ -1057,11 +1057,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#neckwear",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#neckwear",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#scarf",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#scarf",
"rdfs:label": [
{
"@value": "Écharpe",
@ -1132,11 +1132,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#neckwear",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#neckwear",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#suit",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#suit",
"rdfs:label": [
{
"@value": "Costume",
@ -1207,11 +1207,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#suit",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#suit",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#poncho",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#poncho",
"rdfs:label": [
{
"@value": "Poncho",
@ -1282,11 +1282,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#poncho",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#poncho",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#cloak",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#cloak",
"rdfs:label": [
{
"@value": "Manteau",
@ -1357,11 +1357,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#cloak",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#cloak",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#sari",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#sari",
"rdfs:label": [
{
"@value": "Sari",
@ -1432,11 +1432,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#womens",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#womens",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#sash",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#sash",
"rdfs:label": [
{
"@value": "Ceinture",
@ -1507,11 +1507,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#sash",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#sash",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#shawl",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#shawl",
"rdfs:label": [
{
"@value": "Châle",
@ -1582,11 +1582,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#womens",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#womens",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#skirt",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#skirt",
"rdfs:label": [
{
"@value": "Jupe",
@ -1657,11 +1657,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#womens",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#womens",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#trousers",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#trousers",
"rdfs:label": [
{
"@value": "Pantalon",
@ -1732,11 +1732,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#trousers",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#trousers",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#shorts",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#shorts",
"rdfs:label": [
{
"@value": "Shorts",
@ -1807,11 +1807,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#shorts",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#shorts",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#underwear",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#underwear",
"rdfs:label": [
{
"@value": "Sous-vêtement",
@ -1882,11 +1882,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#underwear",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#underwear",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#socks",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#socks",
"rdfs:label": [
{
"@value": "Des chaussettes",
@ -1957,11 +1957,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#footwear",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#footwear",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#helmet",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#helmet",
"rdfs:label": [
{
"@value": "Casque",
@ -2032,11 +2032,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#helmet",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#helmet",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#gloves",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#gloves",
"rdfs:label": [
{
"@value": "Gants",
@ -2107,11 +2107,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#gloves",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#gloves",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#kurta",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#kurta",
"rdfs:label": [
{
"@value": "Kurta",
@ -2182,11 +2182,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#kurta",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#kurta",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#sherwani",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#sherwani",
"rdfs:label": [
{
"@value": "Sherwani",
@ -2257,11 +2257,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#mens",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#mens",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#shalwar-kameez",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#shalwar-kameez",
"rdfs:label": [
{
"@value": "Shalwar Kameez",
@ -2332,11 +2332,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#womens",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#womens",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#cheongsam",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#cheongsam",
"rdfs:label": [
{
"@value": "Cheongsam",
@ -2407,11 +2407,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#womens",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#womens",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#áo-bà-ba",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#áo-bà-ba",
"rdfs:label": [
{
"@value": "Áo bà ba",
@ -2482,11 +2482,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#áo-bà-ba",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#áo-bà-ba",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#áo-dài",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#áo-dài",
"rdfs:label": [
{
"@value": "Áo dài",
@ -2557,11 +2557,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#áo-dài",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#áo-dài",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#halter-top",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#halter-top",
"rdfs:label": [
{
"@value": "Halter haut",
@ -2632,11 +2632,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#womens",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#womens",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#sandals",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#sandals",
"rdfs:label": [
{
"@value": "Des sandales",
@ -2707,11 +2707,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#footwear",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#footwear",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#slippers",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#slippers",
"rdfs:label": [
{
"@value": "Chaussons",
@ -2782,11 +2782,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#footwear",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#footwear",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://clothes/data/clothesTypes.rdf#kilt",
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#kilt",
"rdfs:label": [
{
"@value": "Kilt",
@ -2857,7 +2857,157 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://clothes/data/toolTypes.rdf#kilt",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#kilt",
"@type": "dfc-p:ProductType"
},
{
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#apron",
"rdfs:label": [
{
"@value": "Apron",
"@language": "en"
},
{
"@value": "Apron",
"@language": "ar"
},
{
"@value": "Apron",
"@language": "ku"
},
{
"@value": "Apron",
"@language": "es"
},
{
"@value": "Grembiule",
"@language": "it"
},
{
"@value": "Apres",
"@language": "de"
},
{
"@value": "Apron",
"@language": "sw"
},
{
"@value": "Avental",
"@language": "pt"
},
{
"@value": "Apron",
"@language": "oc"
},
{
"@value": "Абон",
"@language": "ru"
},
{
"@value": "Apron",
"@language": "cy"
},
{
"@value": "エプロン",
"@language": "ja"
},
{
"@value": "An tAthrú",
"@language": "ga"
},
{
"@value": "एप्रन",
"@language": "hi"
},
{
"@value": "环境",
"@language": "zh"
},
{
"@value": "Apron",
"@language": "fr"
},
{
"@value": "Apron",
"@language": "ca"
}
],
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#apron",
"@type": "dfc-p:ProductType"
},
{
"@id": "http://static.datafoodconsortium.org/data/clothesTypes.json#corset",
"rdfs:label": [
{
"@value": "Corset",
"@language": "en"
},
{
"@value": "Corset",
"@language": "ar"
},
{
"@value": "Corset",
"@language": "ku"
},
{
"@value": "Corset",
"@language": "es"
},
{
"@value": "Corse",
"@language": "it"
},
{
"@value": "Korsett",
"@language": "de"
},
{
"@value": "Corset",
"@language": "sw"
},
{
"@value": "Espartilho",
"@language": "pt"
},
{
"@value": "Corset",
"@language": "oc"
},
{
"@value": "Корсет",
"@language": "ru"
},
{
"@value": "Corset",
"@language": "cy"
},
{
"@value": "コルセット",
"@language": "ja"
},
{
"@value": "Sraith",
"@language": "ga"
},
{
"@value": "कोर्सेट",
"@language": "hi"
},
{
"@value": "Cset",
"@language": "zh"
},
{
"@value": "Corset",
"@language": "fr"
},
{
"@value": "Corset",
"@language": "ca"
}
],
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/toolTypes.rdf#corset",
"@type": "dfc-p:ProductType"
}
]

File diff suppressed because it is too large

View File

@ -2,16 +2,16 @@
"@context": {
"rdfs": "http://www.w3.org/2000/01/rdf-schema#",
"dfc-b": "http://static.datafoodconsortium.org/ontologies/DFC_BusinessOntology.owl#",
"dfc-p": "http://static.datafoodconsortium.org/ontologies/DFC_ProductOntology.owl#",
"dfc-p": "http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#",
"dfc-t": "http://static.datafoodconsortium.org/ontologies/DFC_TechnicalOntology.owl#",
"dfc-u": "http://static.datafoodconsortium.org/data/units.rdf#",
"dfc-u": "http://static.datafoodconsortium.org/data/units.json#",
"dfc-p:specialize": {
"@type": "@id"
}
},
"@graph": [
{
"@id": "https://medical/data/medicalTypes.rdf#gas-mask",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#gas-mask",
"rdfs:label": [
{
"@value": "Gas Mask",
@ -82,11 +82,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#body-protection",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#body-protection",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#gas-mask-filter",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#gas-mask-filter",
"rdfs:label": [
{
"@value": "Gas Mask Filter",
@ -157,11 +157,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#body-protection",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#body-protection",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#bandages",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#bandages",
"rdfs:label": [
{
"@value": "Bandages",
@ -232,11 +232,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#bandages",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#bandages",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#gauze-wrap",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#gauze-wrap",
"rdfs:label": [
{
"@value": "Gauze Wrap",
@ -307,11 +307,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#bandages",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#bandages",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#gauze-pad",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#gauze-pad",
"rdfs:label": [
{
"@value": "Gauze Pad",
@ -382,11 +382,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#bandages",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#bandages",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#nonstick-pad",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#nonstick-pad",
"rdfs:label": [
{
"@value": "Nonstick Pad",
@ -457,11 +457,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#bandages",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#bandages",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#triangle-bandage",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#triangle-bandage",
"rdfs:label": [
{
"@value": "Triangle Bandage",
@ -532,11 +532,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#bandages",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#bandages",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#wound-closure-strip",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#wound-closure-strip",
"rdfs:label": [
{
"@value": "Wound Closure Strip",
@ -607,11 +607,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#bandages",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#bandages",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#paper-tape",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#paper-tape",
"rdfs:label": [
{
"@value": "Paper Tape",
@ -682,11 +682,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#tape",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#tape",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#plastic-tape",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#plastic-tape",
"rdfs:label": [
{
"@value": "Plastic Tape",
@ -757,11 +757,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#tape",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#tape",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#examination-gloves",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#examination-gloves",
"rdfs:label": [
{
"@value": "Examination Gloves",
@ -832,11 +832,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#gloves",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#gloves",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#stick-on-bandage",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#stick-on-bandage",
"rdfs:label": [
{
"@value": "Stick-on Bandage",
@ -907,11 +907,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#bandages",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#bandages",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#saline-solution",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#saline-solution",
"rdfs:label": [
{
"@value": "Saline Solution",
@ -982,11 +982,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#fluid",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#fluid",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#antibiotic-ointment",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#antibiotic-ointment",
"rdfs:label": [
{
"@value": "Antibiotic Ointment",
@ -1057,11 +1057,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#medicine",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#medicine",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#anti-hemorrhagic-agent",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#anti-hemorrhagic-agent",
"rdfs:label": [
{
"@value": "Anti-hemorrhagic Agent",
@ -1132,11 +1132,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#fluids",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#fluids",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#sunblock",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#sunblock",
"rdfs:label": [
{
"@value": "Sunblock",
@ -1207,11 +1207,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#body-protection",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#body-protection",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#bandage-shears",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#bandage-shears",
"rdfs:label": [
{
"@value": "Bandage Shears",
@ -1282,11 +1282,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#bandages",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#bandages",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#tweezers",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#tweezers",
"rdfs:label": [
{
"@value": "Tweezers",
@ -1357,11 +1357,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#medical-tools",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#medical-tools",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#protein-bar",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#protein-bar",
"rdfs:label": [
{
"@value": "Protein Bar",
@ -1432,11 +1432,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#energy",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#energy",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#bandanna",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#bandanna",
"rdfs:label": [
{
"@value": "Bandanna",
@ -1507,11 +1507,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#body-protection",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#body-protection",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#water-bottle",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#water-bottle",
"rdfs:label": [
{
"@value": "Water Bottle",
@ -1582,11 +1582,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#fluids",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#fluids",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#ice-pack",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#ice-pack",
"rdfs:label": [
{
"@value": "Ice Pack",
@ -1657,11 +1657,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#anti-inflamatory",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#anti-inflamatory",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#messenger-bag",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#messenger-bag",
"rdfs:label": [
{
"@value": "Messenger Bag",
@ -1732,11 +1732,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#bag",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#bag",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#glucose-tablets",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#glucose-tablets",
"rdfs:label": [
{
"@value": "Glucose tablets",
@ -1807,11 +1807,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#energy",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#energy",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#liquid-antacid-water-mixture",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#liquid-antacid-water-mixture",
"rdfs:label": [
{
"@value": "Liquid Antacid Water Mixture",
@ -1882,11 +1882,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#fluids",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#fluids",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#re-hydration-mixture",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#re-hydration-mixture",
"rdfs:label": [
{
"@value": "Re-hydration Mixture",
@ -1957,11 +1957,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#fluids",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#fluids",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#ear-plugs",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#ear-plugs",
"rdfs:label": [
{
"@value": "Ear Plugs",
@ -2032,11 +2032,11 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#body-protection",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#body-protection",
"@type": "dfc-p:ProductType"
},
{
"@id": "https://medical/data/medicalTypes.rdf#cpr-mask",
"@id": "http://static.datafoodconsortium.org/data/medicalTypes.json#cpr-mask",
"rdfs:label": [
{
"@value": "CPR Mask",
@ -2107,7 +2107,7 @@
"@language": "ca"
}
],
"dfc-p:specialize": "https://medical/data/medicalTypes.rdf#mask",
"dfc-p:specialize": "http://static.datafoodconsortium.org/data/medicalTypes.json#mask",
"@type": "dfc-p:ProductType"
}
]

View File

@ -0,0 +1,23 @@
<?xml version="1.0" encoding="utf-8" ?>
<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:foaf="http://xmlns.com/foaf/0.1/"
xmlns:owl="http://www.w3.org/2002/07/owl#">
<foaf:Person rdf:about="http://static.datafoodconsortium.org/data/publication.rdf#simonL">
<foaf:familyName xml:lang="fr">Louvet</foaf:familyName>
<foaf:firstName xml:lang="fr">Simon</foaf:firstName>
<owl:sameAs rdf:resource="https://orcid.org/0000-0002-3528-6577"/>
</foaf:Person>
<foaf:Person rdf:about="http://static.datafoodconsortium.org/data/publication.rdf#rachelA">
<foaf:familyName xml:lang="fr">Arnould</foaf:familyName>
<foaf:firstName xml:lang="fr">Rachel</foaf:firstName>
</foaf:Person>
<foaf:Person rdf:about="http://static.datafoodconsortium.org/data/publication.rdf#bernardC">
<foaf:familyName xml:lang="fr">Chabot</foaf:familyName>
<foaf:firstName xml:lang="fr">Bernard</foaf:firstName>
</foaf:Person>
<foaf:Organization rdf:about="http://static.datafoodconsortium.org/data/publication.rdf#dataFoodConsortium">
<foaf:homepage rdf:resource="http://static.datafoodconsortium.org/"/>
<foaf:name xml:lang="fr">Data Food Consortium</foaf:name>
</foaf:Organization>
</rdf:RDF>

File diff suppressed because it is too large

View File

@ -1,6 +1,6 @@
{
"@context":{
"dfc-p": "http://static.datafoodconsortium.org/ontologies/dfc_ProductGlossary.owl#",
"dfc-p": "http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#",
"dfc-u":"http://static.datafoodconsortium.org/data/units.rdf#"
},
"@graph":[

23 ontology/units.rdf 100644
View File

@ -0,0 +1,23 @@
<?xml version="1.0" encoding="utf-8" ?>
<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#"
xmlns:dfc-u="http://static.datafoodconsortium.org/data/units.rdf#"
xmlns:dfc-b="http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#">
<dfc-b:Unit rdf:about="http://static.datafoodconsortium.org/data/unit.rdf#kg">
<rdfs:label rdf:datatype="http://www.w3.org/2001/XMLSchema#string">kilogramme</rdfs:label>
</dfc-b:Unit>
<dfc-b:Unit rdf:about="http://static.datafoodconsortium.org/data/unit.rdf#u">
<rdfs:label rdf:datatype="http://www.w3.org/2001/XMLSchema#string">unité</rdfs:label>
</dfc-b:Unit>
<dfc-b:Unit rdf:about="http://static.datafoodconsortium.org/data/unit.rdf#g">
<rdfs:label rdf:datatype="http://www.w3.org/2001/XMLSchema#string">gramme</rdfs:label>
</dfc-b:Unit>
<dfc-b:Unit rdf:about="http://static.datafoodconsortium.org/data/unit.rdf#l">
<rdfs:label rdf:datatype="http://www.w3.org/2001/XMLSchema#string">litre</rdfs:label>
</dfc-b:Unit>
</rdf:RDF>
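
The new ontology/units.rdf file above is plain RDF/XML, so its unit labels can be read with the standard library alone. A minimal illustrative sketch, not part of this diff; the file path is assumed relative to the repository root:

import xml.etree.ElementTree as ET

RDF = '{http://www.w3.org/1999/02/22-rdf-syntax-ns#}'
RDFS = '{http://www.w3.org/2000/01/rdf-schema#}'
DFC_B = '{http://static.datafoodconsortium.org/ontologies/DFC_ProductGlossary.owl#}'

tree = ET.parse('ontology/units.rdf')
for unit in tree.getroot().iter(DFC_B + 'Unit'):
    about = unit.attrib.get(RDF + 'about', '')
    label = unit.findtext(RDFS + 'label', default='')
    # prints e.g. "kg -> kilogramme"
    print(about.split('#')[-1], '->', label)
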

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Timeline"
@ -34,12 +34,14 @@ from blocking import outboxUndoBlock
from blocking import outboxMute
from blocking import outboxUndoMute
from media import replaceYouTube
from media import replaceTwitter
from media import getMediaPath
from media import createMediaDirs
from inbox import inboxUpdateIndex
from announce import outboxAnnounce
from announce import outboxUndoAnnounce
from follow import outboxUndoFollow
from follow import followerApprovalActive
from skills import outboxSkills
from availability import outboxAvailability
from like import outboxLike
@ -49,6 +51,7 @@ from bookmarks import outboxUndoBookmark
from delete import outboxDelete
from shares import outboxShareUpload
from shares import outboxUndoShareUpload
from webapp_post import individualPostAsHtml
def _outboxPersonReceiveUpdate(recentPostsCache: {},
@ -189,12 +192,17 @@ def postMessageToOutbox(session, translate: {},
personCache: {}, allowDeletion: bool,
proxyType: str, version: str, debug: bool,
YTReplacementDomain: str,
twitterReplacementDomain: str,
showPublishedDateOnly: bool,
allowLocalNetworkAccess: bool,
city: str, systemLanguage: str,
sharedItemsFederatedDomains: [],
sharedItemFederationTokens: {},
lowBandwidth: bool) -> bool:
lowBandwidth: bool,
signingPrivateKeyPem: str,
peertubeInstances: str, theme: str,
maxLikeCount: int,
maxRecentPosts: int) -> bool:
"""post is received by the outbox
Client to server message post
https://www.w3.org/TR/activitypub/#client-to-server-outbox-delivery
@ -281,6 +289,9 @@ def postMessageToOutbox(session, translate: {},
return False
# replace youtube, so that google gets less tracking data
replaceYouTube(messageJson, YTReplacementDomain, systemLanguage)
# replace twitter, so that twitter posts can be shown without
# having a twitter account
replaceTwitter(messageJson, twitterReplacementDomain, systemLanguage)
# https://www.w3.org/TR/activitypub/#create-activity-outbox
messageJson['object']['attributedTo'] = messageJson['actor']
if messageJson['object'].get('attachment'):
@ -318,7 +329,7 @@ def postMessageToOutbox(session, translate: {},
# generate a path for the uploaded image
mPath = getMediaPath()
mediaPath = mPath + '/' + \
createPassword(32) + '.' + fileExtension
createPassword(16).lower() + '.' + fileExtension
createMediaDirs(baseDir, mPath)
mediaFilename = baseDir + '/' + mediaPath
# move the uploaded image to its new path
@ -384,7 +395,10 @@ def postMessageToOutbox(session, translate: {},
baseDir + '/accounts/' + \
postToNickname + '@' + domain + '/.citations.txt'
if os.path.isfile(citationsFilename):
os.remove(citationsFilename)
try:
os.remove(citationsFilename)
except BaseException:
pass
# The following activity types get added to the index files
indexedActivities = (
@ -404,10 +418,13 @@ def postMessageToOutbox(session, translate: {},
if isImageMedia(session, baseDir, httpPrefix,
postToNickname, domain,
messageJson,
translate, YTReplacementDomain,
translate,
YTReplacementDomain,
twitterReplacementDomain,
allowLocalNetworkAccess,
recentPostsCache, debug, systemLanguage,
domainFull, personCache):
domainFull, personCache,
signingPrivateKeyPem):
inboxUpdateIndex('tlmedia', baseDir,
postToNickname + '@' + domain,
savedFilename, debug)
@ -423,6 +440,37 @@ def postMessageToOutbox(session, translate: {},
inboxUpdateIndex(boxNameIndex, baseDir,
postToNickname + '@' + domain,
savedFilename, debug)
# regenerate the html
useCacheOnly = False
pageNumber = 1
showIndividualPostIcons = True
manuallyApproveFollowers = \
followerApprovalActive(baseDir, postToNickname, domain)
individualPostAsHtml(signingPrivateKeyPem,
False, recentPostsCache,
maxRecentPosts,
translate, pageNumber,
baseDir, session,
cachedWebfingers,
personCache,
postToNickname, domain, port,
messageJson, None, True,
allowDeletion,
httpPrefix, __version__,
boxNameIndex,
YTReplacementDomain,
twitterReplacementDomain,
showPublishedDateOnly,
peertubeInstances,
allowLocalNetworkAccess,
theme, systemLanguage,
maxLikeCount,
boxNameIndex != 'dm',
showIndividualPostIcons,
manuallyApproveFollowers,
False, True, useCacheOnly)
if outboxAnnounce(recentPostsCache,
baseDir, messageJson, debug):
if debug:
@ -468,7 +516,8 @@ def postMessageToOutbox(session, translate: {},
messageJson, debug,
version,
sharedItemsFederatedDomains,
sharedItemFederationTokens)
sharedItemFederationTokens,
signingPrivateKeyPem)
followersThreads.append(followersThread)
if debug:
@ -592,5 +641,6 @@ def postMessageToOutbox(session, translate: {},
messageJson, debug,
version,
sharedItemsFederatedDomains,
sharedItemFederationTokens)
sharedItemFederationTokens,
signingPrivateKeyPem)
return True

256 person.py
View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "ActivityPub"
@ -37,6 +37,7 @@ from roles import setRole
from roles import setRolesFromList
from roles import getActorRolesList
from media import processMetaData
from utils import replaceUsersWithAt
from utils import removeLineEndings
from utils import removeDomainPort
from utils import getStatusNumber
@ -55,6 +56,7 @@ from utils import acctDir
from utils import getUserPaths
from utils import getGroupPaths
from utils import localActorUrl
from utils import dangerousSVG
from session import createSession
from session import getJson
from webfinger import webfingerHandle
@ -185,6 +187,117 @@ def randomizeActorImages(personJson: {}) -> None:
'/image' + randStr + '.' + existingExtension
def getActorUpdateJson(actorJson: {}) -> {}:
"""Returns the json for an Person Update
"""
pubNumber, _ = getStatusNumber()
manuallyApprovesFollowers = actorJson['manuallyApprovesFollowers']
return {
'@context': [
"https://www.w3.org/ns/activitystreams",
"https://w3id.org/security/v1",
{
"manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
"toot": "http://joinmastodon.org/ns#",
"featured":
{
"@id": "toot:featured",
"@type": "@id"
},
"featuredTags":
{
"@id": "toot:featuredTags",
"@type": "@id"
},
"alsoKnownAs":
{
"@id": "as:alsoKnownAs",
"@type": "@id"
},
"movedTo":
{
"@id": "as:movedTo",
"@type": "@id"
},
"schema": "http://schema.org#",
"PropertyValue": "schema:PropertyValue",
"value": "schema:value",
"IdentityProof": "toot:IdentityProof",
"discoverable": "toot:discoverable",
"Device": "toot:Device",
"Ed25519Signature": "toot:Ed25519Signature",
"Ed25519Key": "toot:Ed25519Key",
"Curve25519Key": "toot:Curve25519Key",
"EncryptedMessage": "toot:EncryptedMessage",
"publicKeyBase64": "toot:publicKeyBase64",
"deviceId": "toot:deviceId",
"claim":
{
"@type": "@id",
"@id": "toot:claim"
},
"fingerprintKey":
{
"@type": "@id",
"@id": "toot:fingerprintKey"
},
"identityKey":
{
"@type": "@id",
"@id": "toot:identityKey"
},
"devices":
{
"@type": "@id",
"@id": "toot:devices"
},
"messageFranking": "toot:messageFranking",
"messageType": "toot:messageType",
"cipherText": "toot:cipherText",
"suspended": "toot:suspended",
"focalPoint":
{
"@container": "@list",
"@id": "toot:focalPoint"
}
}
],
'id': actorJson['id'] + '#updates/' + pubNumber,
'type': 'Update',
'actor': actorJson['id'],
'to': ['https://www.w3.org/ns/activitystreams#Public'],
'cc': [actorJson['id'] + '/followers'],
'object': {
'id': actorJson['id'],
'type': actorJson['type'],
'icon': {
'type': 'Image',
'url': actorJson['icon']['url']
},
'image': {
'type': 'Image',
'url': actorJson['image']['url']
},
'attachment': actorJson['attachment'],
'following': actorJson['id'] + '/following',
'followers': actorJson['id'] + '/followers',
'inbox': actorJson['id'] + '/inbox',
'outbox': actorJson['id'] + '/outbox',
'featured': actorJson['id'] + '/collections/featured',
'featuredTags': actorJson['id'] + '/collections/tags',
'preferredUsername': actorJson['preferredUsername'],
'name': actorJson['name'],
'summary': actorJson['summary'],
'url': actorJson['url'],
'manuallyApprovesFollowers': manuallyApprovesFollowers,
'discoverable': actorJson['discoverable'],
'published': actorJson['published'],
'devices': actorJson['devices'],
"publicKey": actorJson['publicKey'],
}
}
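
For reference, the Update activity built by the new getActorUpdateJson() wraps the actor document roughly as sketched below. The values are made up for illustration; the real function also carries the full Mastodon-compatible @context shown above.

actorId = 'https://example.net/users/alice'   # hypothetical actor id

updateActivity = {
    '@context': ['https://www.w3.org/ns/activitystreams',
                 'https://w3id.org/security/v1'],
    'id': actorId + '#updates/1633190000',
    'type': 'Update',
    'actor': actorId,
    'to': ['https://www.w3.org/ns/activitystreams#Public'],
    'cc': [actorId + '/followers'],
    'object': {
        'id': actorId,
        'type': 'Person',
        'preferredUsername': 'alice'
    }
}

# the actor announces changes to its own profile
assert updateActivity['actor'] == updateActivity['object']['id']
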
def getDefaultPersonContext() -> str:
"""Gets the default actor context
"""
@ -702,7 +815,7 @@ def personUpgradeActor(baseDir: str, personJson: {},
# update domain/@nickname in actors cache
actorCacheFilename = \
baseDir + '/accounts/cache/actors/' + \
personJson['id'].replace('/users/', '/@').replace('/', '#') + \
replaceUsersWithAt(personJson['id']).replace('/', '#') + \
'.json'
if os.path.isfile(actorCacheFilename):
saveJson(personJson, actorCacheFilename)
@ -717,7 +830,7 @@ def personLookup(domain: str, path: str, baseDir: str) -> {}:
isSharedInbox = False
if path == '/inbox' or path == '/users/inbox' or path == '/sharedInbox':
# shared inbox actor on @domain@domain
path = '/users/' + domain
path = '/users/inbox'
isSharedInbox = True
else:
notPersonLookup = ('/inbox', '/outbox', '/outboxarchive',
@ -741,7 +854,8 @@ def personLookup(domain: str, path: str, baseDir: str) -> {}:
if not os.path.isfile(filename):
return None
personJson = loadJson(filename)
personUpgradeActor(baseDir, personJson, handle, filename)
if not isSharedInbox:
personUpgradeActor(baseDir, personJson, handle, filename)
# if not personJson:
# personJson={"user": "unknown"}
return personJson
@ -917,10 +1031,16 @@ def suspendAccount(baseDir: str, nickname: str, domain: str) -> None:
saltFilename = acctDir(baseDir, nickname, domain) + '/.salt'
if os.path.isfile(saltFilename):
os.remove(saltFilename)
try:
os.remove(saltFilename)
except BaseException:
pass
tokenFilename = acctDir(baseDir, nickname, domain) + '/.token'
if os.path.isfile(tokenFilename):
os.remove(tokenFilename)
try:
os.remove(tokenFilename)
except BaseException:
pass
suspendedFilename = baseDir + '/accounts/suspended.txt'
if os.path.isfile(suspendedFilename):
@ -1023,17 +1143,32 @@ def removeAccount(baseDir: str, nickname: str,
if os.path.isdir(baseDir + '/accounts/' + handle):
shutil.rmtree(baseDir + '/accounts/' + handle)
if os.path.isfile(baseDir + '/accounts/' + handle + '.json'):
os.remove(baseDir + '/accounts/' + handle + '.json')
try:
os.remove(baseDir + '/accounts/' + handle + '.json')
except BaseException:
pass
if os.path.isfile(baseDir + '/wfendpoints/' + handle + '.json'):
os.remove(baseDir + '/wfendpoints/' + handle + '.json')
try:
os.remove(baseDir + '/wfendpoints/' + handle + '.json')
except BaseException:
pass
if os.path.isfile(baseDir + '/keys/private/' + handle + '.key'):
os.remove(baseDir + '/keys/private/' + handle + '.key')
try:
os.remove(baseDir + '/keys/private/' + handle + '.key')
except BaseException:
pass
if os.path.isfile(baseDir + '/keys/public/' + handle + '.pem'):
os.remove(baseDir + '/keys/public/' + handle + '.pem')
try:
os.remove(baseDir + '/keys/public/' + handle + '.pem')
except BaseException:
pass
if os.path.isdir(baseDir + '/sharefiles/' + nickname):
shutil.rmtree(baseDir + '/sharefiles/' + nickname)
if os.path.isfile(baseDir + '/wfdeactivated/' + handle + '.json'):
os.remove(baseDir + '/wfdeactivated/' + handle + '.json')
try:
os.remove(baseDir + '/wfdeactivated/' + handle + '.json')
except BaseException:
pass
if os.path.isdir(baseDir + '/sharefilesdeactivated/' + nickname):
shutil.rmtree(baseDir + '/sharefilesdeactivated/' + nickname)
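
Several hunks in this diff wrap os.remove() in try/except so that account removal does not abort if a file disappears between the isfile() check and the unlink. A hypothetical helper showing the same defensive pattern (remove_if_present is not a function in the codebase):

import os

def remove_if_present(path: str) -> None:
    # ignore races where the file vanishes before os.remove runs;
    # the diff catches BaseException, though OSError would be narrower
    if os.path.isfile(path):
        try:
            os.remove(path)
        except BaseException:
            pass

remove_if_present('/tmp/epicyon-example.json')
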
@ -1215,7 +1350,8 @@ def _detectUsersPath(url: str) -> str:
def getActorJson(hostDomain: str, handle: str, http: bool, gnunet: bool,
debug: bool, quiet: bool = False) -> ({}, {}):
debug: bool, quiet: bool,
signingPrivateKeyPem: str) -> ({}, {}):
"""Returns the actor json
"""
if debug:
@ -1302,52 +1438,68 @@ def getActorJson(hostDomain: str, handle: str, http: bool, gnunet: bool,
if nickname == 'inbox':
nickname = domain
handle = nickname + '@' + domain
wfRequest = webfingerHandle(session, handle,
httpPrefix, cachedWebfingers,
None, __version__, debug,
groupAccount)
if not wfRequest:
if not quiet:
print('getActorJson Unable to webfinger ' + handle)
return None, None
if not isinstance(wfRequest, dict):
if not quiet:
print('getActorJson Webfinger for ' + handle +
' did not return a dict. ' + str(wfRequest))
return None, None
if not quiet:
pprint(wfRequest)
personUrl = None
if wfRequest.get('errors'):
if not quiet or debug:
print('getActorJson wfRequest error: ' +
str(wfRequest['errors']))
if hasUsersPath(handle):
personUrl = originalActor
else:
if debug:
print('No users path in ' + handle)
wfRequest = None
if '://' in originalActor and \
originalActor.lower().endswith('/actor'):
if debug:
print(originalActor + ' is an instance actor')
personUrl = originalActor
elif '://' in originalActor and groupAccount:
if debug:
print(originalActor + ' is a group actor')
personUrl = originalActor
else:
handle = nickname + '@' + domain
wfRequest = webfingerHandle(session, handle,
httpPrefix, cachedWebfingers,
hostDomain, __version__, debug,
groupAccount, signingPrivateKeyPem)
if not wfRequest:
if not quiet:
print('getActorJson Unable to webfinger ' + handle)
return None, None
if not isinstance(wfRequest, dict):
if not quiet:
print('getActorJson Webfinger for ' + handle +
' did not return a dict. ' + str(wfRequest))
return None, None
if not quiet:
pprint(wfRequest)
if wfRequest.get('errors'):
if not quiet or debug:
print('getActorJson wfRequest error: ' +
str(wfRequest['errors']))
if hasUsersPath(handle):
personUrl = originalActor
else:
if debug:
print('No users path in ' + handle)
return None, None
profileStr = 'https://www.w3.org/ns/activitystreams'
headersList = (
"activity+json", "ld+json", "jrd+json"
)
if not personUrl:
if not personUrl and wfRequest:
personUrl = getUserUrl(wfRequest, 0, debug)
if nickname == domain:
paths = getUserPaths()
for userPath in paths:
personUrl = personUrl.replace(userPath, '/actor/')
if not personUrl and groupAccount:
personUrl = httpPrefix + '://' + domain + '/c/' + nickname
if not personUrl:
# try single user instance
personUrl = httpPrefix + '://' + domain + '/' + nickname
headersList = (
"ld+json", "jrd+json", "activity+json"
)
if debug:
print('Trying single user instance ' + personUrl)
if '/channel/' in personUrl or '/accounts/' in personUrl:
headersList = (
"ld+json", "jrd+json", "activity+json"
@ -1360,7 +1512,7 @@ def getActorJson(hostDomain: str, handle: str, http: bool, gnunet: bool,
'Accept': headerMimeType + '; profile="' + profileStr + '"'
}
personJson = \
getJson(session, personUrl, asHeader, None,
getJson(signingPrivateKeyPem, session, personUrl, asHeader, None,
debug, __version__, httpPrefix, hostDomain, 20, quiet)
if personJson:
if not quiet:
@ -1386,12 +1538,24 @@ def getPersonAvatarUrl(baseDir: str, personUrl: str, personCache: {},
imageExtension = getImageExtensions()
for ext in imageExtension:
if os.path.isfile(avatarImagePath + '.' + ext):
return '/avatars/' + actorStr + '.' + ext
elif os.path.isfile(avatarImagePath.lower() + '.' + ext):
return '/avatars/' + actorStr.lower() + '.' + ext
imFilename = avatarImagePath + '.' + ext
imPath = '/avatars/' + actorStr + '.' + ext
if not os.path.isfile(imFilename):
imFilename = avatarImagePath.lower() + '.' + ext
imPath = '/avatars/' + actorStr.lower() + '.' + ext
if not os.path.isfile(imFilename):
continue
if ext != 'svg':
return imPath
else:
content = ''
with open(imFilename, 'r') as fp:
content = fp.read()
if not dangerousSVG(content, False):
return imPath
if personJson.get('icon'):
if personJson['icon'].get('url'):
return personJson['icon']['url']
if '.svg' not in personJson['icon']['url'].lower():
return personJson['icon']['url']
return None
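
getPersonAvatarUrl() above now refuses to return SVG avatars that fail the dangerousSVG() check from utils.py. A rough illustrative sketch of that kind of content check, not the actual implementation, which may differ:

def looksDangerousSVG(content: str) -> bool:
    # reject SVGs carrying script or event-handler payloads
    lowered = content.lower()
    riskyMarkers = ('<script', 'onload=', 'onerror=', 'javascript:')
    return any(marker in lowered for marker in riskyMarkers)

print(looksDangerousSVG('<svg onload="alert(1)"></svg>'))  # True
print(looksDangerousSVG('<svg><circle r="5"/></svg>'))     # False
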

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Core"

44 pgp.py
View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Profile Metadata"
@ -16,6 +16,7 @@ from utils import isPGPEncrypted
from utils import getFullDomain
from utils import getStatusNumber
from utils import localActorUrl
from utils import replaceUsersWithAt
from webfinger import webfingerHandle
from posts import getPersonBox
from auth import createBasicAuthHeader
@ -333,14 +334,16 @@ def _pgpEncrypt(content: str, recipientPubKey: str) -> str:
return encryptResult
def _getPGPPublicKeyFromActor(domain: str, handle: str,
def _getPGPPublicKeyFromActor(signingPrivateKeyPem: str,
domain: str, handle: str,
actorJson: {} = None) -> str:
"""Searches tags on the actor to see if there is any PGP
public key specified
"""
if not actorJson:
actorJson, asHeader = \
getActorJson(domain, handle, False, False, False, True)
getActorJson(domain, handle, False, False, False, True,
signingPrivateKeyPem)
if not actorJson:
return None
if not actorJson.get('attachment'):
@ -372,18 +375,21 @@ def hasLocalPGPkey() -> bool:
return False
def pgpEncryptToActor(domain: str, content: str, toHandle: str) -> str:
def pgpEncryptToActor(domain: str, content: str, toHandle: str,
signingPrivateKeyPem: str) -> str:
"""PGP encrypt a message to the given actor or handle
"""
# get the actor and extract the pgp public key from it
recipientPubKey = _getPGPPublicKeyFromActor(domain, toHandle)
recipientPubKey = \
_getPGPPublicKeyFromActor(signingPrivateKeyPem, domain, toHandle)
if not recipientPubKey:
return None
# encrypt using the recipient public key
return _pgpEncrypt(content, recipientPubKey)
def pgpDecrypt(domain: str, content: str, fromHandle: str) -> str:
def pgpDecrypt(domain: str, content: str, fromHandle: str,
signingPrivateKeyPem: str) -> str:
""" Encrypt using your default pgp key to the given recipient
fromHandle can be a handle or actor url
"""
@ -394,7 +400,9 @@ def pgpDecrypt(domain: str, content: str, fromHandle: str) -> str:
if containsPGPPublicKey(content):
pubKey = extractPGPPublicKey(content)
else:
pubKey = _getPGPPublicKeyFromActor(domain, content, fromHandle)
pubKey = \
_getPGPPublicKeyFromActor(signingPrivateKeyPem,
domain, content, fromHandle)
if pubKey:
_pgpImportPubKey(pubKey)
@ -449,7 +457,8 @@ def pgpPublicKeyUpload(baseDir: str, session,
domain: str, port: int,
httpPrefix: str,
cachedWebfingers: {}, personCache: {},
debug: bool, test: str) -> {}:
debug: bool, test: str,
signingPrivateKeyPem: str) -> {}:
if debug:
print('pgpPublicKeyUpload')
@ -481,7 +490,8 @@ def pgpPublicKeyUpload(baseDir: str, session,
print('Getting actor for ' + handle)
actorJson, asHeader = \
getActorJson(domain, handle, False, False, debug, True)
getActorJson(domainFull, handle, False, False, debug, True,
signingPrivateKeyPem)
if not actorJson:
if debug:
print('No actor returned for ' + handle)
@ -491,7 +501,7 @@ def pgpPublicKeyUpload(baseDir: str, session,
print('Actor for ' + handle + ' obtained')
actor = localActorUrl(httpPrefix, nickname, domainFull)
handle = actor.replace('/users/', '/@')
handle = replaceUsersWithAt(actor)
# check that this looks like the correct actor
if not actorJson.get('id'):
@ -548,7 +558,8 @@ def pgpPublicKeyUpload(baseDir: str, session,
# lookup the inbox for the To handle
wfRequest = \
webfingerHandle(session, handle, httpPrefix, cachedWebfingers,
domain, __version__, debug, False)
domain, __version__, debug, False,
signingPrivateKeyPem)
if not wfRequest:
if debug:
print('DEBUG: pgp actor update webfinger failed for ' +
@ -563,11 +574,12 @@ def pgpPublicKeyUpload(baseDir: str, session,
postToBox = 'outbox'
# get the actor inbox for the To handle
(inboxUrl, pubKeyId, pubKey,
fromPersonId, sharedInbox, avatarUrl,
displayName) = getPersonBox(baseDir, session, wfRequest, personCache,
__version__, httpPrefix, nickname,
domain, postToBox, 52025)
originDomain = domain
(inboxUrl, pubKeyId, pubKey, fromPersonId, sharedInbox, avatarUrl,
displayName, _) = getPersonBox(signingPrivateKeyPem, originDomain,
baseDir, session, wfRequest, personCache,
__version__, httpPrefix, nickname,
domain, postToBox, 35725)
if not inboxUrl:
if debug:

890 posts.py

File diff suppressed because it is too large

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "ActivityPub"

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Profile Metadata"

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Calendar"
@ -46,7 +46,10 @@ def _updatePostSchedule(baseDir: str, handle: str, httpd,
if deleteSchedulePost:
# delete extraneous scheduled posts
if os.path.isfile(postFilename):
os.remove(postFilename)
try:
os.remove(postFilename)
except BaseException:
pass
continue
# create the new index file
indexLines.append(line)
@ -110,14 +113,23 @@ def _updatePostSchedule(baseDir: str, handle: str, httpd,
httpd.projectVersion,
httpd.debug,
httpd.YTReplacementDomain,
httpd.twitterReplacementDomain,
httpd.showPublishedDateOnly,
httpd.allowLocalNetworkAccess,
httpd.city, httpd.systemLanguage,
httpd.sharedItemsFederatedDomains,
httpd.sharedItemFederationTokens,
httpd.lowBandwidth):
httpd.lowBandwidth,
httpd.signingPrivateKeyPem,
httpd.peertubeInstances,
httpd.themeName,
httpd.maxLikeCount,
httpd.maxRecentPosts):
indexLines.remove(line)
os.remove(postFilename)
try:
os.remove(postFilename)
except BaseException:
pass
continue
# move to the outbox
@ -185,7 +197,10 @@ def removeScheduledPosts(baseDir: str, nickname: str, domain: str) -> None:
scheduleIndexFilename = \
acctDir(baseDir, nickname, domain) + '/schedule.index'
if os.path.isfile(scheduleIndexFilename):
os.remove(scheduleIndexFilename)
try:
os.remove(scheduleIndexFilename)
except BaseException:
pass
# remove the scheduled posts
scheduledDir = acctDir(baseDir, nickname, domain) + '/scheduled'
if not os.path.isdir(scheduledDir):
@ -194,6 +209,9 @@ def removeScheduledPosts(baseDir: str, nickname: str, domain: str) -> None:
filePath = os.path.join(scheduledDir, scheduledPostFilename)
try:
if os.path.isfile(filePath):
os.remove(filePath)
try:
os.remove(filePath)
except BaseException:
pass
except BaseException:
pass

View File

@ -11,7 +11,7 @@
# License
# =======
#
# Copyright (C) 2020-2021 Bob Mottram <bob@freedombone.net>
# Copyright (C) 2020-2021 Bob Mottram <bob@libreserver.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by

View File

@ -3,14 +3,15 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Core"
__module_group__ = "Session"
import os
import requests
from utils import urlPermitted
from utils import isImageFile
from httpsig import createSignedHeader
import json
from socket import error as SocketError
import errno
@ -84,43 +85,23 @@ def urlExists(session, url: str, timeoutSec: int = 3,
return False
def getJson(session, url: str, headers: {}, params: {}, debug: bool,
version: str = '1.2.0', httpPrefix: str = 'https',
domain: str = 'testdomain',
timeoutSec: int = 20, quiet: bool = False) -> {}:
if not isinstance(url, str):
if debug and not quiet:
print('url: ' + str(url))
print('ERROR: getJson failed, url should be a string')
return None
sessionParams = {}
sessionHeaders = {}
if headers:
sessionHeaders = headers
if params:
sessionParams = params
sessionHeaders['User-Agent'] = 'Epicyon/' + version
if domain:
sessionHeaders['User-Agent'] += \
'; +' + httpPrefix + '://' + domain + '/'
if not session:
if not quiet:
print('WARN: getJson failed, no session specified for getJson')
return None
if debug:
HTTPConnection.debuglevel = 1
def _getJsonRequest(session, url: str, domainFull: str, sessionHeaders: {},
sessionParams: {}, timeoutSec: int,
signingPrivateKeyPem: str, quiet: bool, debug: bool) -> {}:
"""http GET for json
"""
try:
result = session.get(url, headers=sessionHeaders,
params=sessionParams, timeout=timeoutSec)
if result.status_code != 200:
if result.status_code == 401:
print('WARN: getJson Unauthorized url: ' + url)
print("WARN: getJson " + url + ' rejected by secure mode')
elif result.status_code == 403:
print('WARN: getJson Forbidden url: ' + url)
elif result.status_code == 404:
print('WARN: getJson Not Found url: ' + url)
elif result.status_code == 410:
print('WARN: getJson no longer available url: ' + url)
else:
print('WARN: getJson url: ' + url +
' failed with error code ' +
@ -151,6 +132,115 @@ def getJson(session, url: str, headers: {}, params: {}, debug: bool,
return None
def _getJsonSigned(session, url: str, domainFull: str, sessionHeaders: {},
sessionParams: {}, timeoutSec: int,
signingPrivateKeyPem: str, quiet: bool, debug: bool) -> {}:
"""Authorized fetch - a signed version of GET
"""
if not domainFull:
if debug:
print('No sending domain for signed GET')
return None
if '://' not in url:
print('Invalid url: ' + url)
return None
httpPrefix = url.split('://')[0]
toDomainFull = url.split('://')[1]
if '/' in toDomainFull:
toDomainFull = toDomainFull.split('/')[0]
if ':' in domainFull:
domain = domainFull.split(':')[0]
port = domainFull.split(':')[1]
else:
domain = domainFull
if httpPrefix == 'https':
port = 443
else:
port = 80
if ':' in toDomainFull:
toDomain = toDomainFull.split(':')[0]
toPort = toDomainFull.split(':')[1]
else:
toDomain = toDomainFull
if httpPrefix == 'https':
toPort = 443
else:
toPort = 80
if debug:
print('Signed GET domain: ' + domain + ' ' + str(port))
print('Signed GET toDomain: ' + toDomain + ' ' + str(toPort))
print('Signed GET url: ' + url)
print('Signed GET httpPrefix: ' + httpPrefix)
messageStr = ''
withDigest = False
if toDomainFull + '/' in url:
path = '/' + url.split(toDomainFull + '/')[1]
else:
path = '/actor'
contentType = 'application/activity+json'
if sessionHeaders.get('Accept'):
contentType = sessionHeaders['Accept']
signatureHeaderJson = \
createSignedHeader(None, signingPrivateKeyPem, 'actor', domain, port,
toDomain, toPort, path, httpPrefix, withDigest,
messageStr, contentType)
if debug:
print('Signed GET signatureHeaderJson ' + str(signatureHeaderJson))
# update the session headers from the signature headers
sessionHeaders['Host'] = signatureHeaderJson['host']
sessionHeaders['Date'] = signatureHeaderJson['date']
sessionHeaders['Accept'] = signatureHeaderJson['accept']
sessionHeaders['Signature'] = signatureHeaderJson['signature']
sessionHeaders['Content-Length'] = '0'
# if debug:
print('Signed GET sessionHeaders ' + str(sessionHeaders))
return _getJsonRequest(session, url, domainFull, sessionHeaders,
sessionParams, timeoutSec, None, quiet, debug)
def getJson(signingPrivateKeyPem: str,
session, url: str, headers: {}, params: {}, debug: bool,
version: str = '1.2.0', httpPrefix: str = 'https',
domain: str = 'testdomain',
timeoutSec: int = 20, quiet: bool = False) -> {}:
if not isinstance(url, str):
if debug and not quiet:
print('url: ' + str(url))
print('ERROR: getJson failed, url should be a string')
return None
sessionParams = {}
sessionHeaders = {}
if headers:
sessionHeaders = headers
if params:
sessionParams = params
sessionHeaders['User-Agent'] = 'Epicyon/' + version
if domain:
sessionHeaders['User-Agent'] += \
'; +' + httpPrefix + '://' + domain + '/'
if not session:
if not quiet:
print('WARN: getJson failed, no session specified for getJson')
return None
if debug:
HTTPConnection.debuglevel = 1
if signingPrivateKeyPem:
return _getJsonSigned(session, url, domain,
sessionHeaders, sessionParams,
timeoutSec, signingPrivateKeyPem,
quiet, debug)
else:
return _getJsonRequest(session, url, domain, sessionHeaders,
sessionParams, timeoutSec,
None, quiet, debug)
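
A usage sketch for the refactored getJson(): passing a signing key routes the request through _getJsonSigned() for authorized fetch, while None keeps the plain GET path. The domain, URL and key path below are assumptions, and the sketch presumes the session.py from this diff is importable.

import requests
from session import getJson   # the function defined above

session = requests.session()
asHeader = {'Accept': 'application/activity+json'}
url = 'https://example.net/users/alice'

# plain, unsigned GET
actorJson = getJson(None, session, url, asHeader, None, True)

# signed GET ("authorized fetch"), assuming an instance actor key exists
# signingPrivateKeyPem = open('keys/private/actor.key').read()
# actorJson = getJson(signingPrivateKeyPem, session, url, asHeader, None,
#                     True, '1.2.0', 'https', 'mydomain.example')
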
def postJson(httpPrefix: str, domainFull: str,
session, postJsonObject: {}, federationList: [],
inboxUrl: str, headers: {}, timeoutSec: int = 60,

View File

@ -2,9 +2,9 @@
name = epicyon
version = 1.3.0
author = Bob Mottram
author_email = bob@freedombone.net
author_email = bob@libreserver.org
maintainer = Bob Mottram
maintainer_email = bob@freedombone.net
maintainer_email = bob@libreserver.org
description = A modern ActivityPub compliant server implementing both S2S and C2S protocols.
long_description = file: README.md
long_description_content_type = text/markdown

174 shares.py
View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Timeline"
@ -47,7 +47,8 @@ from blocking import isBlocked
def _loadDfcIds(baseDir: str, systemLanguage: str,
productType: str) -> {}:
productType: str,
httpPrefix: str, domainFull: str) -> {}:
"""Loads the product types ontology
This is used to add an id to shared items
"""
@ -92,7 +93,10 @@ def _loadDfcIds(baseDir: str, systemLanguage: str,
if not label.get('@value'):
continue
if label['@language'] == systemLanguage:
dfcIds[label['@value'].lower()] = item['@id']
itemId = \
item['@id'].replace('http://static.datafoodconsortium.org',
httpPrefix + '://' + domainFull)
dfcIds[label['@value'].lower()] = itemId
break
return dfcIds
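
The extra httpPrefix/domainFull arguments let _loadDfcIds() rewrite the upstream DFC ontology ids onto the local instance. A minimal sketch of that rewrite with made-up domain values:

httpPrefix = 'https'
domainFull = 'myinstance.example'

item = {
    '@id': 'http://static.datafoodconsortium.org/data/medicalTypes.json#sunblock',
    'rdfs:label': [{'@value': 'Sunblock', '@language': 'en'}]
}

itemId = item['@id'].replace('http://static.datafoodconsortium.org',
                             httpPrefix + '://' + domainFull)
dfcIds = {item['rdfs:label'][0]['@value'].lower(): itemId}
# {'sunblock': 'https://myinstance.example/data/medicalTypes.json#sunblock'}
print(dfcIds)
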
@ -142,7 +146,10 @@ def removeSharedItem(baseDir: str, nickname: str, domain: str,
for ext in formats:
if sharesJson[itemID]['imageUrl'].endswith('.' + ext):
if os.path.isfile(itemIDfile + '.' + ext):
os.remove(itemIDfile + '.' + ext)
try:
os.remove(itemIDfile + '.' + ext)
except BaseException:
pass
# remove the item itself
del sharesJson[itemID]
saveJson(sharesJson, sharesFilename)
@ -193,7 +200,9 @@ def _dfcProductTypeFromCategory(baseDir: str,
def _getshareDfcId(baseDir: str, systemLanguage: str,
itemType: str, itemCategory: str,
translate: {}, dfcIds: {} = None) -> str:
translate: {},
httpPrefix: str, domainFull: str,
dfcIds: {} = None) -> str:
"""Attempts to obtain a DFC Id for the shared item,
based upon productTypes ontology.
See https://github.com/datafoodconsortium/ontology
@ -207,7 +216,8 @@ def _getshareDfcId(baseDir: str, systemLanguage: str,
itemType = itemType.replace('.', '')
return 'epicyon#' + itemType
if not dfcIds:
dfcIds = _loadDfcIds(baseDir, systemLanguage, matchedProductType)
dfcIds = _loadDfcIds(baseDir, systemLanguage, matchedProductType,
httpPrefix, domainFull)
if not dfcIds:
return ''
itemTypeLower = itemType.lower()
@ -316,7 +326,8 @@ def addShare(baseDir: str,
actor = localActorUrl(httpPrefix, nickname, domainFull)
itemID = _getValidSharedItemID(actor, displayName)
dfcId = _getshareDfcId(baseDir, systemLanguage,
itemType, itemCategory, translate)
itemType, itemCategory, translate,
httpPrefix, domainFull)
# has an image for this share been uploaded?
imageUrl = None
@ -350,7 +361,10 @@ def addShare(baseDir: str,
imageFilename, itemIDfile + '.' + ext,
city)
if moveImage:
os.remove(imageFilename)
try:
os.remove(imageFilename)
except BaseException:
pass
imageUrl = \
httpPrefix + '://' + domainFull + \
'/sharefiles/' + nickname + '/' + itemID + '.' + ext
@ -419,7 +433,10 @@ def _expireSharesForAccount(baseDir: str, nickname: str, domain: str,
formats = getImageExtensions()
for ext in formats:
if os.path.isfile(itemIDfile + '.' + ext):
os.remove(itemIDfile + '.' + ext)
try:
os.remove(itemIDfile + '.' + ext)
except BaseException:
pass
saveJson(sharesJson, sharesFilename)
@ -535,7 +552,8 @@ def sendShareViaServer(baseDir, session,
location: str, duration: str,
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str,
itemPrice: str, itemCurrency: str) -> {}:
itemPrice: str, itemCurrency: str,
signingPrivateKeyPem: str) -> {}:
"""Creates an item share via c2s
"""
if not session:
@ -585,7 +603,8 @@ def sendShareViaServer(baseDir, session,
wfRequest = \
webfingerHandle(session, handle, httpPrefix,
cachedWebfingers,
fromDomain, projectVersion, debug, False)
fromDomain, projectVersion, debug, False,
signingPrivateKeyPem)
if not wfRequest:
if debug:
print('DEBUG: share webfinger failed for ' + handle)
@ -598,13 +617,15 @@ def sendShareViaServer(baseDir, session,
postToBox = 'outbox'
# get the actor inbox for the To handle
(inboxUrl, pubKeyId, pubKey,
fromPersonId, sharedInbox,
avatarUrl, displayName) = getPersonBox(baseDir, session, wfRequest,
personCache, projectVersion,
httpPrefix, fromNickname,
fromDomain, postToBox,
83653)
originDomain = fromDomain
(inboxUrl, pubKeyId, pubKey, fromPersonId, sharedInbox, avatarUrl,
displayName, _) = getPersonBox(signingPrivateKeyPem,
originDomain,
baseDir, session, wfRequest,
personCache, projectVersion,
httpPrefix, fromNickname,
fromDomain, postToBox,
83653)
if not inboxUrl:
if debug:
@ -652,7 +673,8 @@ def sendUndoShareViaServer(baseDir: str, session,
fromDomain: str, fromPort: int,
httpPrefix: str, displayName: str,
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str) -> {}:
debug: bool, projectVersion: str,
signingPrivateKeyPem: str) -> {}:
"""Undoes a share via c2s
"""
if not session:
@ -685,7 +707,8 @@ def sendUndoShareViaServer(baseDir: str, session,
# lookup the inbox for the To handle
wfRequest = \
webfingerHandle(session, handle, httpPrefix, cachedWebfingers,
fromDomain, projectVersion, debug, False)
fromDomain, projectVersion, debug, False,
signingPrivateKeyPem)
if not wfRequest:
if debug:
print('DEBUG: unshare webfinger failed for ' + handle)
@ -698,13 +721,15 @@ def sendUndoShareViaServer(baseDir: str, session,
postToBox = 'outbox'
# get the actor inbox for the To handle
(inboxUrl, pubKeyId, pubKey,
fromPersonId, sharedInbox,
avatarUrl, displayName) = getPersonBox(baseDir, session, wfRequest,
personCache, projectVersion,
httpPrefix, fromNickname,
fromDomain, postToBox,
12663)
originDomain = fromDomain
(inboxUrl, pubKeyId, pubKey, fromPersonId, sharedInbox, avatarUrl,
displayName, _) = getPersonBox(signingPrivateKeyPem,
originDomain,
baseDir, session, wfRequest,
personCache, projectVersion,
httpPrefix, fromNickname,
fromDomain, postToBox,
12663)
if not inboxUrl:
if debug:
@ -747,7 +772,8 @@ def sendWantedViaServer(baseDir, session,
location: str, duration: str,
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str,
itemMaxPrice: str, itemCurrency: str) -> {}:
itemMaxPrice: str, itemCurrency: str,
signingPrivateKeyPem: str) -> {}:
"""Creates a wanted item via c2s
"""
if not session:
@ -797,7 +823,8 @@ def sendWantedViaServer(baseDir, session,
wfRequest = \
webfingerHandle(session, handle, httpPrefix,
cachedWebfingers,
fromDomain, projectVersion, debug, False)
fromDomain, projectVersion, debug, False,
signingPrivateKeyPem)
if not wfRequest:
if debug:
print('DEBUG: share webfinger failed for ' + handle)
@ -810,13 +837,15 @@ def sendWantedViaServer(baseDir, session,
postToBox = 'outbox'
# get the actor inbox for the To handle
(inboxUrl, pubKeyId, pubKey,
fromPersonId, sharedInbox,
avatarUrl, displayName) = getPersonBox(baseDir, session, wfRequest,
personCache, projectVersion,
httpPrefix, fromNickname,
fromDomain, postToBox,
83653)
originDomain = fromDomain
(inboxUrl, pubKeyId, pubKey, fromPersonId, sharedInbox, avatarUrl,
displayName, _) = getPersonBox(signingPrivateKeyPem,
originDomain,
baseDir, session, wfRequest,
personCache, projectVersion,
httpPrefix, fromNickname,
fromDomain, postToBox,
23653)
if not inboxUrl:
if debug:
@ -864,7 +893,8 @@ def sendUndoWantedViaServer(baseDir: str, session,
fromDomain: str, fromPort: int,
httpPrefix: str, displayName: str,
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str) -> {}:
debug: bool, projectVersion: str,
signingPrivateKeyPem: str) -> {}:
"""Undoes a wanted item via c2s
"""
if not session:
@ -897,7 +927,8 @@ def sendUndoWantedViaServer(baseDir: str, session,
# lookup the inbox for the To handle
wfRequest = \
webfingerHandle(session, handle, httpPrefix, cachedWebfingers,
fromDomain, projectVersion, debug, False)
fromDomain, projectVersion, debug, False,
signingPrivateKeyPem)
if not wfRequest:
if debug:
print('DEBUG: unwant webfinger failed for ' + handle)
@ -910,13 +941,15 @@ def sendUndoWantedViaServer(baseDir: str, session,
postToBox = 'outbox'
# get the actor inbox for the To handle
(inboxUrl, pubKeyId, pubKey,
fromPersonId, sharedInbox,
avatarUrl, displayName) = getPersonBox(baseDir, session, wfRequest,
personCache, projectVersion,
httpPrefix, fromNickname,
fromDomain, postToBox,
12663)
originDomain = fromDomain
(inboxUrl, pubKeyId, pubKey, fromPersonId, sharedInbox, avatarUrl,
displayName, _) = getPersonBox(signingPrivateKeyPem,
originDomain,
baseDir, session, wfRequest,
personCache, projectVersion,
httpPrefix, fromNickname,
fromDomain, postToBox,
12693)
if not inboxUrl:
if debug:
@ -953,7 +986,8 @@ def sendUndoWantedViaServer(baseDir: str, session,
def getSharedItemsCatalogViaServer(baseDir, session,
nickname: str, password: str,
domain: str, port: int,
httpPrefix: str, debug: bool) -> {}:
httpPrefix: str, debug: bool,
signingPrivateKeyPem: str) -> {}:
"""Returns the shared items catalog via c2s
"""
if not session:
@ -972,8 +1006,8 @@ def getSharedItemsCatalogViaServer(baseDir, session,
url = localActorUrl(httpPrefix, nickname, domainFull) + '/catalog'
if debug:
print('Shared items catalog request to: ' + url)
catalogJson = getJson(session, url, headers, None, debug,
__version__, httpPrefix, None)
catalogJson = getJson(signingPrivateKeyPem, session, url, headers, None,
debug, __version__, httpPrefix, None)
if not catalogJson:
if debug:
print('DEBUG: GET shared items catalog failed for c2s to ' + url)
@ -1129,12 +1163,14 @@ def sharesCatalogAccountEndpoint(baseDir: str, httpPrefix: str,
sharesFileType: str) -> {}:
"""Returns the endpoint for the shares catalog of a particular account
See https://github.com/datafoodconsortium/ontology
Also the subdirectory ontology/DFC
"""
today, minPrice, maxPrice, matchPattern = _sharesCatalogParams(path)
dfcUrl = \
"http://static.datafoodconsortium.org/ontologies/DFC_FullModel.owl#"
httpPrefix + '://' + domainFull + '/ontologies/DFC_FullModel.owl#'
dfcPtUrl = \
"http://static.datafoodconsortium.org/data/productTypes.rdf#"
httpPrefix + '://' + domainFull + \
'/ontologies/DFC_ProductGlossary.rdf#'
owner = localActorUrl(httpPrefix, nickname, domainFull)
if sharesFileType == 'shares':
dfcInstanceId = owner + '/catalog'
@ -1217,12 +1253,14 @@ def sharesCatalogEndpoint(baseDir: str, httpPrefix: str,
path: str, sharesFileType: str) -> {}:
"""Returns the endpoint for the shares catalog for the instance
See https://github.com/datafoodconsortium/ontology
Also the subdirectory ontology/DFC
"""
today, minPrice, maxPrice, matchPattern = _sharesCatalogParams(path)
dfcUrl = \
"http://static.datafoodconsortium.org/ontologies/DFC_FullModel.owl#"
httpPrefix + '://' + domainFull + '/ontologies/DFC_FullModel.owl#'
dfcPtUrl = \
"http://static.datafoodconsortium.org/data/productTypes.rdf#"
httpPrefix + '://' + domainFull + \
'/ontologies/DFC_ProductGlossary.rdf#'
dfcInstanceId = httpPrefix + '://' + domainFull + '/catalog'
endpoint = {
"@context": {
@ -1323,7 +1361,8 @@ def sharesCatalogCSVEndpoint(baseDir: str, httpPrefix: str,
csvStr += str(item['DFC:quantity']) + ','
csvStr += item['DFC:price'].split(' ')[0] + ','
csvStr += '"' + item['DFC:price'].split(' ')[1] + '",'
csvStr += '"' + item['DFC:Image'] + '",'
if item.get('DFC:Image'):
csvStr += '"' + item['DFC:Image'] + '",'
description = item['DFC:description'].replace('"', "'")
csvStr += '"' + description + '",\n'
return csvStr
@ -1550,7 +1589,8 @@ def _updateFederatedSharesCache(session, sharedItemsFederatedDomains: [],
if saveJson(catalogJson, catalogFilename):
print('Downloaded shared items catalog for ' + federatedDomainFull)
sharesJson = _dfcToSharesFormat(catalogJson,
baseDir, systemLanguage)
baseDir, systemLanguage,
httpPrefix, domainFull)
if sharesJson:
sharesFilename = \
catalogsDir + '/' + federatedDomainFull + '.' + \
@ -1709,7 +1749,8 @@ def runFederatedSharesDaemon(baseDir: str, httpd, httpPrefix: str,
def _dfcToSharesFormat(catalogJson: {},
baseDir: str, systemLanguage: str) -> {}:
baseDir: str, systemLanguage: str,
httpPrefix: str, domainFull: str) -> {}:
"""Converts DFC format into the internal formal used to store shared items.
This simplifies subsequent search and display
"""
@ -1720,7 +1761,8 @@ def _dfcToSharesFormat(catalogJson: {},
dfcIds = {}
productTypesList = getCategoryTypes(baseDir)
for productType in productTypesList:
dfcIds[productType] = _loadDfcIds(baseDir, systemLanguage, productType)
dfcIds[productType] = _loadDfcIds(baseDir, systemLanguage, productType,
httpPrefix, domainFull)
currTime = int(time.time())
for item in catalogJson['DFC:supplies']:
@ -1731,7 +1773,6 @@ def _dfcToSharesFormat(catalogJson: {},
not item.get('DFC:expiryDate') or \
not item.get('DFC:quantity') or \
not item.get('DFC:price') or \
not item.get('DFC:Image') or \
not item.get('DFC:description'):
continue
@ -1780,10 +1821,13 @@ def _dfcToSharesFormat(catalogJson: {},
itemID = item['@id']
description = item['DFC:description'].split(':', 1)[1].strip()
imageUrl = ''
if item.get('DFC:Image'):
imageUrl = item['DFC:Image']
sharesJson[itemID] = {
"displayName": item['DFC:description'].split(':')[0],
"summary": description,
"imageUrl": item['DFC:Image'],
"imageUrl": imageUrl,
"itemQty": float(item['DFC:quantity']),
"dfcId": dfcId,
"itemType": itemType,
@ -1795,3 +1839,17 @@ def _dfcToSharesFormat(catalogJson: {},
"itemCurrency": item['DFC:price'].split(' ')[1]
}
return sharesJson
def shareCategoryIcon(category: str) -> str:
"""Returns unicode icon for the given category
"""
categoryIcons = {
'accommodation': '🏠',
'clothes': '👚',
'tools': '🔧',
'food': '🍏'
}
if categoryIcons.get(category):
return categoryIcons[category]
return ''
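
Example calls for the new shareCategoryIcon() helper, assuming shares.py is importable; unknown categories fall through to an empty string:

from shares import shareCategoryIcon

print(shareCategoryIcon('food'))     # 🍏
print(shareCategoryIcon('tools'))    # 🔧
print(shareCategoryIcon('vehicle'))  # '' for categories without an icon
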

View File

@ -4,7 +4,7 @@ __credits__ = ["webchk"]
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Core"

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Profile Metadata"
@ -177,7 +177,8 @@ def sendSkillViaServer(baseDir: str, session, nickname: str, password: str,
httpPrefix: str,
skill: str, skillLevelPercent: int,
cachedWebfingers: {}, personCache: {},
debug: bool, projectVersion: str) -> {}:
debug: bool, projectVersion: str,
signingPrivateKeyPem: str) -> {}:
"""Sets a skill for a person via c2s
"""
if not session:
@ -209,7 +210,8 @@ def sendSkillViaServer(baseDir: str, session, nickname: str, password: str,
wfRequest = \
webfingerHandle(session, handle, httpPrefix,
cachedWebfingers,
domain, projectVersion, debug, False)
domain, projectVersion, debug, False,
signingPrivateKeyPem)
if not wfRequest:
if debug:
print('DEBUG: skill webfinger failed for ' + handle)
@ -222,12 +224,14 @@ def sendSkillViaServer(baseDir: str, session, nickname: str, password: str,
postToBox = 'outbox'
# get the actor inbox for the To handle
(inboxUrl, pubKeyId, pubKey,
fromPersonId, sharedInbox,
avatarUrl, displayName) = getPersonBox(baseDir, session, wfRequest,
personCache, projectVersion,
httpPrefix, nickname, domain,
postToBox, 86725)
originDomain = domain
(inboxUrl, pubKeyId, pubKey, fromPersonId, sharedInbox, avatarUrl,
displayName, _) = getPersonBox(signingPrivateKeyPem,
originDomain,
baseDir, session, wfRequest,
personCache, projectVersion,
httpPrefix, nickname, domain,
postToBox, 76121)
if not inboxUrl:
if debug:

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Moderation"
@ -18,7 +18,7 @@ def instancesGraph(baseDir: str, handles: str,
proxyType: str,
port: int, httpPrefix: str,
debug: bool, projectVersion: str,
systemLanguage: str) -> str:
systemLanguage: str, signingPrivateKeyPem: str) -> str:
""" Returns a dot graph of federating instances
based upon a few sample handles.
The handles argument should contain a comma separated list
@ -54,7 +54,8 @@ def instancesGraph(baseDir: str, handles: str,
wfRequest = \
webfingerHandle(session, handle, httpPrefix,
cachedWebfingers,
domain, projectVersion, debug, False)
domain, projectVersion, debug, False,
signingPrivateKeyPem)
if not wfRequest:
return dotGraphStr + '}\n'
if not isinstance(wfRequest, dict):
@ -62,20 +63,23 @@ def instancesGraph(baseDir: str, handles: str,
str(wfRequest))
return dotGraphStr + '}\n'
(personUrl, pubKeyId, pubKey,
personId, shaedInbox,
avatarUrl, displayName) = getPersonBox(baseDir, session, wfRequest,
personCache,
projectVersion, httpPrefix,
nickname, domain, 'outbox',
27261)
originDomain = None
(personUrl, pubKeyId, pubKey, personId, shaedInbox, avatarUrl,
displayName, _) = getPersonBox(signingPrivateKeyPem,
originDomain,
baseDir, session, wfRequest,
personCache,
projectVersion, httpPrefix,
nickname, domain, 'outbox',
27261)
wordFrequency = {}
postDomains = \
getPostDomains(session, personUrl, 64, maxMentions, maxEmoji,
maxAttachments, federationList,
personCache, debug,
projectVersion, httpPrefix, domain,
wordFrequency, [], systemLanguage)
wordFrequency, [], systemLanguage,
signingPrivateKeyPem)
postDomains.sort()
for fedDomain in postDomains:
dotLineStr = ' "' + domain + '" -> "' + fedDomain + '";\n'

View File

@ -3,7 +3,7 @@ __author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.2.0"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__email__ = "bob@libreserver.org"
__status__ = "Production"
__module_group__ = "Accessibility"
@ -11,6 +11,7 @@ import os
import html
import random
import urllib.parse
from utils import removeIdEnding
from utils import isDM
from utils import isReply
from utils import camelCaseSplit
@ -489,7 +490,7 @@ def _postToSpeakerJson(baseDir: str, httpPrefix: str,
announcedHandle + '. ' + content
postId = None
if postJsonObject['object'].get('id'):
postId = postJsonObject['object']['id']
postId = removeIdEnding(postJsonObject['object']['id'])
followRequestsExist = False
followRequestsList = []

Some files were not shown because too many files have changed in this diff