Compare commits
373 Commits
main
...
alt-html-c
Author | SHA1 | Date |
---|---|---|
|
e32210f9bd | |
|
6a193f1835 | |
|
310888f447 | |
|
aa1371f043 | |
|
eba44e1485 | |
|
bc7ebacf09 | |
|
18de03cac2 | |
|
29a030bcad | |
|
ce430565cc | |
|
720354066d | |
|
f7507500b7 | |
|
74b89a5dc7 | |
|
b5e8be4074 | |
|
9038753abc | |
|
47ac4ef70c | |
|
7d60cdacb2 | |
|
0724c3468d | |
|
ccfcbe4ad4 | |
|
c625043ebb | |
|
2171fd07a1 | |
|
5d34e0d4be | |
|
66a96015cd | |
|
bd0c75ed7b | |
|
0d36569a27 | |
|
81743813f7 | |
|
f97c0a43a5 | |
|
24af23e70a | |
|
3c86807116 | |
|
13b57ff16a | |
|
b12cf702aa | |
|
f922fc338e | |
|
226996c042 | |
|
685959b976 | |
|
e004327824 | |
|
45c5ff0924 | |
|
e44dbc2497 | |
|
c3554124fa | |
|
c6032377ba | |
|
9c9ffc301d | |
|
6aa1cc8389 | |
|
5fc36f6ebb | |
|
a8906b25d7 | |
|
5a327d281e | |
|
edf0c8880e | |
|
bc575dc6c0 | |
|
2449b57005 | |
|
adebd3c3bd | |
|
b0eaa6835d | |
|
77f965162c | |
|
b9d33296a1 | |
|
be14587011 | |
|
183141ee80 | |
|
8c3ca5a69c | |
|
1a74ec6d53 | |
|
e8290d99d5 | |
|
c09596f1e2 | |
|
0accfaea39 | |
|
03044b9819 | |
|
a9fcabd366 | |
|
74547ca8d9 | |
|
2bbdbe9774 | |
|
db7043dfd2 | |
|
b280cf990f | |
|
1df8ed9c76 | |
|
8fb223e1aa | |
|
0d0d49ac89 | |
|
4bee4e236a | |
|
9efecfc28e | |
|
9d937826bc | |
|
6e21a3b84f | |
|
2302dd6a1a | |
|
fa84a01239 | |
|
2c36f0a92d | |
|
8e8dda6370 | |
|
9b3140aa1a | |
|
a153a78601 | |
|
3158c06b18 | |
|
3b23ae5927 | |
|
7fbdb2445f | |
|
7c177ad035 | |
|
adbddb95c3 | |
|
cce7941b39 | |
|
3554bf1c20 | |
|
8518c68a60 | |
|
47f93d59cb | |
|
da3ffa2b32 | |
|
0d973f07c0 | |
|
14fbed456a | |
|
2083a2d59a | |
|
3f8f60515d | |
|
7438d647e1 | |
|
a2cca64b32 | |
|
69f80f2573 | |
|
1f83cf9a0d | |
|
3bc7dfb837 | |
|
11d804bb4e | |
|
f3122480d2 | |
|
908e6b5c84 | |
|
48fbe27849 | |
|
c515b8a5bd | |
|
ece2118680 | |
|
db4cbc4121 | |
|
7abc31e5a9 | |
|
99a706c994 | |
|
b3bb866f69 | |
|
0e47940eee | |
|
cb1456d535 | |
|
805e28761f | |
|
070596e010 | |
|
041d83fec5 | |
|
2224771099 | |
|
d8daaf8a76 | |
|
595c2c7135 | |
|
2815cc3bfd | |
|
26dd7e2cca | |
|
096698ff0a | |
|
15f87f9b7c | |
|
1f895198a2 | |
|
0f42746f56 | |
|
8abadf0129 | |
|
1c781e7ebb | |
|
3c26bc6dac | |
|
42023ade49 | |
|
26e82060f1 | |
|
0401708c5d | |
|
27f797c8af | |
|
00eadf8423 | |
|
1f46af59a8 | |
|
2920890dff | |
|
98f5bab920 | |
|
f16ae830b0 | |
|
3d16d93d86 | |
|
670bbd2984 | |
|
f40739e986 | |
|
dadf850e5b | |
|
cf4d9466eb | |
|
d7da2f2e65 | |
|
0dc869f77d | |
|
cae1484599 | |
|
e86559775f | |
|
0fe9b2adcd | |
|
7c2786535e | |
|
db875e375c | |
|
f7d9864093 | |
|
d3261de520 | |
|
549faf8b85 | |
|
76a634b9dd | |
|
72b07306f9 | |
|
cfd1e7b24c | |
|
0aa42a1d71 | |
|
838d853a59 | |
|
61280ac6d0 | |
|
82a7cb2cb2 | |
|
b927863f7e | |
|
b42964ff59 | |
|
fa0bdab523 | |
|
3ff0866deb | |
|
49e052d5f0 | |
|
152d6228c7 | |
|
ce885947ca | |
|
2fd901d682 | |
|
5cd9aa8d66 | |
|
0cf0841402 | |
|
59357c0c4f | |
|
a7b094f84f | |
|
96e813181b | |
|
679c06b20e | |
|
69a24ad454 | |
|
6957578b32 | |
|
a3cbc28c1e | |
|
b257f90df2 | |
|
ae7e0a8912 | |
|
0d85534cf4 | |
|
a4c9ab3480 | |
|
dd11c9bfa8 | |
|
2e05de11c5 | |
|
d0abee719e | |
|
ce232a005b | |
|
5a28a3b222 | |
|
fd64616964 | |
|
46e2a24c96 | |
|
c53fbad688 | |
|
f7c1a70c43 | |
|
79c6653772 | |
|
02cb81aaed | |
|
483c4c48f9 | |
|
f132de9d4f | |
|
08227dac51 | |
|
ca53d2521e | |
|
c631c49d98 | |
|
100c9c557f | |
|
23301b35a5 | |
|
31a48db4a4 | |
|
fcf94fdb93 | |
|
2cf3c806ba | |
|
49d99488df | |
|
82636e7c2b | |
|
2a66ef4c5c | |
|
81a49a63ce | |
|
862526819d | |
|
8e596603d0 | |
|
dc47b9015f | |
|
cd5f9de93a | |
|
03b19ec168 | |
|
677ccb4a49 | |
|
86dfa128c6 | |
|
59f4d75d78 | |
|
d60d7d608c | |
|
9874280ce1 | |
|
ff13cefc82 | |
|
b10cd7486a | |
|
beb1442a2d | |
|
42531e476f | |
|
3a0644a2ae | |
|
c9376f0a2b | |
|
da256d24d2 | |
|
2926b690ce | |
|
5e6e2f80c5 | |
|
e42ba610c2 | |
|
13ad1e57fb | |
|
178d869877 | |
|
b7c4a14042 | |
|
fb614c675d | |
|
d1658852fb | |
|
0c28e84ead | |
|
8f21dafc6b | |
|
2d0fdc2f64 | |
|
d312da3cfe | |
|
2f2071f209 | |
|
fba6679b21 | |
|
99374a63dc | |
|
b2658ebc31 | |
|
de1bfa8ed4 | |
|
7c6d7d158e | |
|
50ae2cec61 | |
|
c7051f191f | |
|
dfffd4e296 | |
|
536a69e8ef | |
|
86c1a678dc | |
|
2e1efdbee8 | |
|
a7d5c50222 | |
|
97a5b774d4 | |
|
cc2b9735fc | |
|
c447f90ec9 | |
|
bc1ab2dc37 | |
|
cc01a4187a | |
|
083627457e | |
|
dae2f1f178 | |
|
eda542cd34 | |
|
0d9a12ac22 | |
|
84a00f3946 | |
|
8f1ded15a3 | |
|
93da5f9b72 | |
|
9dd5ac532f | |
|
30d95182d3 | |
|
77ea08b7fc | |
|
1ec334a6d9 | |
|
5acc6fe3b2 | |
|
8150fc9c11 | |
|
e438d1bd8f | |
|
61a9c5d605 | |
|
f56a96a0d3 | |
|
f7fc448d9b | |
|
5ba7bed9a1 | |
|
20e92a4220 | |
|
7ee21c5c30 | |
|
8c7071b6e3 | |
|
af919974d7 | |
|
2736b7252b | |
|
22f7f57b74 | |
|
ddaa94146f | |
|
bb31ce37cd | |
|
347626fd8a | |
|
0392e69cac | |
|
a1f677e380 | |
|
3330ed1601 | |
|
7ee5c89ff2 | |
|
886e4f2beb | |
|
ef72b93984 | |
|
d8d6751c2f | |
|
a15b2be343 | |
|
ab5a871a22 | |
|
576a72d102 | |
|
57156455d9 | |
|
0c9d753c22 | |
|
0a9fd492d3 | |
|
f17c601c88 | |
|
b3ab3b3998 | |
|
a2796e405f | |
|
fa7741a650 | |
|
3122b8b3a6 | |
|
93e4ab9337 | |
|
1500e8e02e | |
|
aa7a63e891 | |
|
e50aa8d9df | |
|
5dfdf402be | |
|
78837aaa9b | |
|
2f0e743d5d | |
|
f2c5f7e3e3 | |
|
fba9b10b09 | |
|
396556f6e9 | |
|
15f2873829 | |
|
2484b98f36 | |
|
6391d6aafa | |
|
69d4dc28f4 | |
|
68ce6119d7 | |
|
a1117c20cc | |
|
14255d691b | |
|
68f35a92fd | |
|
18e572e21f | |
|
db98be75dd | |
|
0dee8649c3 | |
|
de5f688be8 | |
|
fc8932f627 | |
|
b0e0bdbd69 | |
|
721e26a38e | |
|
c5c8be599a | |
|
75fb7c4f03 | |
|
312d368028 | |
|
4c0e099056 | |
|
416c7d1188 | |
|
0cbbba4c16 | |
|
f289ca3f63 | |
|
b5349762b4 | |
|
146d1413c3 | |
|
7d5b639d64 | |
|
a521713cae | |
|
15924d79b1 | |
|
d853ba1978 | |
|
7084a2059f | |
|
32477e867b | |
|
825fb76066 | |
|
6ec956e312 | |
|
c2404d0943 | |
|
c8e9804b4a | |
|
77210c0f41 | |
|
83dcc00ee4 | |
|
48394d06d9 | |
|
4c00980459 | |
|
0903737a64 | |
|
f94f242d38 | |
|
ec87fbf4f8 | |
|
96c139a63e | |
|
8d29ac3cbe | |
|
a5597d1dcf | |
|
5fe459e656 | |
|
f1886ddd9c | |
|
f3e0d4e17c | |
|
498d903511 | |
|
e87fd5a168 | |
|
12cccfdeb6 | |
|
966dff9921 | |
|
88b0a6aa6f | |
|
9d176ab77d | |
|
e9c0c4c3a9 | |
|
3872426f35 | |
|
121a24d432 | |
|
f94f6eb997 | |
|
e1123279c8 | |
|
e727cc4c22 | |
|
2127252ca0 | |
|
9ed343865d | |
|
e4c34ec6ad | |
|
7905c9b35d | |
|
d6e60ff3d3 | |
|
9ba729c6fd | |
|
1cf2ea73f9 | |
|
a23f47cdf3 | |
|
11883701ab | |
|
76eb19b311 | |
|
49091dea9f | |
|
5dfd647f81 | |
|
249cc1ab5a |
|
@ -7,6 +7,8 @@ __email__ = "bob@freedombone.net"
|
|||
__status__ = "Production"
|
||||
|
||||
import os
|
||||
from utils import hasUsersPath
|
||||
from utils import getFullDomain
|
||||
from utils import urlPermitted
|
||||
from utils import getDomainFromActor
|
||||
from utils import getNicknameFromActor
|
||||
|
@ -14,10 +16,10 @@ from utils import domainPermitted
|
|||
from utils import followPerson
|
||||
|
||||
|
||||
def createAcceptReject(baseDir: str, federationList: [],
|
||||
nickname: str, domain: str, port: int,
|
||||
toUrl: str, ccUrl: str, httpPrefix: str,
|
||||
objectJson: {}, acceptType: str) -> {}:
|
||||
def _createAcceptReject(baseDir: str, federationList: [],
|
||||
nickname: str, domain: str, port: int,
|
||||
toUrl: str, ccUrl: str, httpPrefix: str,
|
||||
objectJson: {}, acceptType: str) -> {}:
|
||||
"""Accepts or rejects something (eg. a follow request or offer)
|
||||
Typically toUrl will be https://www.w3.org/ns/activitystreams#Public
|
||||
and ccUrl might be a specific person favorited or repeated and
|
||||
|
@ -30,10 +32,7 @@ def createAcceptReject(baseDir: str, federationList: [],
|
|||
if not urlPermitted(objectJson['actor'], federationList):
|
||||
return None
|
||||
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
domain = domain + ':' + str(port)
|
||||
domain = getFullDomain(domain, port)
|
||||
|
||||
newAccept = {
|
||||
"@context": "https://www.w3.org/ns/activitystreams",
|
||||
|
@ -53,24 +52,24 @@ def createAccept(baseDir: str, federationList: [],
|
|||
nickname: str, domain: str, port: int,
|
||||
toUrl: str, ccUrl: str, httpPrefix: str,
|
||||
objectJson: {}) -> {}:
|
||||
return createAcceptReject(baseDir, federationList,
|
||||
nickname, domain, port,
|
||||
toUrl, ccUrl, httpPrefix,
|
||||
objectJson, 'Accept')
|
||||
return _createAcceptReject(baseDir, federationList,
|
||||
nickname, domain, port,
|
||||
toUrl, ccUrl, httpPrefix,
|
||||
objectJson, 'Accept')
|
||||
|
||||
|
||||
def createReject(baseDir: str, federationList: [],
|
||||
nickname: str, domain: str, port: int,
|
||||
toUrl: str, ccUrl: str, httpPrefix: str,
|
||||
objectJson: {}) -> {}:
|
||||
return createAcceptReject(baseDir, federationList,
|
||||
nickname, domain, port,
|
||||
toUrl, ccUrl,
|
||||
httpPrefix, objectJson, 'Reject')
|
||||
return _createAcceptReject(baseDir, federationList,
|
||||
nickname, domain, port,
|
||||
toUrl, ccUrl,
|
||||
httpPrefix, objectJson, 'Reject')
|
||||
|
||||
|
||||
def acceptFollow(baseDir: str, domain: str, messageJson: {},
|
||||
federationList: [], debug: bool) -> None:
|
||||
def _acceptFollow(baseDir: str, domain: str, messageJson: {},
|
||||
federationList: [], debug: bool) -> None:
|
||||
"""Receiving a follow Accept activity
|
||||
"""
|
||||
if not messageJson.get('object'):
|
||||
|
@ -184,10 +183,7 @@ def receiveAcceptReject(session, baseDir: str,
|
|||
if debug:
|
||||
print('DEBUG: ' + messageJson['type'] + ' has no actor')
|
||||
return False
|
||||
if '/users/' not in messageJson['actor'] and \
|
||||
'/accounts/' not in messageJson['actor'] and \
|
||||
'/channel/' not in messageJson['actor'] and \
|
||||
'/profile/' not in messageJson['actor']:
|
||||
if not hasUsersPath(messageJson['actor']):
|
||||
if debug:
|
||||
print('DEBUG: "users" or "profile" missing from actor in ' +
|
||||
messageJson['type'] + '. Assuming single user instance.')
|
||||
|
@ -206,7 +202,7 @@ def receiveAcceptReject(session, baseDir: str,
|
|||
' does not contain a nickname. ' +
|
||||
'Assuming single user instance.')
|
||||
# receive follow accept
|
||||
acceptFollow(baseDir, domain, messageJson, federationList, debug)
|
||||
_acceptFollow(baseDir, domain, messageJson, federationList, debug)
|
||||
if debug:
|
||||
print('DEBUG: Uh, ' + messageJson['type'] + ', I guess')
|
||||
return True
|
||||
|
|
172
announce.py
|
@ -6,6 +6,8 @@ __maintainer__ = "Bob Mottram"
|
|||
__email__ = "bob@freedombone.net"
|
||||
__status__ = "Production"
|
||||
|
||||
from utils import hasUsersPath
|
||||
from utils import getFullDomain
|
||||
from utils import getStatusNumber
|
||||
from utils import createOutboxDir
|
||||
from utils import urlPermitted
|
||||
|
@ -113,11 +115,7 @@ def createAnnounce(session, baseDir: str, federationList: [],
|
|||
|
||||
if ':' in domain:
|
||||
domain = domain.split(':')[0]
|
||||
fullDomain = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
fullDomain = domain + ':' + str(port)
|
||||
fullDomain = getFullDomain(domain, port)
|
||||
|
||||
statusNumber, published = getStatusNumber()
|
||||
newAnnounceId = httpPrefix + '://' + fullDomain + \
|
||||
|
@ -146,10 +144,7 @@ def createAnnounce(session, baseDir: str, federationList: [],
|
|||
announceNickname = None
|
||||
announceDomain = None
|
||||
announcePort = None
|
||||
if '/users/' in objectUrl or \
|
||||
'/accounts/' in objectUrl or \
|
||||
'/channel/' in objectUrl or \
|
||||
'/profile/' in objectUrl:
|
||||
if hasUsersPath(objectUrl):
|
||||
announceNickname = getNicknameFromActor(objectUrl)
|
||||
announceDomain, announcePort = getDomainFromActor(objectUrl)
|
||||
|
||||
|
@ -172,11 +167,7 @@ def announcePublic(session, baseDir: str, federationList: [],
|
|||
debug: bool, projectVersion: str) -> {}:
|
||||
"""Makes a public announcement
|
||||
"""
|
||||
fromDomain = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
fromDomain = domain + ':' + str(port)
|
||||
fromDomain = getFullDomain(domain, port)
|
||||
|
||||
toUrl = 'https://www.w3.org/ns/activitystreams#Public'
|
||||
ccUrl = httpPrefix + '://' + fromDomain + '/users/' + nickname + \
|
||||
|
@ -190,151 +181,6 @@ def announcePublic(session, baseDir: str, federationList: [],
|
|||
debug, projectVersion)
|
||||
|
||||
|
||||
def repeatPost(session, baseDir: str, federationList: [],
|
||||
nickname: str, domain: str, port: int, httpPrefix: str,
|
||||
announceNickname: str, announceDomain: str,
|
||||
announcePort: int, announceHttpsPrefix: str,
|
||||
announceStatusNumber: int, clientToServer: bool,
|
||||
sendThreads: [], postLog: [],
|
||||
personCache: {}, cachedWebfingers: {},
|
||||
debug: bool, projectVersion: str) -> {}:
|
||||
"""Repeats a given status post
|
||||
"""
|
||||
announcedDomain = announceDomain
|
||||
if announcePort:
|
||||
if announcePort != 80 and announcePort != 443:
|
||||
if ':' not in announcedDomain:
|
||||
announcedDomain = announcedDomain + ':' + str(announcePort)
|
||||
|
||||
objectUrl = announceHttpsPrefix + '://' + announcedDomain + '/users/' + \
|
||||
announceNickname + '/statuses/' + str(announceStatusNumber)
|
||||
|
||||
return announcePublic(session, baseDir, federationList,
|
||||
nickname, domain, port, httpPrefix,
|
||||
objectUrl, clientToServer,
|
||||
sendThreads, postLog,
|
||||
personCache, cachedWebfingers,
|
||||
debug, projectVersion)
|
||||
|
||||
|
||||
def undoAnnounce(session, baseDir: str, federationList: [],
|
||||
nickname: str, domain: str, port: int,
|
||||
toUrl: str, ccUrl: str, httpPrefix: str,
|
||||
objectUrl: str, saveToFile: bool,
|
||||
clientToServer: bool,
|
||||
sendThreads: [], postLog: [],
|
||||
personCache: {}, cachedWebfingers: {},
|
||||
debug: bool) -> {}:
|
||||
"""Undoes an announce message
|
||||
Typically toUrl will be https://www.w3.org/ns/activitystreams#Public
|
||||
and ccUrl might be a specific person whose post was repeated and the
|
||||
objectUrl is typically the url of the message which was repeated,
|
||||
corresponding to url or atomUri in createPostBase
|
||||
"""
|
||||
if not urlPermitted(objectUrl, federationList):
|
||||
return None
|
||||
|
||||
if ':' in domain:
|
||||
domain = domain.split(':')[0]
|
||||
fullDomain = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
fullDomain = domain + ':' + str(port)
|
||||
|
||||
newUndoAnnounce = {
|
||||
"@context": "https://www.w3.org/ns/activitystreams",
|
||||
'actor': httpPrefix+'://'+fullDomain+'/users/'+nickname,
|
||||
'type': 'Undo',
|
||||
'cc': [],
|
||||
'to': [toUrl],
|
||||
'object': {
|
||||
'actor': httpPrefix+'://'+fullDomain+'/users/'+nickname,
|
||||
'cc': [],
|
||||
'object': objectUrl,
|
||||
'to': [toUrl],
|
||||
'type': 'Announce'
|
||||
}
|
||||
}
|
||||
if ccUrl:
|
||||
if len(ccUrl) > 0:
|
||||
newUndoAnnounce['object']['cc'] = [ccUrl]
|
||||
|
||||
announceNickname = None
|
||||
announceDomain = None
|
||||
announcePort = None
|
||||
if '/users/' in objectUrl or \
|
||||
'/accounts/' in objectUrl or \
|
||||
'/channel/' in objectUrl or \
|
||||
'/profile/' in objectUrl:
|
||||
announceNickname = getNicknameFromActor(objectUrl)
|
||||
announceDomain, announcePort = getDomainFromActor(objectUrl)
|
||||
|
||||
if announceNickname and announceDomain:
|
||||
sendSignedJson(newUndoAnnounce, session, baseDir,
|
||||
nickname, domain, port,
|
||||
announceNickname, announceDomain, announcePort,
|
||||
'https://www.w3.org/ns/activitystreams#Public',
|
||||
httpPrefix, True, clientToServer, federationList,
|
||||
sendThreads, postLog, cachedWebfingers,
|
||||
personCache, debug)
|
||||
|
||||
return newUndoAnnounce
|
||||
|
||||
|
||||
def undoAnnouncePublic(session, baseDir: str, federationList: [],
|
||||
nickname: str, domain: str, port: int, httpPrefix: str,
|
||||
objectUrl: str, clientToServer: bool,
|
||||
sendThreads: [], postLog: [],
|
||||
personCache: {}, cachedWebfingers: {},
|
||||
debug: bool) -> {}:
|
||||
"""Undoes a public announcement
|
||||
"""
|
||||
fromDomain = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
fromDomain = domain + ':' + str(port)
|
||||
|
||||
toUrl = 'https://www.w3.org/ns/activitystreams#Public'
|
||||
ccUrl = httpPrefix + '://' + fromDomain + '/users/' + nickname + \
|
||||
'/followers'
|
||||
return undoAnnounce(session, baseDir, federationList,
|
||||
nickname, domain, port,
|
||||
toUrl, ccUrl, httpPrefix,
|
||||
objectUrl, True, clientToServer,
|
||||
sendThreads, postLog,
|
||||
personCache, cachedWebfingers,
|
||||
debug)
|
||||
|
||||
|
||||
def undoRepeatPost(session, baseDir: str, federationList: [],
|
||||
nickname: str, domain: str, port: int, httpPrefix: str,
|
||||
announceNickname: str, announceDomain: str,
|
||||
announcePort: int, announceHttpsPrefix: str,
|
||||
announceStatusNumber: int, clientToServer: bool,
|
||||
sendThreads: [], postLog: [],
|
||||
personCache: {}, cachedWebfingers: {},
|
||||
debug: bool) -> {}:
|
||||
"""Undoes a status post repeat
|
||||
"""
|
||||
announcedDomain = announceDomain
|
||||
if announcePort:
|
||||
if announcePort != 80 and announcePort != 443:
|
||||
if ':' not in announcedDomain:
|
||||
announcedDomain = announcedDomain + ':' + str(announcePort)
|
||||
|
||||
objectUrl = announceHttpsPrefix + '://' + announcedDomain + '/users/' + \
|
||||
announceNickname + '/statuses/' + str(announceStatusNumber)
|
||||
|
||||
return undoAnnouncePublic(session, baseDir, federationList,
|
||||
nickname, domain, port, httpPrefix,
|
||||
objectUrl, clientToServer,
|
||||
sendThreads, postLog,
|
||||
personCache, cachedWebfingers,
|
||||
debug)
|
||||
|
||||
|
||||
def sendAnnounceViaServer(baseDir: str, session,
|
||||
fromNickname: str, password: str,
|
||||
fromDomain: str, fromPort: int,
|
||||
|
@ -347,11 +193,7 @@ def sendAnnounceViaServer(baseDir: str, session,
|
|||
print('WARN: No session for sendAnnounceViaServer')
|
||||
return 6
|
||||
|
||||
fromDomainFull = fromDomain
|
||||
if fromPort:
|
||||
if fromPort != 80 and fromPort != 443:
|
||||
if ':' not in fromDomain:
|
||||
fromDomainFull = fromDomain + ':' + str(fromPort)
|
||||
fromDomainFull = getFullDomain(fromDomain, fromPort)
|
||||
|
||||
toUrl = 'https://www.w3.org/ns/activitystreams#Public'
|
||||
ccUrl = httpPrefix + '://' + fromDomainFull + '/users/' + fromNickname + \
|
||||
|
@ -396,7 +238,7 @@ def sendAnnounceViaServer(baseDir: str, session,
|
|||
personCache,
|
||||
projectVersion, httpPrefix,
|
||||
fromNickname, fromDomain,
|
||||
postToBox)
|
||||
postToBox, 73528)
|
||||
|
||||
if not inboxUrl:
|
||||
if debug:
|
||||
|
|
18
auth.py
|
@ -12,9 +12,10 @@ import binascii
|
|||
import os
|
||||
import secrets
|
||||
from utils import isSystemAccount
|
||||
from utils import hasUsersPath
|
||||
|
||||
|
||||
def hashPassword(password: str) -> str:
|
||||
def _hashPassword(password: str) -> str:
|
||||
"""Hash a password for storing
|
||||
"""
|
||||
salt = hashlib.sha256(os.urandom(60)).hexdigest().encode('ascii')
|
||||
|
@ -25,7 +26,7 @@ def hashPassword(password: str) -> str:
|
|||
return (salt + pwdhash).decode('ascii')
|
||||
|
||||
|
||||
def getPasswordHash(salt: str, providedPassword: str) -> str:
|
||||
def _getPasswordHash(salt: str, providedPassword: str) -> str:
|
||||
"""Returns the hash of a password
|
||||
"""
|
||||
pwdhash = hashlib.pbkdf2_hmac('sha512',
|
||||
|
@ -57,7 +58,7 @@ def constantTimeStringCheck(string1: str, string2: str) -> bool:
|
|||
return matched
|
||||
|
||||
|
||||
def verifyPassword(storedPassword: str, providedPassword: str) -> bool:
|
||||
def _verifyPassword(storedPassword: str, providedPassword: str) -> bool:
|
||||
"""Verify a stored password against one provided by user
|
||||
"""
|
||||
if not storedPassword:
|
||||
|
@ -66,7 +67,7 @@ def verifyPassword(storedPassword: str, providedPassword: str) -> bool:
|
|||
return False
|
||||
salt = storedPassword[:64]
|
||||
storedPassword = storedPassword[64:]
|
||||
pwHash = getPasswordHash(salt, providedPassword)
|
||||
pwHash = _getPasswordHash(salt, providedPassword)
|
||||
return constantTimeStringCheck(pwHash, storedPassword)
|
||||
|
||||
|
||||
|
@ -89,10 +90,7 @@ def authorizeBasic(baseDir: str, path: str, authHeader: str,
|
|||
print('DEBUG: basic auth - Authorixation header does not ' +
|
||||
'contain a space character')
|
||||
return False
|
||||
if '/users/' not in path and \
|
||||
'/accounts/' not in path and \
|
||||
'/channel/' not in path and \
|
||||
'/profile/' not in path:
|
||||
if not hasUsersPath(path):
|
||||
if debug:
|
||||
print('DEBUG: basic auth - ' +
|
||||
'path for Authorization does not contain a user')
|
||||
|
@ -137,7 +135,7 @@ def authorizeBasic(baseDir: str, path: str, authHeader: str,
|
|||
if line.startswith(nickname+':'):
|
||||
storedPassword = \
|
||||
line.split(':')[1].replace('\n', '').replace('\r', '')
|
||||
success = verifyPassword(storedPassword, providedPassword)
|
||||
success = _verifyPassword(storedPassword, providedPassword)
|
||||
if not success:
|
||||
if debug:
|
||||
print('DEBUG: Password check failed for ' + nickname)
|
||||
|
@ -159,7 +157,7 @@ def storeBasicCredentials(baseDir: str, nickname: str, password: str) -> bool:
|
|||
os.mkdir(baseDir + '/accounts')
|
||||
|
||||
passwordFile = baseDir + '/accounts/passwords'
|
||||
storeStr = nickname + ':' + hashPassword(password)
|
||||
storeStr = nickname + ':' + _hashPassword(password)
|
||||
if os.path.isfile(passwordFile):
|
||||
if nickname + ':' in open(passwordFile).read():
|
||||
with open(passwordFile, "r") as fin:
|
||||
|
|
|
@ -11,6 +11,7 @@ from webfinger import webfingerHandle
|
|||
from auth import createBasicAuthHeader
|
||||
from posts import getPersonBox
|
||||
from session import postJson
|
||||
from utils import getFullDomain
|
||||
from utils import getNicknameFromActor
|
||||
from utils import getDomainFromActor
|
||||
from utils import loadJson
|
||||
|
@ -85,11 +86,7 @@ def sendAvailabilityViaServer(baseDir: str, session,
|
|||
print('WARN: No session for sendAvailabilityViaServer')
|
||||
return 6
|
||||
|
||||
domainFull = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
domainFull = domain + ':' + str(port)
|
||||
domainFull = getFullDomain(domain, port)
|
||||
|
||||
toUrl = httpPrefix + '://' + domainFull + '/users/' + nickname
|
||||
ccUrl = httpPrefix + '://' + domainFull + '/users/' + nickname + \
|
||||
|
@ -126,7 +123,7 @@ def sendAvailabilityViaServer(baseDir: str, session,
|
|||
avatarUrl, displayName) = getPersonBox(baseDir, session, wfRequest,
|
||||
personCache, projectVersion,
|
||||
httpPrefix, nickname,
|
||||
domain, postToBox)
|
||||
domain, postToBox, 57262)
|
||||
|
||||
if not inboxUrl:
|
||||
if debug:
|
||||
|
|
24
blocking.py
|
@ -7,6 +7,8 @@ __email__ = "bob@freedombone.net"
|
|||
__status__ = "Production"
|
||||
|
||||
import os
|
||||
from utils import hasUsersPath
|
||||
from utils import getFullDomain
|
||||
from utils import removeIdEnding
|
||||
from utils import isEvil
|
||||
from utils import locatePost
|
||||
|
@ -245,10 +247,7 @@ def outboxBlock(baseDir: str, httpPrefix: str,
|
|||
if debug:
|
||||
print('DEBUG: c2s block object is not a status')
|
||||
return
|
||||
if '/users/' not in messageId and \
|
||||
'/accounts/' not in messageId and \
|
||||
'/channel/' not in messageId and \
|
||||
'/profile/' not in messageId:
|
||||
if not hasUsersPath(messageId):
|
||||
if debug:
|
||||
print('DEBUG: c2s block object has no nickname')
|
||||
return
|
||||
|
@ -265,11 +264,7 @@ def outboxBlock(baseDir: str, httpPrefix: str,
|
|||
print('WARN: unable to find nickname in ' + messageJson['object'])
|
||||
return
|
||||
domainBlocked, portBlocked = getDomainFromActor(messageJson['object'])
|
||||
domainBlockedFull = domainBlocked
|
||||
if portBlocked:
|
||||
if portBlocked != 80 and portBlocked != 443:
|
||||
if ':' not in domainBlocked:
|
||||
domainBlockedFull = domainBlocked + ':' + str(portBlocked)
|
||||
domainBlockedFull = getFullDomain(domainBlocked, portBlocked)
|
||||
|
||||
addBlock(baseDir, nickname, domain,
|
||||
nicknameBlocked, domainBlockedFull)
|
||||
|
@ -324,10 +319,7 @@ def outboxUndoBlock(baseDir: str, httpPrefix: str,
|
|||
if debug:
|
||||
print('DEBUG: c2s undo block object is not a status')
|
||||
return
|
||||
if '/users/' not in messageId and \
|
||||
'/accounts/' not in messageId and \
|
||||
'/channel/' not in messageId and \
|
||||
'/profile/' not in messageId:
|
||||
if not hasUsersPath(messageId):
|
||||
if debug:
|
||||
print('DEBUG: c2s undo block object has no nickname')
|
||||
return
|
||||
|
@ -346,11 +338,7 @@ def outboxUndoBlock(baseDir: str, httpPrefix: str,
|
|||
return
|
||||
domainObject = messageJson['object']['object']
|
||||
domainBlocked, portBlocked = getDomainFromActor(domainObject)
|
||||
domainBlockedFull = domainBlocked
|
||||
if portBlocked:
|
||||
if portBlocked != 80 and portBlocked != 443:
|
||||
if ':' not in domainBlocked:
|
||||
domainBlockedFull = domainBlocked + ':' + str(portBlocked)
|
||||
domainBlockedFull = getFullDomain(domainBlocked, portBlocked)
|
||||
|
||||
removeBlock(baseDir, nickname, domain,
|
||||
nicknameBlocked, domainBlockedFull)
|
||||
|
|
214
blog.py
|
@ -10,10 +10,12 @@ import os
|
|||
from datetime import datetime
|
||||
|
||||
from content import replaceEmojiFromTags
|
||||
from webapp_utils import htmlHeaderWithExternalStyle
|
||||
from webapp_utils import htmlHeaderWithExternalStyle, htmlHeaderWithExternalStyles
|
||||
from webapp_utils import htmlFooter
|
||||
from webapp_utils import getPostAttachmentsAsHtml
|
||||
from webapp_media import addEmbeddedElements
|
||||
from utils import getFullDomain
|
||||
from utils import getConfigParam
|
||||
from utils import getMediaFormats
|
||||
from utils import getNicknameFromActor
|
||||
from utils import getDomainFromActor
|
||||
|
@ -25,9 +27,9 @@ from newswire import rss2Header
|
|||
from newswire import rss2Footer
|
||||
|
||||
|
||||
def noOfBlogReplies(baseDir: str, httpPrefix: str, translate: {},
|
||||
nickname: str, domain: str, domainFull: str,
|
||||
postId: str, depth=0) -> int:
|
||||
def _noOfBlogReplies(baseDir: str, httpPrefix: str, translate: {},
|
||||
nickname: str, domain: str, domainFull: str,
|
||||
postId: str, depth=0) -> int:
|
||||
"""Returns the number of replies on the post
|
||||
This is recursive, so can handle replies to replies
|
||||
"""
|
||||
|
@ -65,9 +67,10 @@ def noOfBlogReplies(baseDir: str, httpPrefix: str, translate: {},
|
|||
replyPostId = replyPostId.replace('.json', '')
|
||||
if locatePost(baseDir, nickname, domain, replyPostId):
|
||||
replyPostId = replyPostId.replace('.replies', '')
|
||||
replies += 1 + noOfBlogReplies(baseDir, httpPrefix, translate,
|
||||
nickname, domain, domainFull,
|
||||
replyPostId, depth+1)
|
||||
replies += \
|
||||
1 + _noOfBlogReplies(baseDir, httpPrefix, translate,
|
||||
nickname, domain, domainFull,
|
||||
replyPostId, depth+1)
|
||||
else:
|
||||
# remove post which no longer exists
|
||||
removals.append(replyPostId)
|
||||
|
@ -85,9 +88,9 @@ def noOfBlogReplies(baseDir: str, httpPrefix: str, translate: {},
|
|||
return replies
|
||||
|
||||
|
||||
def getBlogReplies(baseDir: str, httpPrefix: str, translate: {},
|
||||
nickname: str, domain: str, domainFull: str,
|
||||
postId: str, depth=0) -> str:
|
||||
def _getBlogReplies(baseDir: str, httpPrefix: str, translate: {},
|
||||
nickname: str, domain: str, domainFull: str,
|
||||
postId: str, depth=0) -> str:
|
||||
"""Returns a string containing html blog posts
|
||||
"""
|
||||
if depth > 4:
|
||||
|
@ -135,9 +138,9 @@ def getBlogReplies(baseDir: str, httpPrefix: str, translate: {},
|
|||
continue
|
||||
with open(postFilename, "r") as postFile:
|
||||
repliesStr += postFile.read() + '\n'
|
||||
rply = getBlogReplies(baseDir, httpPrefix, translate,
|
||||
nickname, domain, domainFull,
|
||||
replyPostId, depth+1)
|
||||
rply = _getBlogReplies(baseDir, httpPrefix, translate,
|
||||
nickname, domain, domainFull,
|
||||
replyPostId, depth+1)
|
||||
if rply not in repliesStr:
|
||||
repliesStr += rply
|
||||
|
||||
|
@ -151,12 +154,13 @@ def getBlogReplies(baseDir: str, httpPrefix: str, translate: {},
|
|||
return ''
|
||||
|
||||
|
||||
def htmlBlogPostContent(authorized: bool,
|
||||
baseDir: str, httpPrefix: str, translate: {},
|
||||
nickname: str, domain: str, domainFull: str,
|
||||
postJsonObject: {},
|
||||
handle: str, restrictToDomain: bool,
|
||||
blogSeparator='<hr>') -> str:
|
||||
def _htmlBlogPostContent(authorized: bool,
|
||||
baseDir: str, httpPrefix: str, translate: {},
|
||||
nickname: str, domain: str, domainFull: str,
|
||||
postJsonObject: {},
|
||||
handle: str, restrictToDomain: bool,
|
||||
peertubeInstances: [],
|
||||
blogSeparator='<hr>') -> str:
|
||||
"""Returns the content for a single blog post
|
||||
"""
|
||||
linkedAuthor = False
|
||||
|
@ -229,7 +233,8 @@ def htmlBlogPostContent(authorized: bool,
|
|||
|
||||
if postJsonObject['object'].get('content'):
|
||||
contentStr = addEmbeddedElements(translate,
|
||||
postJsonObject['object']['content'])
|
||||
postJsonObject['object']['content'],
|
||||
peertubeInstances)
|
||||
if postJsonObject['object'].get('tag'):
|
||||
contentStr = replaceEmojiFromTags(contentStr,
|
||||
postJsonObject['object']['tag'],
|
||||
|
@ -268,9 +273,9 @@ def htmlBlogPostContent(authorized: bool,
|
|||
'/users/' + nickname + '">' + translate['About the author'] + \
|
||||
'</a></p>\n'
|
||||
|
||||
replies = noOfBlogReplies(baseDir, httpPrefix, translate,
|
||||
nickname, domain, domainFull,
|
||||
postJsonObject['object']['id'])
|
||||
replies = _noOfBlogReplies(baseDir, httpPrefix, translate,
|
||||
nickname, domain, domainFull,
|
||||
postJsonObject['object']['id'])
|
||||
|
||||
# separator between blogs should be centered
|
||||
if '<center>' not in blogSeparator:
|
||||
|
@ -287,23 +292,23 @@ def htmlBlogPostContent(authorized: bool,
|
|||
else:
|
||||
blogStr += blogSeparator + '<h1>' + translate['Replies'] + '</h1>\n'
|
||||
if not titleStr:
|
||||
blogStr += getBlogReplies(baseDir, httpPrefix, translate,
|
||||
nickname, domain, domainFull,
|
||||
postJsonObject['object']['id'])
|
||||
blogStr += _getBlogReplies(baseDir, httpPrefix, translate,
|
||||
nickname, domain, domainFull,
|
||||
postJsonObject['object']['id'])
|
||||
else:
|
||||
blogRepliesStr = getBlogReplies(baseDir, httpPrefix, translate,
|
||||
nickname, domain, domainFull,
|
||||
postJsonObject['object']['id'])
|
||||
blogRepliesStr = _getBlogReplies(baseDir, httpPrefix, translate,
|
||||
nickname, domain, domainFull,
|
||||
postJsonObject['object']['id'])
|
||||
blogStr += blogRepliesStr.replace('>' + titleStr + '<', '')
|
||||
|
||||
return blogStr
|
||||
|
||||
|
||||
def htmlBlogPostRSS2(authorized: bool,
|
||||
baseDir: str, httpPrefix: str, translate: {},
|
||||
nickname: str, domain: str, domainFull: str,
|
||||
postJsonObject: {},
|
||||
handle: str, restrictToDomain: bool) -> str:
|
||||
def _htmlBlogPostRSS2(authorized: bool,
|
||||
baseDir: str, httpPrefix: str, translate: {},
|
||||
nickname: str, domain: str, domainFull: str,
|
||||
postJsonObject: {},
|
||||
handle: str, restrictToDomain: bool) -> str:
|
||||
"""Returns the RSS version 2 feed for a single blog post
|
||||
"""
|
||||
rssStr = ''
|
||||
|
@ -330,11 +335,11 @@ def htmlBlogPostRSS2(authorized: bool,
|
|||
return rssStr
|
||||
|
||||
|
||||
def htmlBlogPostRSS3(authorized: bool,
|
||||
baseDir: str, httpPrefix: str, translate: {},
|
||||
nickname: str, domain: str, domainFull: str,
|
||||
postJsonObject: {},
|
||||
handle: str, restrictToDomain: bool) -> str:
|
||||
def _htmlBlogPostRSS3(authorized: bool,
|
||||
baseDir: str, httpPrefix: str, translate: {},
|
||||
nickname: str, domain: str, domainFull: str,
|
||||
postJsonObject: {},
|
||||
handle: str, restrictToDomain: bool) -> str:
|
||||
"""Returns the RSS version 3 feed for a single blog post
|
||||
"""
|
||||
rssStr = ''
|
||||
|
@ -358,7 +363,7 @@ def htmlBlogPostRSS3(authorized: bool,
|
|||
return rssStr
|
||||
|
||||
|
||||
def htmlBlogRemoveCwButton(blogStr: str, translate: {}) -> str:
|
||||
def _htmlBlogRemoveCwButton(blogStr: str, translate: {}) -> str:
|
||||
"""Removes the CW button from blog posts, where the
|
||||
summary field is instead used as the blog title
|
||||
"""
|
||||
|
@ -373,22 +378,39 @@ def htmlBlogRemoveCwButton(blogStr: str, translate: {}) -> str:
|
|||
def htmlBlogPost(authorized: bool,
|
||||
baseDir: str, httpPrefix: str, translate: {},
|
||||
nickname: str, domain: str, domainFull: str,
|
||||
postJsonObject: {}) -> str:
|
||||
postJsonObject: {},
|
||||
peertubeInstances: []) -> str:
|
||||
"""Returns a html blog post
|
||||
"""
|
||||
blogStr = ''
|
||||
|
||||
cssFilename = baseDir + '/epicyon-blog.css'
|
||||
if os.path.isfile(baseDir + '/blog.css'):
|
||||
cssFilename = baseDir + '/blog.css'
|
||||
blogStr = htmlHeaderWithExternalStyle(cssFilename)
|
||||
htmlBlogRemoveCwButton(blogStr, translate)
|
||||
cssFiles = []
|
||||
|
||||
blogStr += htmlBlogPostContent(authorized, baseDir,
|
||||
httpPrefix, translate,
|
||||
nickname, domain,
|
||||
domainFull, postJsonObject,
|
||||
None, False)
|
||||
# the css filename
|
||||
cssFiles.append(baseDir + '/epicyon-blog.css')
|
||||
if os.path.isfile(baseDir + '/blog.css'):
|
||||
cssFiles[0] = baseDir + '/blog.css'
|
||||
|
||||
# TODO: Clean up and remove this override
|
||||
cssFiles[0] = 'base.css'
|
||||
|
||||
# Get theme-specific css if exists - must be named '<theme-name>.css'
|
||||
themeName = getConfigParam(baseDir, 'theme')
|
||||
|
||||
themePath = f'{baseDir}/theme/{themeName}.css'
|
||||
if os.path.isfile(themePath):
|
||||
cssFiles.append('theme/' + themeName + '.css')
|
||||
|
||||
blogStr = htmlHeaderWithExternalStyles(cssFiles)
|
||||
|
||||
_htmlBlogRemoveCwButton(blogStr, translate)
|
||||
|
||||
blogStr += _htmlBlogPostContent(authorized, baseDir,
|
||||
httpPrefix, translate,
|
||||
nickname, domain,
|
||||
domainFull, postJsonObject,
|
||||
None, False,
|
||||
peertubeInstances)
|
||||
|
||||
# show rss links
|
||||
blogStr += '<p class="rssfeed">'
|
||||
|
@ -415,7 +437,8 @@ def htmlBlogPost(authorized: bool,
|
|||
def htmlBlogPage(authorized: bool, session,
|
||||
baseDir: str, httpPrefix: str, translate: {},
|
||||
nickname: str, domain: str, port: int,
|
||||
noOfItems: int, pageNumber: int) -> str:
|
||||
noOfItems: int, pageNumber: int,
|
||||
peertubeInstances: []) -> str:
|
||||
"""Returns a html blog page containing posts
|
||||
"""
|
||||
if ' ' in nickname or '@' in nickname or \
|
||||
|
@ -427,7 +450,7 @@ def htmlBlogPage(authorized: bool, session,
|
|||
if os.path.isfile(baseDir + '/epicyon.css'):
|
||||
cssFilename = baseDir + '/epicyon.css'
|
||||
blogStr = htmlHeaderWithExternalStyle(cssFilename)
|
||||
htmlBlogRemoveCwButton(blogStr, translate)
|
||||
_htmlBlogRemoveCwButton(blogStr, translate)
|
||||
|
||||
blogsIndex = baseDir + '/accounts/' + \
|
||||
nickname + '@' + domain + '/tlblogs.index'
|
||||
|
@ -443,10 +466,7 @@ def htmlBlogPage(authorized: bool, session,
|
|||
if not timelineJson:
|
||||
return blogStr + htmlFooter()
|
||||
|
||||
domainFull = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
domainFull = domain + ':' + str(port)
|
||||
domainFull = getFullDomain(domain, port)
|
||||
|
||||
# show previous and next buttons
|
||||
if pageNumber is not None:
|
||||
|
@ -474,11 +494,12 @@ def htmlBlogPage(authorized: bool, session,
|
|||
if item['type'] != 'Create':
|
||||
continue
|
||||
|
||||
blogStr += htmlBlogPostContent(authorized, baseDir,
|
||||
httpPrefix, translate,
|
||||
nickname, domain,
|
||||
domainFull, item,
|
||||
None, True)
|
||||
blogStr += _htmlBlogPostContent(authorized, baseDir,
|
||||
httpPrefix, translate,
|
||||
nickname, domain,
|
||||
domainFull, item,
|
||||
None, True,
|
||||
peertubeInstances)
|
||||
|
||||
if len(timelineJson['orderedItems']) >= noOfItems:
|
||||
blogStr += navigateStr
|
||||
|
@ -513,10 +534,7 @@ def htmlBlogPageRSS2(authorized: bool, session,
|
|||
'\n' in nickname or '\r' in nickname:
|
||||
return None
|
||||
|
||||
domainFull = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
domainFull = domain + ':' + str(port)
|
||||
domainFull = getFullDomain(domain, port)
|
||||
|
||||
blogRSS2 = ''
|
||||
if includeHeader:
|
||||
|
@ -549,11 +567,11 @@ def htmlBlogPageRSS2(authorized: bool, session,
|
|||
continue
|
||||
|
||||
blogRSS2 += \
|
||||
htmlBlogPostRSS2(authorized, baseDir,
|
||||
httpPrefix, translate,
|
||||
nickname, domain,
|
||||
domainFull, item,
|
||||
None, True)
|
||||
_htmlBlogPostRSS2(authorized, baseDir,
|
||||
httpPrefix, translate,
|
||||
nickname, domain,
|
||||
domainFull, item,
|
||||
None, True)
|
||||
|
||||
if includeHeader:
|
||||
return blogRSS2 + rss2Footer()
|
||||
|
@ -571,10 +589,7 @@ def htmlBlogPageRSS3(authorized: bool, session,
|
|||
'\n' in nickname or '\r' in nickname:
|
||||
return None
|
||||
|
||||
domainFull = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
domainFull = domain + ':' + str(port)
|
||||
domainFull = getFullDomain(domain, port)
|
||||
|
||||
blogRSS3 = ''
|
||||
|
||||
|
@ -598,34 +613,16 @@ def htmlBlogPageRSS3(authorized: bool, session,
|
|||
continue
|
||||
|
||||
blogRSS3 += \
|
||||
htmlBlogPostRSS3(authorized, baseDir,
|
||||
httpPrefix, translate,
|
||||
nickname, domain,
|
||||
domainFull, item,
|
||||
None, True)
|
||||
_htmlBlogPostRSS3(authorized, baseDir,
|
||||
httpPrefix, translate,
|
||||
nickname, domain,
|
||||
domainFull, item,
|
||||
None, True)
|
||||
|
||||
return blogRSS3
|
||||
|
||||
|
||||
def getBlogIndexesForAccounts(baseDir: str) -> {}:
|
||||
""" Get the index files for blogs for each account
|
||||
and add them to a dict
|
||||
"""
|
||||
blogIndexes = {}
|
||||
for subdir, dirs, files in os.walk(baseDir + '/accounts'):
|
||||
for acct in dirs:
|
||||
if '@' not in acct:
|
||||
continue
|
||||
if 'inbox@' in acct:
|
||||
continue
|
||||
accountDir = os.path.join(baseDir + '/accounts', acct)
|
||||
blogsIndex = accountDir + '/tlblogs.index'
|
||||
if os.path.isfile(blogsIndex):
|
||||
blogIndexes[acct] = blogsIndex
|
||||
return blogIndexes
|
||||
|
||||
|
||||
def noOfBlogAccounts(baseDir: str) -> int:
|
||||
def _noOfBlogAccounts(baseDir: str) -> int:
|
||||
"""Returns the number of blog accounts
|
||||
"""
|
||||
ctr = 0
|
||||
|
@ -639,10 +636,11 @@ def noOfBlogAccounts(baseDir: str) -> int:
|
|||
blogsIndex = accountDir + '/tlblogs.index'
|
||||
if os.path.isfile(blogsIndex):
|
||||
ctr += 1
|
||||
break
|
||||
return ctr
|
||||
|
||||
|
||||
def singleBlogAccountNickname(baseDir: str) -> str:
|
||||
def _singleBlogAccountNickname(baseDir: str) -> str:
|
||||
"""Returns the nickname of a single blog account
|
||||
"""
|
||||
for subdir, dirs, files in os.walk(baseDir + '/accounts'):
|
||||
|
@ -655,13 +653,15 @@ def singleBlogAccountNickname(baseDir: str) -> str:
|
|||
blogsIndex = accountDir + '/tlblogs.index'
|
||||
if os.path.isfile(blogsIndex):
|
||||
return acct.split('@')[0]
|
||||
break
|
||||
return None
|
||||
|
||||
|
||||
def htmlBlogView(authorized: bool,
|
||||
session, baseDir: str, httpPrefix: str,
|
||||
translate: {}, domain: str, port: int,
|
||||
noOfItems: int) -> str:
|
||||
noOfItems: int,
|
||||
peertubeInstances: []) -> str:
|
||||
"""Show the blog main page
|
||||
"""
|
||||
blogStr = ''
|
||||
|
@ -671,18 +671,15 @@ def htmlBlogView(authorized: bool,
|
|||
cssFilename = baseDir + '/epicyon.css'
|
||||
blogStr = htmlHeaderWithExternalStyle(cssFilename)
|
||||
|
||||
if noOfBlogAccounts(baseDir) <= 1:
|
||||
nickname = singleBlogAccountNickname(baseDir)
|
||||
if _noOfBlogAccounts(baseDir) <= 1:
|
||||
nickname = _singleBlogAccountNickname(baseDir)
|
||||
if nickname:
|
||||
return htmlBlogPage(authorized, session,
|
||||
baseDir, httpPrefix, translate,
|
||||
nickname, domain, port,
|
||||
noOfItems, 1)
|
||||
noOfItems, 1, peertubeInstances)
|
||||
|
||||
domainFull = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
domainFull = domain + ':' + str(port)
|
||||
domainFull = getFullDomain(domain, port)
|
||||
|
||||
for subdir, dirs, files in os.walk(baseDir + '/accounts'):
|
||||
for acct in dirs:
|
||||
|
@ -698,6 +695,7 @@ def htmlBlogView(authorized: bool,
|
|||
httpPrefix + '://' + domainFull + '/blog/' + \
|
||||
acct.split('@')[0] + '">' + acct + '</a>'
|
||||
blogStr += '</p>'
|
||||
break
|
||||
|
||||
return blogStr + htmlFooter()
|
||||
|
||||
|
|
140
blurhash.py
|
@ -39,17 +39,7 @@ alphabet = \
|
|||
alphabet_values = dict(zip(alphabet, range(len(alphabet))))
|
||||
|
||||
|
||||
def base83_decode(base83_str):
|
||||
"""
|
||||
Decodes a base83 string, as used in blurhash, to an integer.
|
||||
"""
|
||||
value = 0
|
||||
for base83_char in base83_str:
|
||||
value = value * 83 + alphabet_values[base83_char]
|
||||
return value
|
||||
|
||||
|
||||
def base83_encode(value, length):
|
||||
def _base83_encode(value, length):
|
||||
"""
|
||||
Decodes an integer to a base83 string, as used in blurhash.
|
||||
|
||||
|
@ -67,7 +57,7 @@ def base83_encode(value, length):
|
|||
return result
|
||||
|
||||
|
||||
def srgb_to_linear(value):
|
||||
def _srgb_to_linear(value):
|
||||
"""
|
||||
srgb 0-255 integer to linear 0.0-1.0 floating point conversion.
|
||||
"""
|
||||
|
@ -77,14 +67,14 @@ def srgb_to_linear(value):
|
|||
return math.pow((value + 0.055) / 1.055, 2.4)
|
||||
|
||||
|
||||
def sign_pow(value, exp):
|
||||
def _sign_pow(value, exp):
|
||||
"""
|
||||
Sign-preserving exponentiation.
|
||||
"""
|
||||
return math.copysign(math.pow(abs(value), exp), value)
|
||||
|
||||
|
||||
def linear_to_srgb(value):
|
||||
def _linear_to_srgb(value):
|
||||
"""
|
||||
linear 0.0-1.0 floating point to srgb 0-255 integer conversion.
|
||||
"""
|
||||
|
@ -94,101 +84,6 @@ def linear_to_srgb(value):
|
|||
return int((1.055 * math.pow(value, 1 / 2.4) - 0.055) * 255 + 0.5)
|
||||
|
||||
|
||||
def blurhash_components(blurhash):
|
||||
"""
|
||||
Decodes and returns the number of x and y components in the given blurhash.
|
||||
"""
|
||||
if len(blurhash) < 6:
|
||||
raise ValueError("BlurHash must be at least 6 characters long.")
|
||||
|
||||
# Decode metadata
|
||||
size_info = base83_decode(blurhash[0])
|
||||
size_y = int(size_info / 9) + 1
|
||||
size_x = (size_info % 9) + 1
|
||||
|
||||
return size_x, size_y
|
||||
|
||||
|
||||
def blurhash_decode(blurhash, width, height, punch=1.0, linear=False):
|
||||
"""
|
||||
Decodes the given blurhash to an image of the specified size.
|
||||
|
||||
Returns the resulting image a list of lists of 3-value sRGB 8 bit integer
|
||||
lists. Set linear to True if you would prefer to get linear floating point
|
||||
RGB back.
|
||||
|
||||
The punch parameter can be used to de- or increase the contrast of the
|
||||
resulting image.
|
||||
|
||||
As per the original implementation it is suggested to only decode
|
||||
to a relatively small size and then scale the result up, as it
|
||||
basically looks the same anyways.
|
||||
"""
|
||||
if len(blurhash) < 6:
|
||||
raise ValueError("BlurHash must be at least 6 characters long.")
|
||||
|
||||
# Decode metadata
|
||||
size_info = base83_decode(blurhash[0])
|
||||
size_y = int(size_info / 9) + 1
|
||||
size_x = (size_info % 9) + 1
|
||||
|
||||
quant_max_value = base83_decode(blurhash[1])
|
||||
real_max_value = (float(quant_max_value + 1) / 166.0) * punch
|
||||
|
||||
# Make sure we at least have the right number of characters
|
||||
if len(blurhash) != 4 + 2 * size_x * size_y:
|
||||
raise ValueError("Invalid BlurHash length.")
|
||||
|
||||
# Decode DC component
|
||||
dc_value = base83_decode(blurhash[2:6])
|
||||
colours = [(
|
||||
srgb_to_linear(dc_value >> 16),
|
||||
srgb_to_linear((dc_value >> 8) & 255),
|
||||
srgb_to_linear(dc_value & 255)
|
||||
)]
|
||||
|
||||
# Decode AC components
|
||||
for component in range(1, size_x * size_y):
|
||||
ac_value = base83_decode(blurhash[4+component*2:4+(component+1)*2])
|
||||
colours.append((
|
||||
sign_pow((float(int(ac_value / (19 * 19))) - 9.0)
|
||||
/ 9.0, 2.0) * real_max_value,
|
||||
sign_pow((float(int(ac_value / 19) % 19) - 9.0)
|
||||
/ 9.0, 2.0) * real_max_value,
|
||||
sign_pow((float(ac_value % 19) - 9.0)
|
||||
/ 9.0, 2.0) * real_max_value
|
||||
))
|
||||
|
||||
# Return image RGB values, as a list of lists of lists,
|
||||
# consumable by something like numpy or PIL.
|
||||
pixels = []
|
||||
for y in range(height):
|
||||
pixel_row = []
|
||||
for x in range(width):
|
||||
pixel = [0.0, 0.0, 0.0]
|
||||
|
||||
for j in range(size_y):
|
||||
for i in range(size_x):
|
||||
basis = \
|
||||
math.cos(math.pi * float(x) * float(i) /
|
||||
float(width)) * \
|
||||
math.cos(math.pi * float(y) * float(j) / float(height))
|
||||
colour = colours[i + j * size_x]
|
||||
pixel[0] += colour[0] * basis
|
||||
pixel[1] += colour[1] * basis
|
||||
pixel[2] += colour[2] * basis
|
||||
if linear is False:
|
||||
pixel_row.append([
|
||||
linear_to_srgb(pixel[0]),
|
||||
linear_to_srgb(pixel[1]),
|
||||
linear_to_srgb(pixel[2]),
|
||||
])
|
||||
else:
|
||||
pixel_row.append(pixel)
|
||||
pixels.append(pixel_row)
|
||||
return pixels
|
||||
|
||||
|
||||
def blurhash_encode(image, components_x=4, components_y=4, linear=False):
|
||||
"""
|
||||
Calculates the blurhash for an image using the given x and y
|
||||
|
@ -218,9 +113,9 @@ def blurhash_encode(image, components_x=4, components_y=4, linear=False):
|
|||
image_linear_line = []
|
||||
for x in range(int(width)):
|
||||
image_linear_line.append([
|
||||
srgb_to_linear(image[y][x][0]),
|
||||
srgb_to_linear(image[y][x][1]),
|
||||
srgb_to_linear(image[y][x][2])
|
||||
_srgb_to_linear(image[y][x][0]),
|
||||
_srgb_to_linear(image[y][x][1]),
|
||||
_srgb_to_linear(image[y][x][2])
|
||||
])
|
||||
image_linear.append(image_linear_line)
|
||||
else:
|
||||
|
@ -254,9 +149,9 @@ def blurhash_encode(image, components_x=4, components_y=4, linear=False):
|
|||
abs(component[1]), abs(component[2]))
|
||||
|
||||
# Encode components
|
||||
dc_value = (linear_to_srgb(components[0][0]) << 16) + \
|
||||
(linear_to_srgb(components[0][1]) << 8) + \
|
||||
linear_to_srgb(components[0][2])
|
||||
dc_value = (_linear_to_srgb(components[0][0]) << 16) + \
|
||||
(_linear_to_srgb(components[0][1]) << 8) + \
|
||||
_linear_to_srgb(components[0][2])
|
||||
|
||||
quant_max_ac_component = int(max(0, min(82,
|
||||
math.floor(max_ac_component *
|
||||
|
@ -268,9 +163,9 @@ def blurhash_encode(image, components_x=4, components_y=4, linear=False):
|
|||
r2 = r / ac_component_norm_factor
|
||||
g2 = g / ac_component_norm_factor
|
||||
b2 = b / ac_component_norm_factor
|
||||
r3 = math.floor(sign_pow(r2, 0.5) * 9.0 + 9.5)
|
||||
g3 = math.floor(sign_pow(g2, 0.5) * 9.0 + 9.5)
|
||||
b3 = math.floor(sign_pow(b2, 0.5) * 9.0 + 9.5)
|
||||
r3 = math.floor(_sign_pow(r2, 0.5) * 9.0 + 9.5)
|
||||
g3 = math.floor(_sign_pow(g2, 0.5) * 9.0 + 9.5)
|
||||
b3 = math.floor(_sign_pow(b2, 0.5) * 9.0 + 9.5)
|
||||
ac_values.append(
|
||||
int(max(0.0, min(18.0, r3))) * 19 * 19 +
|
||||
int(max(0.0, min(18.0, g3))) * 19 +
|
||||
|
@ -279,10 +174,11 @@ def blurhash_encode(image, components_x=4, components_y=4, linear=False):
|
|||
|
||||
# Build final blurhash
|
||||
blurhash = ""
|
||||
blurhash += base83_encode((components_x - 1) + (components_y - 1) * 9, 1)
|
||||
blurhash += base83_encode(quant_max_ac_component, 1)
|
||||
blurhash += base83_encode(dc_value, 4)
|
||||
blurhashValue = (components_x - 1) + (components_y - 1) * 9
|
||||
blurhash += _base83_encode(blurhashValue, 1)
|
||||
blurhash += _base83_encode(quant_max_ac_component, 1)
|
||||
blurhash += _base83_encode(dc_value, 4)
|
||||
for ac_value in ac_values:
|
||||
blurhash += base83_encode(ac_value, 2)
|
||||
blurhash += _base83_encode(ac_value, 2)
|
||||
|
||||
return blurhash
|
||||
|
|
257
bookmarks.py
|
@ -8,6 +8,8 @@ __status__ = "Production"
|
|||
|
||||
import os
|
||||
from pprint import pprint
|
||||
from utils import hasUsersPath
|
||||
from utils import getFullDomain
|
||||
from utils import removeIdEnding
|
||||
from utils import removePostFromCache
|
||||
from utils import urlPermitted
|
||||
|
@ -17,10 +19,6 @@ from utils import locatePost
|
|||
from utils import getCachedPostFilename
|
||||
from utils import loadJson
|
||||
from utils import saveJson
|
||||
from session import postJson
|
||||
from webfinger import webfingerHandle
|
||||
from auth import createBasicAuthHeader
|
||||
from posts import getPersonBox
|
||||
|
||||
|
||||
def undoBookmarksCollectionEntry(recentPostsCache: {},
|
||||
|
@ -110,7 +108,7 @@ def undoBookmarksCollectionEntry(recentPostsCache: {},
|
|||
def bookmarkedByPerson(postJsonObject: {}, nickname: str, domain: str) -> bool:
|
||||
"""Returns True if the given post is bookmarked by the given person
|
||||
"""
|
||||
if noOfBookmarks(postJsonObject) == 0:
|
||||
if _noOfBookmarks(postJsonObject) == 0:
|
||||
return False
|
||||
actorMatch = domain + '/users/' + nickname
|
||||
for item in postJsonObject['object']['bookmarks']['items']:
|
||||
|
@ -119,7 +117,7 @@ def bookmarkedByPerson(postJsonObject: {}, nickname: str, domain: str) -> bool:
|
|||
return False
|
||||
|
||||
|
||||
def noOfBookmarks(postJsonObject: {}) -> int:
|
||||
def _noOfBookmarks(postJsonObject: {}) -> int:
|
||||
"""Returns the number of bookmarks ona given post
|
||||
"""
|
||||
if not postJsonObject.get('object'):
|
||||
|
@ -206,10 +204,11 @@ def updateBookmarksCollection(recentPostsCache: {},
|
|||
try:
|
||||
with open(bookmarksIndexFilename, 'r+') as bmIndexFile:
|
||||
content = bmIndexFile.read()
|
||||
bmIndexFile.seek(0, 0)
|
||||
bmIndexFile.write(bookmarkIndex + '\n' + content)
|
||||
if debug:
|
||||
print('DEBUG: bookmark added to index')
|
||||
if bookmarkIndex + '\n' not in content:
|
||||
bmIndexFile.seek(0, 0)
|
||||
bmIndexFile.write(bookmarkIndex + '\n' + content)
|
||||
if debug:
|
||||
print('DEBUG: bookmark added to index')
|
||||
except Exception as e:
|
||||
print('WARN: Failed to write entry to bookmarks index ' +
|
||||
bookmarksIndexFilename + ' ' + str(e))
|
||||
|
@ -237,11 +236,7 @@ def bookmark(recentPostsCache: {},
|
|||
if not urlPermitted(objectUrl, federationList):
|
||||
return None
|
||||
|
||||
fullDomain = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
fullDomain = domain + ':' + str(port)
|
||||
fullDomain = getFullDomain(domain, port)
|
||||
|
||||
newBookmarkJson = {
|
||||
"@context": "https://www.w3.org/ns/activitystreams",
|
||||
|
@ -262,10 +257,7 @@ def bookmark(recentPostsCache: {},
|
|||
bookmarkedPostNickname = getNicknameFromActor(acBm)
|
||||
bookmarkedPostDomain, bookmarkedPostPort = getDomainFromActor(acBm)
|
||||
else:
|
||||
if '/users/' in objectUrl or \
|
||||
'/accounts/' in objectUrl or \
|
||||
'/channel/' in objectUrl or \
|
||||
'/profile/' in objectUrl:
|
||||
if hasUsersPath(objectUrl):
|
||||
ou = objectUrl
|
||||
bookmarkedPostNickname = getNicknameFromActor(ou)
|
||||
bookmarkedPostDomain, bookmarkedPostPort = getDomainFromActor(ou)
|
||||
|
@ -286,36 +278,6 @@ def bookmark(recentPostsCache: {},
|
|||
return newBookmarkJson
|
||||
|
||||
|
||||
def bookmarkPost(recentPostsCache: {},
|
||||
session, baseDir: str, federationList: [],
|
||||
nickname: str, domain: str, port: int, httpPrefix: str,
|
||||
bookmarkNickname: str, bookmarkedomain: str,
|
||||
bookmarkPort: int,
|
||||
ccList: [],
|
||||
bookmarkStatusNumber: int, clientToServer: bool,
|
||||
sendThreads: [], postLog: [],
|
||||
personCache: {}, cachedWebfingers: {},
|
||||
debug: bool, projectVersion: str) -> {}:
|
||||
"""Bookmarks a given status post. This is only used by unit tests
|
||||
"""
|
||||
bookmarkedomain = bookmarkedomain
|
||||
if bookmarkPort:
|
||||
if bookmarkPort != 80 and bookmarkPort != 443:
|
||||
if ':' not in bookmarkedomain:
|
||||
bookmarkedomain = bookmarkedomain + ':' + str(bookmarkPort)
|
||||
|
||||
actorBookmarked = httpPrefix + '://' + bookmarkedomain + \
|
||||
'/users/' + bookmarkNickname
|
||||
objectUrl = actorBookmarked + '/statuses/' + str(bookmarkStatusNumber)
|
||||
|
||||
return bookmark(recentPostsCache,
|
||||
session, baseDir, federationList, nickname, domain, port,
|
||||
ccList, httpPrefix, objectUrl, actorBookmarked,
|
||||
clientToServer,
|
||||
sendThreads, postLog, personCache, cachedWebfingers,
|
||||
debug, projectVersion)
|
||||
|
||||
|
||||
def undoBookmark(recentPostsCache: {},
|
||||
session, baseDir: str, federationList: [],
|
||||
nickname: str, domain: str, port: int,
|
||||
|
@ -333,11 +295,7 @@ def undoBookmark(recentPostsCache: {},
|
|||
if not urlPermitted(objectUrl, federationList):
|
||||
return None
|
||||
|
||||
fullDomain = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
fullDomain = domain + ':' + str(port)
|
||||
fullDomain = getFullDomain(domain, port)
|
||||
|
||||
newUndoBookmarkJson = {
|
||||
"@context": "https://www.w3.org/ns/activitystreams",
|
||||
|
@ -363,10 +321,7 @@ def undoBookmark(recentPostsCache: {},
|
|||
bookmarkedPostNickname = getNicknameFromActor(acBm)
|
||||
bookmarkedPostDomain, bookmarkedPostPort = getDomainFromActor(acBm)
|
||||
else:
|
||||
if '/users/' in objectUrl or \
|
||||
'/accounts/' in objectUrl or \
|
||||
'/channel/' in objectUrl or \
|
||||
'/profile/' in objectUrl:
|
||||
if hasUsersPath(objectUrl):
|
||||
ou = objectUrl
|
||||
bookmarkedPostNickname = getNicknameFromActor(ou)
|
||||
bookmarkedPostDomain, bookmarkedPostPort = getDomainFromActor(ou)
|
||||
|
@ -386,192 +341,6 @@ def undoBookmark(recentPostsCache: {},
|
|||
return newUndoBookmarkJson
|
||||
|
||||
|
||||
def undoBookmarkPost(session, baseDir: str, federationList: [],
|
||||
nickname: str, domain: str, port: int, httpPrefix: str,
|
||||
bookmarkNickname: str, bookmarkedomain: str,
|
||||
bookmarkPort: int, ccList: [],
|
||||
bookmarkStatusNumber: int, clientToServer: bool,
|
||||
sendThreads: [], postLog: [],
|
||||
personCache: {}, cachedWebfingers: {},
|
||||
debug: bool) -> {}:
|
||||
"""Removes a bookmarked post
|
||||
"""
|
||||
bookmarkedomain = bookmarkedomain
|
||||
if bookmarkPort:
|
||||
if bookmarkPort != 80 and bookmarkPort != 443:
|
||||
if ':' not in bookmarkedomain:
|
||||
bookmarkedomain = bookmarkedomain + ':' + str(bookmarkPort)
|
||||
|
||||
objectUrl = httpPrefix + '://' + bookmarkedomain + \
|
||||
'/users/' + bookmarkNickname + \
|
||||
'/statuses/' + str(bookmarkStatusNumber)
|
||||
|
||||
return undoBookmark(session, baseDir, federationList,
|
||||
nickname, domain, port,
|
||||
ccList, httpPrefix, objectUrl, clientToServer,
|
||||
sendThreads, postLog, personCache,
|
||||
cachedWebfingers, debug)
|
||||
|
||||
|
||||
def sendBookmarkViaServer(baseDir: str, session,
|
||||
fromNickname: str, password: str,
|
||||
fromDomain: str, fromPort: int,
|
||||
httpPrefix: str, bookmarkUrl: str,
|
||||
cachedWebfingers: {}, personCache: {},
|
||||
debug: bool, projectVersion: str) -> {}:
|
||||
"""Creates a bookmark via c2s
|
||||
"""
|
||||
if not session:
|
||||
print('WARN: No session for sendBookmarkViaServer')
|
||||
return 6
|
||||
|
||||
fromDomainFull = fromDomain
|
||||
if fromPort:
|
||||
if fromPort != 80 and fromPort != 443:
|
||||
if ':' not in fromDomain:
|
||||
fromDomainFull = fromDomain + ':' + str(fromPort)
|
||||
|
||||
newBookmarkJson = {
|
||||
"@context": "https://www.w3.org/ns/activitystreams",
|
||||
'type': 'Bookmark',
|
||||
'actor': httpPrefix+'://'+fromDomainFull+'/users/'+fromNickname,
|
||||
'object': bookmarkUrl
|
||||
}
|
||||
|
||||
handle = httpPrefix + '://' + fromDomainFull + '/@' + fromNickname
|
||||
|
||||
# lookup the inbox for the To handle
|
||||
wfRequest = webfingerHandle(session, handle, httpPrefix,
|
||||
cachedWebfingers,
|
||||
fromDomain, projectVersion)
|
||||
if not wfRequest:
|
||||
if debug:
|
||||
print('DEBUG: announce webfinger failed for ' + handle)
|
||||
return 1
|
||||
if not isinstance(wfRequest, dict):
|
||||
print('WARN: Webfinger for ' + handle + ' did not return a dict. ' +
|
||||
str(wfRequest))
|
||||
return 1
|
||||
|
||||
postToBox = 'outbox'
|
||||
|
||||
# get the actor inbox for the To handle
|
||||
(inboxUrl, pubKeyId, pubKey,
|
||||
fromPersonId, sharedInbox, avatarUrl,
|
||||
displayName) = getPersonBox(baseDir, session, wfRequest, personCache,
|
||||
projectVersion, httpPrefix, fromNickname,
|
||||
fromDomain, postToBox)
|
||||
|
||||
if not inboxUrl:
|
||||
if debug:
|
||||
print('DEBUG: No ' + postToBox + ' was found for ' + handle)
|
||||
return 3
|
||||
if not fromPersonId:
|
||||
if debug:
|
||||
print('DEBUG: No actor was found for ' + handle)
|
||||
return 4
|
||||
|
||||
authHeader = createBasicAuthHeader(fromNickname, password)
|
||||
|
||||
headers = {
|
||||
'host': fromDomain,
|
||||
'Content-type': 'application/json',
|
||||
'Authorization': authHeader
|
||||
}
|
||||
postResult = postJson(session, newBookmarkJson, [],
|
||||
inboxUrl, headers)
|
||||
if not postResult:
|
||||
if debug:
|
||||
print('DEBUG: POST announce failed for c2s to ' + inboxUrl)
|
||||
return 5
|
||||
|
||||
if debug:
|
||||
print('DEBUG: c2s POST bookmark success')
|
||||
|
||||
return newBookmarkJson
|
||||
|
||||
|
||||
def sendUndoBookmarkViaServer(baseDir: str, session,
|
||||
fromNickname: str, password: str,
|
||||
fromDomain: str, fromPort: int,
|
||||
httpPrefix: str, bookmarkUrl: str,
|
||||
cachedWebfingers: {}, personCache: {},
|
||||
debug: bool, projectVersion: str) -> {}:
|
||||
"""Undo a bookmark via c2s
|
||||
"""
|
||||
if not session:
|
||||
print('WARN: No session for sendUndoBookmarkViaServer')
|
||||
return 6
|
||||
|
||||
fromDomainFull = fromDomain
|
||||
if fromPort:
|
||||
if fromPort != 80 and fromPort != 443:
|
||||
if ':' not in fromDomain:
|
||||
fromDomainFull = fromDomain + ':' + str(fromPort)
|
||||
|
||||
newUndoBookmarkJson = {
|
||||
"@context": "https://www.w3.org/ns/activitystreams",
|
||||
'type': 'Undo',
|
||||
'actor': httpPrefix+'://'+fromDomainFull+'/users/'+fromNickname,
|
||||
'object': {
|
||||
'type': 'Bookmark',
|
||||
'actor': httpPrefix+'://'+fromDomainFull+'/users/'+fromNickname,
|
||||
'object': bookmarkUrl
|
||||
}
|
||||
}
|
||||
|
||||
handle = httpPrefix + '://' + fromDomainFull + '/@' + fromNickname
|
||||
|
||||
# lookup the inbox for the To handle
|
||||
wfRequest = webfingerHandle(session, handle, httpPrefix, cachedWebfingers,
|
||||
fromDomain, projectVersion)
|
||||
if not wfRequest:
|
||||
if debug:
|
||||
print('DEBUG: announce webfinger failed for ' + handle)
|
||||
return 1
|
||||
if not isinstance(wfRequest, dict):
|
||||
print('WARN: Webfinger for ' + handle + ' did not return a dict. ' +
|
||||
str(wfRequest))
|
||||
return 1
|
||||
|
||||
postToBox = 'outbox'
|
||||
|
||||
# get the actor inbox for the To handle
|
||||
(inboxUrl, pubKeyId, pubKey,
|
||||
fromPersonId, sharedInbox, avatarUrl,
|
||||
displayName) = getPersonBox(baseDir, session, wfRequest, personCache,
|
||||
projectVersion, httpPrefix, fromNickname,
|
||||
fromDomain, postToBox)
|
||||
|
||||
if not inboxUrl:
|
||||
if debug:
|
||||
print('DEBUG: No ' + postToBox + ' was found for ' + handle)
|
||||
return 3
|
||||
if not fromPersonId:
|
||||
if debug:
|
||||
print('DEBUG: No actor was found for ' + handle)
|
||||
return 4
|
||||
|
||||
authHeader = createBasicAuthHeader(fromNickname, password)
|
||||
|
||||
headers = {
|
||||
'host': fromDomain,
|
||||
'Content-type': 'application/json',
|
||||
'Authorization': authHeader
|
||||
}
|
||||
postResult = postJson(session, newUndoBookmarkJson, [],
|
||||
inboxUrl, headers)
|
||||
if not postResult:
|
||||
if debug:
|
||||
print('DEBUG: POST announce failed for c2s to ' + inboxUrl)
|
||||
return 5
|
||||
|
||||
if debug:
|
||||
print('DEBUG: c2s POST undo bookmark success')
|
||||
|
||||
return newUndoBookmarkJson
|
||||
|
||||
|
||||
def outboxBookmark(recentPostsCache: {},
|
||||
baseDir: str, httpPrefix: str,
|
||||
nickname: str, domain: str, port: int,
|
||||
|
|
|
@ -0,0 +1,103 @@
|
|||
__filename__ = "briar.py"
|
||||
__author__ = "Bob Mottram"
|
||||
__license__ = "AGPL3+"
|
||||
__version__ = "1.1.0"
|
||||
__maintainer__ = "Bob Mottram"
|
||||
__email__ = "bob@freedombone.net"
|
||||
__status__ = "Production"
|
||||
|
||||
|
||||
def getBriarAddress(actorJson: {}) -> str:
|
||||
"""Returns briar address for the given actor
|
||||
"""
|
||||
if not actorJson.get('attachment'):
|
||||
return ''
|
||||
for propertyValue in actorJson['attachment']:
|
||||
if not propertyValue.get('name'):
|
||||
continue
|
||||
if not propertyValue['name'].lower().startswith('briar'):
|
||||
continue
|
||||
if not propertyValue.get('type'):
|
||||
continue
|
||||
if not propertyValue.get('value'):
|
||||
continue
|
||||
if propertyValue['type'] != 'PropertyValue':
|
||||
continue
|
||||
propertyValue['value'] = propertyValue['value'].strip()
|
||||
if len(propertyValue['value']) < 50:
|
||||
continue
|
||||
if not propertyValue['value'].startswith('briar://'):
|
||||
continue
|
||||
if propertyValue['value'].lower() != propertyValue['value']:
|
||||
continue
|
||||
if '"' in propertyValue['value']:
|
||||
continue
|
||||
if ' ' in propertyValue['value']:
|
||||
continue
|
||||
if ',' in propertyValue['value']:
|
||||
continue
|
||||
if '.' in propertyValue['value']:
|
||||
continue
|
||||
return propertyValue['value']
|
||||
return ''
|
||||
|
||||
|
||||
def setBriarAddress(actorJson: {}, briarAddress: str) -> None:
|
||||
"""Sets an briar address for the given actor
|
||||
"""
|
||||
notBriarAddress = False
|
||||
|
||||
if len(briarAddress) < 50:
|
||||
notBriarAddress = True
|
||||
if not briarAddress.startswith('briar://'):
|
||||
notBriarAddress = True
|
||||
if briarAddress.lower() != briarAddress:
|
||||
notBriarAddress = True
|
||||
if '"' in briarAddress:
|
||||
notBriarAddress = True
|
||||
if ' ' in briarAddress:
|
||||
notBriarAddress = True
|
||||
if '.' in briarAddress:
|
||||
notBriarAddress = True
|
||||
if ',' in briarAddress:
|
||||
notBriarAddress = True
|
||||
if '<' in briarAddress:
|
||||
notBriarAddress = True
|
||||
|
||||
if not actorJson.get('attachment'):
|
||||
actorJson['attachment'] = []
|
||||
|
||||
# remove any existing value
|
||||
propertyFound = None
|
||||
for propertyValue in actorJson['attachment']:
|
||||
if not propertyValue.get('name'):
|
||||
continue
|
||||
if not propertyValue.get('type'):
|
||||
continue
|
||||
if not propertyValue['name'].lower().startswith('briar'):
|
||||
continue
|
||||
propertyFound = propertyValue
|
||||
break
|
||||
if propertyFound:
|
||||
actorJson['attachment'].remove(propertyFound)
|
||||
if notBriarAddress:
|
||||
return
|
||||
|
||||
for propertyValue in actorJson['attachment']:
|
||||
if not propertyValue.get('name'):
|
||||
continue
|
||||
if not propertyValue.get('type'):
|
||||
continue
|
||||
if not propertyValue['name'].lower().startswith('briar'):
|
||||
continue
|
||||
if propertyValue['type'] != 'PropertyValue':
|
||||
continue
|
||||
propertyValue['value'] = briarAddress
|
||||
return
|
||||
|
||||
newBriarAddress = {
|
||||
"name": "Briar",
|
||||
"type": "PropertyValue",
|
||||
"value": briarAddress
|
||||
}
|
||||
actorJson['attachment'].append(newBriarAddress)
|
|
@ -0,0 +1,184 @@
|
|||
__filename__ = "categories.py"
|
||||
__author__ = "Bob Mottram"
|
||||
__license__ = "AGPL3+"
|
||||
__version__ = "1.1.0"
|
||||
__maintainer__ = "Bob Mottram"
|
||||
__email__ = "bob@freedombone.net"
|
||||
__status__ = "Production"
|
||||
|
||||
import os
|
||||
import datetime
|
||||
|
||||
|
||||
def getHashtagCategory(baseDir: str, hashtag: str) -> str:
    """Returns the category for the hashtag
    """
    # the category file may have been stored under a different
    # capitalization of the tag, so try each variant in turn
    categoryFilename = None
    for tagVariant in (hashtag, hashtag.title(), hashtag.upper()):
        possibleFilename = baseDir + '/tags/' + tagVariant + '.category'
        if os.path.isfile(possibleFilename):
            categoryFilename = possibleFilename
            break
    if categoryFilename is None:
        return ''

    with open(categoryFilename, 'r') as fp:
        categoryStr = fp.read()
        if categoryStr:
            return categoryStr
    return ''
|
||||
|
||||
|
||||
def getHashtagCategories(baseDir: str, recent=False, category=None) -> dict:
    """Returns a dictionary of category name -> list of hashtags.
    If recent is True then only include hashtags whose tag file
    was modified within roughly the last day.
    If category is given then only include entries for that category.
    """
    hashtagCategories = {}

    if recent:
        # threshold in days-since-epoch for "recently used" tags
        currTime = datetime.datetime.utcnow()
        daysSinceEpoch = (currTime - datetime.datetime(1970, 1, 1)).days
        recently = daysSinceEpoch - 1

    for subdir, dirs, files in os.walk(baseDir + '/tags'):
        for f in files:
            if not f.endswith('.category'):
                continue
            categoryFilename = os.path.join(baseDir + '/tags', f)
            if not os.path.isfile(categoryFilename):
                continue
            # the filename stem is the hashtag itself
            hashtag = f.split('.')[0]
            with open(categoryFilename, 'r') as fp:
                categoryStr = fp.read()

                if not categoryStr:
                    continue

                if category:
                    # only return a dictionary for a specific category
                    if categoryStr != category:
                        continue

                if recent:
                    # judge recency by the modification time of the
                    # hashtag's tag file
                    tagsFilename = baseDir + '/tags/' + hashtag + '.txt'
                    if not os.path.isfile(tagsFilename):
                        continue
                    modTimesinceEpoc = \
                        os.path.getmtime(tagsFilename)
                    lastModifiedDate = \
                        datetime.datetime.fromtimestamp(modTimesinceEpoc)
                    fileDaysSinceEpoch = \
                        (lastModifiedDate -
                         datetime.datetime(1970, 1, 1)).days
                    if fileDaysSinceEpoch < recently:
                        continue

                if not hashtagCategories.get(categoryStr):
                    hashtagCategories[categoryStr] = [hashtag]
                else:
                    if hashtag not in hashtagCategories[categoryStr]:
                        hashtagCategories[categoryStr].append(hashtag)
        # don't recurse into subdirectories of the tags directory
        break
    return hashtagCategories
|
||||
|
||||
|
||||
def _updateHashtagCategories(baseDir: str) -> None:
    """Regenerates the cached list of hashtag categories,
    stored one category name per line in accounts/categoryList.txt
    """
    categoryListFilename = baseDir + '/accounts/categoryList.txt'
    hashtagCategories = getHashtagCategories(baseDir)
    if not hashtagCategories:
        # no categories exist, so remove any stale list file
        if os.path.isfile(categoryListFilename):
            os.remove(categoryListFilename)
        return

    # sorted category names, one per line
    categoryListStr = ''
    for categoryStr in sorted(hashtagCategories.keys()):
        categoryListStr += categoryStr + '\n'

    # save a list of available categories for quick lookup
    with open(categoryListFilename, 'w+') as fp:
        fp.write(categoryListStr)
|
||||
|
||||
|
||||
def _validHashtagCategory(category: str) -> bool:
|
||||
"""Returns true if the category name is valid
|
||||
"""
|
||||
if not category:
|
||||
return False
|
||||
|
||||
invalidChars = (',', ' ', '<', ';', '\\')
|
||||
for ch in invalidChars:
|
||||
if ch in category:
|
||||
return False
|
||||
|
||||
# too long
|
||||
if len(category) > 40:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def setHashtagCategory(baseDir: str, hashtag: str, category: str,
                       force=False) -> bool:
    """Sets the category for the hashtag.
    Returns True if the category was stored.
    """
    if not _validHashtagCategory(category):
        return False

    if not force:
        # the hashtag must already be known, under any of the
        # capitalization variants used for tag filenames
        hashtagFilename = baseDir + '/tags/' + hashtag + '.txt'
        if not os.path.isfile(hashtagFilename):
            hashtag = hashtag.title()
            hashtagFilename = baseDir + '/tags/' + hashtag + '.txt'
            if not os.path.isfile(hashtagFilename):
                hashtag = hashtag.upper()
                hashtagFilename = baseDir + '/tags/' + hashtag + '.txt'
                if not os.path.isfile(hashtagFilename):
                    return False

    if not os.path.isdir(baseDir + '/tags'):
        os.mkdir(baseDir + '/tags')
    categoryFilename = baseDir + '/tags/' + hashtag + '.category'
    if force:
        # don't overwrite any existing categories
        # NOTE(review): under force an existing category file blocks the
        # update, which reads as the opposite of "force" — confirm intent
        if os.path.isfile(categoryFilename):
            return False
    with open(categoryFilename, 'w+') as fp:
        fp.write(category)
    # regenerate the cached category list after a successful write
    _updateHashtagCategories(baseDir)
    return True
|
||||
|
||||
|
||||
def guessHashtagCategory(tagName: str, hashtagCategories: {}) -> str:
    """Tries to guess a category for the given hashtag.
    This works by trying to find the longest similar hashtag.
    Returns an empty string if no similar hashtag is known.
    """
    categoryMatched = ''
    tagMatchedLen = 0

    for categoryStr, hashtagList in hashtagCategories.items():
        for hashtag in hashtagList:
            if len(hashtag) < 3:
                # avoid matching very small strings which often
                # lead to spurious categories
                continue
            # a candidate if either string contains the other
            if hashtag not in tagName:
                if tagName not in hashtag:
                    continue
            # keep the longest matching tag seen so far
            if len(hashtag) > tagMatchedLen:
                tagMatchedLen = len(hashtag)
                categoryMatched = categoryStr
    return categoryMatched
|
118
content.py
|
@ -33,7 +33,7 @@ def removeHtmlTag(htmlStr: str, tag: str) -> str:
|
|||
return htmlStr
|
||||
|
||||
|
||||
def removeQuotesWithinQuotes(content: str) -> str:
|
||||
def _removeQuotesWithinQuotes(content: str) -> str:
|
||||
"""Removes any blockquote inside blockquote
|
||||
"""
|
||||
if '<blockquote>' not in content:
|
||||
|
@ -96,7 +96,7 @@ def htmlReplaceEmailQuote(content: str) -> str:
|
|||
else:
|
||||
lineStr = lineStr.replace('>', '<br>')
|
||||
newContent += '<p>' + lineStr + '</blockquote></p>'
|
||||
return removeQuotesWithinQuotes(newContent)
|
||||
return _removeQuotesWithinQuotes(newContent)
|
||||
|
||||
|
||||
def htmlReplaceQuoteMarks(content: str) -> str:
|
||||
|
@ -163,7 +163,7 @@ def dangerousMarkup(content: str, allowLocalNetworkAccess: bool) -> bool:
|
|||
contentSections = content.split('<')
|
||||
invalidPartials = ()
|
||||
if not allowLocalNetworkAccess:
|
||||
invalidPartials = ('127.0.', '192.168', '10.0.')
|
||||
invalidPartials = ('localhost', '127.0.', '192.168', '10.0.')
|
||||
invalidStrings = ('script', 'canvas', 'style', 'abbr',
|
||||
'frame', 'iframe', 'html', 'body',
|
||||
'hr', 'allow-popups', 'allow-scripts')
|
||||
|
@ -196,11 +196,26 @@ def dangerousCSS(filename: str, allowLocalNetworkAccess: bool) -> bool:
|
|||
content = fp.read().lower()
|
||||
|
||||
cssMatches = ('behavior:', ':expression', '?php', '.php',
|
||||
'google')
|
||||
'google', 'regexp', 'localhost',
|
||||
'127.0.', '192.168', '10.0.', '@import')
|
||||
for match in cssMatches:
|
||||
if match in content:
|
||||
return True
|
||||
|
||||
# search for non-local web links
|
||||
if 'url(' in content:
|
||||
urlList = content.split('url(')
|
||||
ctr = 0
|
||||
for urlStr in urlList:
|
||||
if ctr > 0:
|
||||
if ')' in urlStr:
|
||||
urlStr = urlStr.split(')')[0]
|
||||
if 'http' in urlStr:
|
||||
print('ERROR: non-local web link in CSS ' +
|
||||
filename)
|
||||
return True
|
||||
ctr += 1
|
||||
|
||||
# an attacker can include html inside of the css
|
||||
# file as a comment and this may then be run from the html
|
||||
if dangerousMarkup(content, allowLocalNetworkAccess):
|
||||
|
@ -299,7 +314,7 @@ def replaceEmojiFromTags(content: str, tag: [], messageType: str) -> str:
|
|||
return content
|
||||
|
||||
|
||||
def addMusicTag(content: str, tag: str) -> str:
|
||||
def _addMusicTag(content: str, tag: str) -> str:
|
||||
"""If a music link is found then ensure that the post is
|
||||
tagged appropriately
|
||||
"""
|
||||
|
@ -401,8 +416,8 @@ def validHashTag(hashtag: str) -> bool:
|
|||
return False
|
||||
|
||||
|
||||
def addHashTags(wordStr: str, httpPrefix: str, domain: str,
|
||||
replaceHashTags: {}, postHashtags: {}) -> bool:
|
||||
def _addHashTags(wordStr: str, httpPrefix: str, domain: str,
|
||||
replaceHashTags: {}, postHashtags: {}) -> bool:
|
||||
"""Detects hashtags and adds them to the replacements dict
|
||||
Also updates the hashtags list to be added to the post
|
||||
"""
|
||||
|
@ -423,38 +438,10 @@ def addHashTags(wordStr: str, httpPrefix: str, domain: str,
|
|||
return True
|
||||
|
||||
|
||||
def loadEmojiDict(emojiDataFilename: str, emojiDict: {}) -> None:
    """Creates an emoji dictionary based on emoji/emoji-data.txt
    """
    if not os.path.isfile(emojiDataFilename):
        return
    with open(emojiDataFilename, "r") as fileHandler:
        for line in fileHandler:
            # skip short lines, comments, and lines lacking the
            # '; Emoji' field or the '(...)' name section
            if len(line) < 5 or line.startswith('#'):
                continue
            if '; Emoji' not in line or ')' not in line:
                continue
            codePoint = line.split(' ')[0]
            if len(codePoint) < 4:
                continue
            # a codepoint range is reduced to its first entry
            if '..' in codePoint:
                codePoint = codePoint.split('..')[0]
            # the emoji name follows the first closing bracket
            name = line.split(')', 1)[1].strip()
            name = name.replace('\n', '').replace('\r', '')
            name = name.replace(' ', '').replace('-', '')
            if '..' in name:
                name = name.split('..')[0]
            emojiDict[name.lower()] = codePoint
|
||||
|
||||
|
||||
def addEmoji(baseDir: str, wordStr: str,
|
||||
httpPrefix: str, domain: str,
|
||||
replaceEmoji: {}, postTags: {},
|
||||
emojiDict: {}) -> bool:
|
||||
def _addEmoji(baseDir: str, wordStr: str,
|
||||
httpPrefix: str, domain: str,
|
||||
replaceEmoji: {}, postTags: {},
|
||||
emojiDict: {}) -> bool:
|
||||
"""Detects Emoji and adds them to the replacements dict
|
||||
Also updates the tags list to be added to the post
|
||||
"""
|
||||
|
@ -493,8 +480,17 @@ def addEmoji(baseDir: str, wordStr: str,
|
|||
return True
|
||||
|
||||
|
||||
def addMention(wordStr: str, httpPrefix: str, following: str,
|
||||
replaceMentions: {}, recipients: [], tags: {}) -> bool:
|
||||
def tagExists(tagType: str, tagName: str, tags: {}) -> bool:
    """Returns true if a tag with the given type and name
    exists within the given list of tags
    """
    return any(tag['name'] == tagName and tag['type'] == tagType
               for tag in tags)
|
||||
|
||||
|
||||
def _addMention(wordStr: str, httpPrefix: str, following: str,
|
||||
replaceMentions: {}, recipients: [], tags: {}) -> bool:
|
||||
"""Detects mentions and adds them to the replacements dict and
|
||||
recipients list
|
||||
"""
|
||||
|
@ -676,7 +672,7 @@ def removeLongWords(content: str, maxWordLength: int,
|
|||
return content
|
||||
|
||||
|
||||
def loadAutoTags(baseDir: str, nickname: str, domain: str) -> []:
|
||||
def _loadAutoTags(baseDir: str, nickname: str, domain: str) -> []:
|
||||
"""Loads automatic tags file and returns a list containing
|
||||
the lines of the file
|
||||
"""
|
||||
|
@ -689,9 +685,9 @@ def loadAutoTags(baseDir: str, nickname: str, domain: str) -> []:
|
|||
return []
|
||||
|
||||
|
||||
def autoTag(baseDir: str, nickname: str, domain: str,
|
||||
wordStr: str, autoTagList: [],
|
||||
appendTags: []):
|
||||
def _autoTag(baseDir: str, nickname: str, domain: str,
|
||||
wordStr: str, autoTagList: [],
|
||||
appendTags: []):
|
||||
"""Generates a list of tags to be automatically appended to the content
|
||||
"""
|
||||
for tagRule in autoTagList:
|
||||
|
@ -723,7 +719,7 @@ def addHtmlTags(baseDir: str, httpPrefix: str,
|
|||
maxWordLength = 40
|
||||
content = content.replace('\r', '')
|
||||
content = content.replace('\n', ' --linebreak-- ')
|
||||
content = addMusicTag(content, 'nowplaying')
|
||||
content = _addMusicTag(content, 'nowplaying')
|
||||
contentSimplified = \
|
||||
content.replace(',', ' ').replace(';', ' ').replace('- ', ' ')
|
||||
contentSimplified = contentSimplified.replace('. ', ' ').strip()
|
||||
|
@ -764,7 +760,7 @@ def addHtmlTags(baseDir: str, httpPrefix: str,
|
|||
# extract mentions and tags from words
|
||||
longWordsList = []
|
||||
prevWordStr = ''
|
||||
autoTagsList = loadAutoTags(baseDir, nickname, domain)
|
||||
autoTagsList = _loadAutoTags(baseDir, nickname, domain)
|
||||
appendTags = []
|
||||
for wordStr in words:
|
||||
wordLen = len(wordStr)
|
||||
|
@ -773,13 +769,13 @@ def addHtmlTags(baseDir: str, httpPrefix: str,
|
|||
longWordsList.append(wordStr)
|
||||
firstChar = wordStr[0]
|
||||
if firstChar == '@':
|
||||
if addMention(wordStr, httpPrefix, following,
|
||||
replaceMentions, recipients, hashtags):
|
||||
if _addMention(wordStr, httpPrefix, following,
|
||||
replaceMentions, recipients, hashtags):
|
||||
prevWordStr = ''
|
||||
continue
|
||||
elif firstChar == '#':
|
||||
if addHashTags(wordStr, httpPrefix, originalDomain,
|
||||
replaceHashTags, hashtags):
|
||||
if _addHashTags(wordStr, httpPrefix, originalDomain,
|
||||
replaceHashTags, hashtags):
|
||||
prevWordStr = ''
|
||||
continue
|
||||
elif ':' in wordStr:
|
||||
|
@ -795,18 +791,18 @@ def addHtmlTags(baseDir: str, httpPrefix: str,
|
|||
emojiDict = loadJson(baseDir + '/emoji/emoji.json')
|
||||
|
||||
# print('TAG: looking up emoji for :'+wordStr2+':')
|
||||
addEmoji(baseDir, ':' + wordStr2 + ':', httpPrefix,
|
||||
originalDomain, replaceEmoji, hashtags,
|
||||
emojiDict)
|
||||
_addEmoji(baseDir, ':' + wordStr2 + ':', httpPrefix,
|
||||
originalDomain, replaceEmoji, hashtags,
|
||||
emojiDict)
|
||||
else:
|
||||
if autoTag(baseDir, nickname, domain, wordStr,
|
||||
autoTagsList, appendTags):
|
||||
if _autoTag(baseDir, nickname, domain, wordStr,
|
||||
autoTagsList, appendTags):
|
||||
prevWordStr = ''
|
||||
continue
|
||||
if prevWordStr:
|
||||
if autoTag(baseDir, nickname, domain,
|
||||
prevWordStr + ' ' + wordStr,
|
||||
autoTagsList, appendTags):
|
||||
if _autoTag(baseDir, nickname, domain,
|
||||
prevWordStr + ' ' + wordStr,
|
||||
autoTagsList, appendTags):
|
||||
prevWordStr = ''
|
||||
continue
|
||||
prevWordStr = wordStr
|
||||
|
@ -814,8 +810,8 @@ def addHtmlTags(baseDir: str, httpPrefix: str,
|
|||
# add any auto generated tags
|
||||
for appended in appendTags:
|
||||
content = content + ' ' + appended
|
||||
addHashTags(appended, httpPrefix, originalDomain,
|
||||
replaceHashTags, hashtags)
|
||||
_addHashTags(appended, httpPrefix, originalDomain,
|
||||
replaceHashTags, hashtags)
|
||||
|
||||
# replace words with their html versions
|
||||
for wordStr, replaceStr in replaceMentions.items():
|
||||
|
|
|
@ -3,424 +3,526 @@
|
|||
<channel>
|
||||
<title>#categories</title>
|
||||
<item>
|
||||
<title>gafam</title>
|
||||
<description>zuckerberg apple antitrust youtube ffs facebook interoperability amazon amazonring googleplus Facebook advertising adtech microsoft twitter caffeine skype ff youtubedl degoogled youtubers google dotcoms deleteyoutube Instagram fascistbook FuckGoogle degoogle fuschia ungoogled ring affordances gafam inspiring fuckoffgoogle deletefacebook office365 playstore bigtech</description>
|
||||
<title>sport</title>
|
||||
<description>billiard darts swim motorsport snooker marathon hockey diving baseball Millwall sailing athletics skating skiing sport football</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>events</title>
|
||||
<description>neverforget award OONIbday waybackwednesday notifications throwbackthursday adventskalender live Day deepthoughts screenshotsaturday thursdaythoughts humanrightsday followfriday afediversechristmas wednesdaymotivation showerthoughts anarchymonday 100DaysToOffload ff holiday christmas week concert festival screenshottuesday dontstarve onthisday livestream sunday screenshotsunday liverpool adayinthelife day ccc InternationalCheetahDay interestingtimes christmaslights meetup</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>gafam</title>
|
||||
<description>zuckerberg caringissharing ads apple antitrust SpringerEnteignen GoogleDown AppleSearch bankruptBezos youtube ffs facebook interoperability amazon boycottinstagram amazonring googleplus degooglisation siri Facebook LeiharbeitAbschaffen advertising adtech fuckgoogle microsoft dtm twitter caffeine skype chrome hildebrandt youtubedl degoogled youtubers google sharingiscaring gis dt dotcoms deleteyoutube Instagram fascistbook FuckGoogle degoogle fuschia ungoogled ring affordances googledown gafam inspiring fuckoffgoogle deletefacebook fuckoffgoogleandco office365 instagram MatrixEffect playstore bigtech</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>activitypub</title>
|
||||
<description>followerpower FederatedSocialMedia Fediverse activitypub activertypub pleroma losttoot PeerTube gofed fediblock lazyfedi federation instances fedilab pixiv mastotips mastodev mastotip friendica hiveway misskey siskin followers fediart Pixelfed contentwarnings pixelfed fediverseplaysjackbox fedidb block FediMemories Feditip Fediseminar onlyfedi socialcg monal tusky peertubers imagedescription feditips fedizens Mastodon following epicyon peertubeadmin mastomagic dev fediadmin pixeldev fosstodon instanceblock mastodonmonday isolategab fedireads PeertubeMastodonHost Bookwyrm federated socialhome fedivers MastodonMondays fediverse imagedescriptions mastoadmin smithereen mastodon fedi fediplay peertube lab mobilizon gemifedi</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>programming</title>
|
||||
<description>Easer cpp report programming css objects Python FrancisBacon2020 mixers webdev gui release ada schutzstreifen rustlang ocaml program request_reaction uptronics solidarity hypocritcal profiles typescript forums vscode publiccode FreeSoftware vieprivée early adventofcode scripting warn spyware git solid trevornoah zinccoop tailwindcss raku fedidev c sourcecode publiekecode misc framaforms WendyLPatrick grep django gmic sackthelot gitportal relevance_P1Y kingparrot Leiharbeit programmer haskell Tarifvertrag unicode frgmntscnr github digitalmarketsact openrc tuskydev threema algorithms lisp forge pleaseshare HirsuteHippo resnetting fourtwenty libraries drivers freecode javascript fragment cpm code elisp patterns html terminal rust sauerkraut request spiritbomb r dramasystem go esbuild documentary golang clojurescript ruby contractpatch computers racket python indiedev kabelfernsehen alternatives OpenSource Scheibenwischer</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>nature</title>
|
||||
<description>hiking wat StormBella morning trees light birds nature frogs sunrise coldwater inaturalist forest morningcrew australianwildlife capybara natur amphibians</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>writing</title>
|
||||
<description>blog poetry poem journal</description>
|
||||
<description>blog tootfic authors poem magazine smallstories blogging smallpoems blogs interactivestorytelling WriteFreely storytelling goodreads creativewriting journal poetry</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>hardware</title>
|
||||
<description>plugandplay cyberdeck keyboards screenless modem TrueDelta keyboard cybredeck solarpunk lenovo ibm 3dprinting MechcanicalKeyboards openhardware raspberrypi barcode pinebookpro PinebookPro 3dprint arm thinkpad</description>
|
||||
<description>plugandplay PersonalComputer cyberdeck PineCUBE keyboards screenless modem analogcomputing TrueDelta keyboard ArmWorkstation daretocare printmaker cybredeck laptop solarpunk recycling lenovo fairelectronics fuse ibm 3dprinting MechcanicalKeyboards openhardware raspberrypi barcode pinetime pinebookpro PinebookPro 3dprint arm paperComputer amd openpower devopa thinkpad print electronic</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>places</title>
|
||||
<description>lapaz luanda asunción nouakchott conakry kyiv moscow saipan gibraltar dublin dannibleibt avarua hargeisa delhi niamey chișinău colombo brasília phnompenh mbabane belgrade belmopan pyongyang hannover ulaanbaatar oranjestad gaborone seattle ndjamena raw singapore kingedwardpoint abidjan nuuk pretoria papeete malé zagreb gitega abudhabi flyingfishcove castries georgetown hagåtña basseterre hamburg kinshasa suva valparaíso athens roseau baku charlotteamalie antananarivo pristina santiago sukhumi berlin funafuti libreville hanoi philipsburg tehran banjul prague andorralavella yerevan portauprince dakar paramaribo tifariti capetown tirana klima ankara ipswich managua lisbon bishkek amsterdam portonovo santodomingo bangkok bucharest kathmandu aden madrid sanjuan vienna kingston oi kabul damascus stockholm douglas willemstad thehague panamacity beirut amman newdelhi tórshavn nouméa oslo alofi gustavia paris video cockburntown ottawa stepanakert portofspain honiara asmara florida nicosia helsinki taipei tegucigalpa tokyo tashkent sarajevo algiers nairobi muscat monaco riyadh lusaka wellington bissau juba mariehamn majuro buenosaires ngerulmud dhaka guatemalacity washington vatican kuwaitcity bern mexicocity bratislava bridgetown delhipolice tunis manila stanley matautu copenhagen lomé budapest ouagadougou mogadishu freetown victoria brazzaville portmoresby ashgabat kampala elaaiún vilnius bloemfontein sucre london marseille pagopago bradesestate oakland vaduz addis nürnberg naypyidaw CassetteNavigation khartoum baghdad bandar moroni portvila kingstown reykjavík manama accra windhoek nukualofa tbilisi canberra quito maputo cetinje putrajaya ramallah bogotá dodoma harare havana warsaw münster valletta ljubljana bamako kualalumpur podgorica rabat cotonou plymouth seoul Portland dushanbe bangui westisland tskhinvali palikir caracas jamestown rome munich ass sãotomé jakarta daressalaam sansalvador apia essex yaren cairo jerusalem brussels kigali southtarawa beijing 
minsk montevideo vientiane maseru hamilton doha tripoli portlouis lima adamstown abuja lilongwe nassau lobamba nyc montreal dili riga lesbos monrovia nursultan sanjosé marigot islamabad malabo tallinn sahara thimphu yaoundé praia bujumbura sofia skopje</description>
|
||||
<description>lapaz luanda asunción nouakchott conakry kyiv moscow saipan gibraltar dublin catalunya dannibleibt avarua hargeisa delhi niamey chișinău colombo brasília phnompenh mbabane belgrade belmopan pyongyang hannover ulaanbaatar oranjestad gaborone seattle ndjamena raw singapore kingedwardpoint abidjan nuuk pretoria papeete malé zagreb gitega abudhabi flyingfishcove castries georgetown hagåtña borikua basseterre hamburg kinshasa suva valparaíso athens roseau baku charlotteamalie antananarivo domi pristina santiago sukhumi berlin uptronicsberlin funafuti libreville hanoi philipsburg tehran banjul prague andorralavella daw yerevan portauprince dakar paramaribo tifariti capetown tirana klima ankara ipswich managua lisbon bishkek amsterdam portonovo santodomingo bangkok bucharest kathmandu aden madrid sanjuan vienna kingston kabul damascus stockholm douglas willemstad thehague panamacity beirut amman newdelhi tórshavn nouméa oslo alofi gustavia paris video cockburntown ottawa stepanakert portofspain fsberlin honiara asmara florida nicosia helsinki taipei tegucigalpa tokyo tashkent larochelle MadeInEU sarajevo algiers nairobi muscat monaco riyadh lusaka wellington bissau juba mariehamn majuro buenosaires ngerulmud dhaka guatemalacity washington vatican kuwaitcity londonboaters bern mexicocity bratislava bridgetown delhipolice tunis manila stanley matautu copenhagen barcelona lomé budapest ouagadougou mogadishu freetown victoria brazzaville portmoresby ashgabat kampala elaaiún vilnius bloemfontein sucre london marseille pagopago bradesestate oakland vaduz addis nürnberg naypyidaw CassetteNavigation khartoum baghdad bandar moroni lehavre portvila kingstown ChrisCrawford reykjavík manama accra windhoek nukualofa ciutatvella tbilisi canberra quito maputo cetinje putrajaya ramallah bogotá dodoma harare havana warsaw münster valletta localberlin ljubljana bamako kualalumpur podgorica rabat cotonou plymouth seoul Portland dushanbe bangui aotearoa westisland tskhinvali 
palikir caracas jamestown rome munich ass freestuffberlin sãotomé jakarta daressalaam sansalvador apia essex yaren cairo jerusalem brussels kigali southtarawa beijing minsk montevideo vientiane maseru hamilton doha tripoli celtic portlouis lima adamstown deventer abuja lilongwe nassau lobamba heathrow nyc strawberry montreal dili riga assembly lesbos monrovia nursultan gab sanjosé marigot islamabad malabo tallinn sahara thimphu yaoundé praia bujumbura sofia skopje</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>music</title>
|
||||
<description>musicprodution punk ourbeats indiemusic streetpunk bandcamp musicians jamendo ipod skinheadmusic rap mp3 indie Music EnvoieStopHashtagAu81212 thecure vaporwave dubstep synthwave oi rave freemusic nowplaying hiphop experimentalmusic fedimusic soundcloud frankiegoestohollywood dj newwave dorkwave producing musicproduction funkwhale retrosynth NowPlaying libremusicproduction MusicAdvent coinkydink arianagrande synth music darkwave metal fediversemusic cyberpunkmusic BandcampFriday</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>politics</title>
|
||||
<description>TakeOurPowerBack cia wageslavery immigration liberation fascism rojava humanrights leftists Socialism ukpol copwatch freedom abolitionnow anarchism DefundThePolice technews smalltech bjp election generalstrike digitalfreedom mayday hatespeech fascists cyberlaw peerproduction corporations iww commons wageslave RemoveThePolice softwarefreedom neoliberalism socialecology MutualAid capitalism KeirStarmer politics inclusivity nzpol brexit totalitarianism TyskySour Labour SocietalChange facialrecognition propaganda decolonization polizei xp PritiPatel surveillance socialmedia elections ngo prisoners warrants borisjohnson mutuality whitehouse freedomofexpression censorship decolonize decenterwhiteness Biden ChineseAppBan cooperative modi law Capitalism surveillancecapitalism leftist Revolution ukpolitics rentstrike dsa migration mutualaid fascist polizeiproblem uselection ourstreets refugees tech</description>
|
||||
<description>TakeOurPowerBack cia community wageslavery immigration dissent liberation fascism techtuesday skyofmywindow freedomofspeech rojava humanrights leftists Socialism ukpol FreeKeithLamar copwatch capitalismkills petition BorisJohnson freedom abolitionnow anarchism DefundThePolice technews smalltech oilwars kommunismus bjp ThirdRunway hierarchy election sky_of_my_window generalstrike antipolitics digitalfreedom mayday hatespeech fascists lowtech a11y burntheprisons cyberlaw peerproduction corporations iww commons corporatewatch wageslave uspol frontex communism RemoveThePolice Immigration neoliberalism socialecology MutualAid capitalism technology prisons wealth conspiracytheories corporatecrime communist KeirStarmer anarchismus politics inclusivity brightgreen anarchisme DominicCummings nzpol Bookchin ClemencyNow brexit totalitarianism privatisation TyskySour Labour freethemall green BAME decolonizeyourmind privilege AbolishPrisonsAbolishPolice surfaceworldblows ecofascism SocietalChange facialrecognition corruption anarchy propaganda decolonization digitalrights feminism polizei neo xp 18Source radicaltech redandanarchistskinheads PritiPatel latestagecapitalism racist MexicanRevolution elections RussellMaroonShoatz white prisoners warrants policebrutality borisjohnson Anarchist press mutuality whitehouse freedomofexpression censorship decolonize emmet decenterwhiteness Biden ChineseAppBan cooperative modi law deathtoamerica manipulation firetotheprisons britpol Capitalism surveillancecapitalism leftist Revolution ukpolitics JeremyCorbyn blacklivesmatter FreeAlabamaMovement rentstrike dsa techno migration mutualaid multipleexposure AbolishPrison fascist socialcoop anarchistprisoners polizeiproblem uselection IDPol Slavetrade met ourstreets refugees acab freewestpapua tech</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>food</title>
|
||||
<description>vitamind cake margarine dessert salsa caviar cookery pietons food skillet liquor milk bolognese recipe oilwars foodporn yeast plate waffle biscuit glaze omelette filet pastry wine hamburger juice sourdough nuts gras toast broth batter foodie ketchup pandemic seasoning mayo soup pan mayonnaise vegan dish avocado spice bakery cooking yogurt spotify crumble cider butter cook cobbler steak pizza soda aroma oil flour cream nutella pie cuisine tartar tea marinade mushroom entree bread salad beans syrup cookie curd soysauce pudding beer baking fish foodwaste wheat pot grassroots stew chocolate paste wok recipes olive burger candy kitchen coffee bagel taste meat noodle raclette caramel rice eggs grill poutine lard croissant pasta foods cheese drink muffin foie sauce soy cocoa sandwich mousse chili vinegar</description>
|
||||
<description>vitamind cake margarine dessert salsa caviar theexpanse cookery pietons food skillet liquor milk bolognese recipe foodporn yeast plate waffle biscuit glaze omelette filet pastry wine hamburger juice Amazfish sourdough nuts gras toast broth batter foodie ketchup seasoning mayo soup pan voc imateapot teamcapy mayonnaise vegan dish avocado spice bakery cooking yogurt spotify crumble cider butter cook pottery cobbler steak pizza soda fedikitchen aroma oil flour cream nutella pie cuisine tartar tea marinade mushroom entree bread salad beans fresh syrup fermentation mushrooms cookie curd soysauce pudding beer baking fish foodwaste wheat pot TeamFerment stew chocolate paste wok recipes olive burger candy kitchen coffee bagel taste meat noodle raclette caramel rice eggs grill poutine lard croissant pasta foods cheese oregano drink muffin foie sauce soy vore cocoa sandwich mousse chili vinegar</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>farming</title>
|
||||
<description>johndeere</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>events</title>
|
||||
<description>award OONIbday live Day thursdaythoughts humanrightsday followfriday wednesdaymotivation showerthoughts anarchymonday holiday week concert festival livestream day InternationalCheetahDay meetup</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>countries</title>
|
||||
<description>romania burma lithuania solomon chile Instanz opensuse fiji tajikistan benin paraguay eeuu senegal ukraine italy brunei nicaragua guyana Pflanzenbestimmung euphoria zambia iceland morocco netherlands swaziland bosnian suriname elsalvador russia samoa european czech belarus hayabusa2 kyrgyzstan uk abuse translation sanmarino catalonia panama japan venezuela gambia freeNukem kuwait barbados papua greece switzerland uae nigeria usa angola honduras djibouti laos sierraleone cambodia ych vietnam dust3d neofeud seychelles marshall kazakhstan estonia tonga stlucia burundi bangladesh egypt mali congo us jordan speedrun grenada israel algeria ghana bosnia russian eritrea bhutan hungary saudi slovenia bahamas australia kiribati togo koreanorth poland malawi capeverde run armenia american bahrain mozambique southsudan syria micronesia maldives iran sweden ethiopia cuba liberia canada burkina somalia scotland vaticancity easttimor austria turkey yemen Bolivia denmark trunk madagascar finland philippines ivorycoast haiti ecuador Portugal azerbaijan spain albania afghanistan europe mauritania dominica thailand belize macedonia illustration montenegro qatar mongolia costarica boatingeurope birdsofkenya coronavirus latvia uzbekistan ireland iraq malaysia mexico mauritius oman chad nz georgia zimbabwe france serbia lesotho oddmuse tunisia argentina cameroon namibia sudan indonesia colombia smallbusiness tuvalu turkmenistan tanzania germany neuhier norway comoros guatemala kosovo andorra wales servus pakistan belgium china antigua koreasouth newzealand rwanda luxembourg libya italyisntreal nauru moldova palau taiwan kenya trinidad eu botswana CuriosidadesVariadas jamaica vanuatu cyprus malta niger unitedstates myanmar FreeNukum saintvincent guinea nepal peru uganda uruguay india lebanon neurodiversity southafrica croatia europeanunion bolivia chinese dominican srilanka bulgaria slovakia speedrunning gabon stkitts liechtenstein brazil</description>
|
||||
<description>romania burma lithuania solomon chile Instanz fiji tajikistan benin paraguay eeuu senegal ukraine italy brunei nicaragua guyana Pflanzenbestimmung euphoria zambia iceland morocco netherlands swaziland bosnian solo suriname elsalvador russia samoa european czech belarus hayabusa2 kyrgyzstan uk abuse translation sanmarino catalonia panama japan buyused venezuela gambia freeNukem kuwait barbados papua greece switzerland uae nigeria usa angola honduras djibouti laos sierraleone cambodia ych vietnam neofeud seychelles marshall kazakhstan estonia tonga stlucia burundi bangladesh egypt mali congo us jordan speedrun grenada israel algeria ghana bosnia russian industrial eritrea bhutan hungary saudi slovenia tig bahamas australia kiribati togo koreanorth poland malawi capeverde run armenia american hautrauswasgeht bahrain mozambique beleuchtung southsudan syria micronesia maldives iran indigenous sweden ethiopia cuba liberia canada burkina somalia Chile scotland aur vaticancity easttimor austria turkey yemen Bolivia denmark trunk madagascar finland philippines ivorycoast haiti ecuador Portugal azerbaijan gasuk spain albania afghanistan europe mauritania dominica thailand belize westpapuauprising macedonia montenegro qatar mongolia costarica boatingeurope birdsofkenya latvia uzbekistan kabelaufklärung ireland iraq malaysia mexico mauritius oman chad nz georgia zimbabwe france serbia lesotho oddmuse tunisia argentina cameroon namibia sudan indonesia colombia tuvalu britainology beckychambers turkmenistan tanzania germany neuhier norway comoros auteursrecht guatemala Thailand kosovo andorra wales servus pakistan belgium china antigua life koreasouth newzealand einzelfall rwanda luxembourg libya italyisntreal nauru Anarchismus moldova palau taiwan kenya trinidad eu botswana CuriosidadesVariadas jamaica vanuatu cyprus aminus3 malta niger westpapua busse unitedstates myanmar saintvincent guinea nepal peru uganda uruguay india lebanon neurodiversity southafrica 
croatia europeanunion bolivia chinese dominican srilanka bulgaria slovakia speedrunning gabon psychedelicart stkitts liechtenstein brazil shutdowncanada</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>cycling</title>
|
||||
<description>bicycle cycling bike Snowbike</description>
|
||||
<description>bicycle cycling bike thingsonbikes Snowbike cyclist</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>phones</title>
|
||||
<description>mobileapp fdroid plasmamobile android smartphone BriarProject pinephone mobile fairphone ubuntutouch osmand vodafone postmarketos iOS microg mobileKüfA</description>
|
||||
<description>mobileapp pine fdroid plasmamobile android phones smartphone iOS14 linuxphones QWERTYphones BriarProject librem5 pinephone mobile fairphone ubuntutouch Android ubports osmand vodafone iphones postmarketos iOS microg mobileKüfA</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>science</title>
|
||||
<description>math womeninstem supercollider nextgeneration dna archaeologist dawkins graphTheory psychology biology generation gene paleontology</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>pandemic</title>
|
||||
<description>covid19 corona Coronavirus CoronaWarnApp facemasks vaccines vaccine pandemic contacttracing tier4 covid coronavirus masks virus Lockdown rna codid19 COVID19 YesWeWork ContactTracing COVID</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>software</title>
|
||||
<description>app freedombox windows libre nginx Framasoft drm kubernetes jami FuckOffZoom free docker freesoftware gimp foss nextcloud wechat openscad ikiwiki outreachy lyft nitter opensource diaspora cabal conferencing accessibility devops owncast emacs gemini freiesoftware email chatapps floss plugins deltachat application uifail bittorrent zoom gpl FriendofGNOME usability obnam cryptpad OwnStream mumble grsync irssi mutt design backup apps profanity ffmpeg lemmy OSM win10 jitsi ux rsync libreoffice dino plugin openoffice</description>
|
||||
<description>app freedombox windows libre nginx Framasoft invidious drm publicdomain kubernetes fossmendations jami FuckOffZoom quicksy free docker freesoftware gimp foss matrix thefreethoughtproject nextcloud wechat openscad TabOrder ikiwiki Linux rocketchat outreachy lyft nitter discord opensource diaspora yunohost littlebigdetails cabal conferencing libreboot accessibility devops owncast emacs freiesoftware email chatapps floss plugins deltachat application uifail FOSS bittorrent vlc zoom tiling gpl FriendofGNOME usability obnam snap cryptpad software OwnStream zrythm mumble grsync telegram containers blockchain irssi mutt design gameoftrees backup rotonde GNU thunderbird sysadmin apps licensing screenreaders profanity ffmpeg lemmy OSM distributedledger win10 element nativeApp jitsi wordpress ux rsync libreoffice dino plugin OCUPACAOCARLOSMARIGHELLA whatsapp openoffice</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>security</title>
|
||||
<description>encrypt password cryptography infosec gchq IHaveSomethingToHide cryptowars UseAMaskUseTor cyberattack security tor e2e vpn openssh openssl e2ee encryption ssh crypto giftofencryption opsec torsocks nsa protonvpn yubikey nitrokey openpgp castor9 gpgtools gpg cybersecurity signal noscript np trust openvpn datasecurity tracking cloudflare</description>
|
||||
<description>encrypt omemo password cryptography solarwinds communityalgorithmictrust infosec gchq IHaveSomethingToHide IronySec cryptowars supplychainattacks UseAMaskUseTor cyberattack security tor e2e bruceschneier vpn openssh openssl e2ee ed25519 encryption ssh misshaialert crypto giftofencryption malware opsec keepass torsocks nsa protonvpn yubikey nitrokey openpgp castor9 gpgtools gpg fotopiastory cybersecurity CryptoWars signal noscript np trust cryptocurrency cryptomator openvpn datasecurity tracking cloudflare</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>conferences</title>
|
||||
<description>debconf talk fossdem FreedomBoxSummit schmoocon summit confidenceTricks minidebconf emacsconf defcon flossevent conf rC3 conference flossconf apconf C3 config</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>photos</title>
|
||||
<description>nikon photography photo tokyocameraclub photoshop camera picture</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>programming</title>
|
||||
<description>programming css ada rustlang ocaml program typescript publiccode adventofcode scripting git fedidev sourcecode django programmer github tuskydev lisp forge javascript code elisp html rust clojurescript racket python</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>activitypub</title>
|
||||
<description>activitypub pleroma PeerTube fediblock fedilab mastotips friendica misskey siskin followers Pixelfed pixelfed fedidb Fediseminar monal tusky peertubers feditips fedizens Mastodon epicyon mastomagic pixeldev PeertubeMastodonHost fediverse mastodon fedi peertube lab mobilizon</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>news</title>
|
||||
<description>news doubledownnews journalism</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>games</title>
|
||||
<description>minecraft chess mud game ttrpg guildwars2 TetrisGore gaming Gamesphere rpg dosgaming DnD minetest guildwars dnd</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>gardening</title>
|
||||
<description>blockade mastodev deno cabbage onions florespondence DailyFlowers permaculture flowers gardening de federated deathtoamerica</description>
|
||||
<description>sporespondence blockade inde independant deno cabbage bundeswehr onions bordeaux datenschleuder florespondence garden thyme DailyFlowers permaculture papuamerdeka flowers gardening de devilslettuce fahrräder golden</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>conferences</title>
|
||||
<description>debconf talk fossdem FreedomBoxSummit apconf2020 schmoocon summit confidenceTricks minidebconf rc3worldleaks emacsconf ox defcon flossevent conf rC3 rC3World conference flossconf apconf rC3one C3 config</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>cats</title>
|
||||
<description>dailycatpic DailyCatVid</description>
|
||||
<description>Cat dailycatpic dxp DailyCatVid katze CatsOfMastodon Leopard catbellies LapCats</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>music</title>
|
||||
<description>punk ourbeats streetpunk bandcamp musicians ipod skinheadmusic rap mp3 thecure vaporwave dubstep synthwave rave nowplaying hiphop experimentalmusic fedimusic dj newwave dorkwave producing NowPlaying libremusicproduction MusicAdvent fedivers synth music metal fediversemusic cyberpunkmusic BandcampFriday</description>
|
||||
<title>employment</title>
|
||||
<description>InterviewQuestions mywork reproductivework bullshitjobs antiwork kreaturworks worklog hire hirefedi carework nowhiring work letthenetwork jobs</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>retro</title>
|
||||
<description>A500 atarist commodore teletext floppy 8bit atari floppydisk retrocomputing C64 plan9 80s microcomputing omm z80 8bitdo retro amiga bbcmicro microcomputer</description>
|
||||
<title>linux</title>
|
||||
<description>osdev opensuse linuxisnotanos elementaryos cli kde Debian11 slackware mobian openwrt distros nixos nix DebianBullseye shareyourdesktop wireguard linuxaudio nixpkgs gtk debian trisquel gnome linuxposting showyourdesktop windowmanager desktop ubuntu xubuntu unix fedora centos gentoo usergroup systemd linuxgaming Debian distro destinationlinux qubesos i3wm haiku linuxisnotaplatform linux EMMS netbsd termux btrfs reproduciblebuilds artix gtk4 archlinux rhel debianinstaller linuxisajoke</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>indymedia</title>
|
||||
<description>visionontv indymediaback indymedia omn tv 4opens</description>
|
||||
<title>photos</title>
|
||||
<description>nikon photography photo photogrpahy tokyocameraclub photos photoshop camera myphoto picture streetphotography</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>questions</title>
|
||||
<description>askmastodon askfedi question haskell askmasto askfediverse ask askfosstodon</description>
|
||||
<title>crafts</title>
|
||||
<description>topic_imadethis textile upholstery dust3d hackers hackerspaces sanding sundiy knitting hack biohacking wip jewelry diy upcycling woodworking origami makers quilting hacker quilt 3dmodel woodwork ceramics embroidery</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>pets</title>
|
||||
<description>catpics catofmastodon mastodogs catbehaviour Coolcats dogsofmastodon gentrification cats kittens pet dog caturday catsofmastodon cute catstodon dogs mastocats cat catcontent</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>news</title>
|
||||
<description>news Wikileaks newsletter rt bbc doubledownnews journalism SkyNews</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>games</title>
|
||||
<description>minecraft tetris99 TerraNil runequest boardgames computergames gamedesign chess nintendoswitch mud indiegame game 0ad ttrpg gamedev guildwars2 TetrisGore gaming nintendo Gamesphere rpg tetris dosgaming DnD cyber2077 cyberpunk2077 FreeNukum neopets minetest guildwars dnd games</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>climate</title>
|
||||
<description>energy renewables clouds renewableenergy amp climateemergency climate windenergy coal globalwarming climatechange weather climatecamp windpower science fossilfuels sky climatescience climatecrisis</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>internet</title>
|
||||
<description>spam redecentralize wikipedia rtmp decentralization decentralize w3c torrent data sitejs internetarchaeology WordPress router selfhosting icann PeerToPeer dns openstandards oauth protonmail standards yourdataisyourdata SmallWeb xmpp semanticweb ntp socialnetworks videocast jabber decentralized ssb darknet cookies darkweb netcat server browser p2p www ilovewikipedia web twitch domain rss mail browsers decentralizeit openculture cyberspace offthegrid internet internetarchive js openweb socialweb</description>
|
||||
<description>i2p spam firefox redecentralize wikipedia rtmp decentralization decentralize w3c torrent data sitejs internetarchaeology WordPress self contentmoderation distributed router dataretention selfhosting communityhosting icann discourse PeerToPeer dns openstandards nojs oauth hypercore CDNsAreEvil protonmail standards yourdataisyourdata internetfreedom gemini webui SmallWeb xmpp semanticweb socialnetwork content ntp socialnetworks proton icmp videocast jabber decentralized wiki ssb darknet cookies darkweb netcat Reddit server browser cloudy p2p social antisocial www ilovewikipedia web WebsiteStatus twitch 9front theserverroom socialmedia domain rss ipns mozilla voicemail mail i2pd ipfs internetradio browsers decentralizeit netscape openculture cyberspace offthegrid cloud internet decentralisation internetarchive js dark openweb onlineharms dot ftp internetshutdowns fixtheweb socialweb</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>retro</title>
|
||||
<description>A500 atarist commodore teletext floppy 8bit atari trs80 floppydisk retrocomputing C64 plan9 80s microcomputing omm retrogaming z80 8bitdo retro commissions amiga bbcmicro microcomputer bbsing</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>indymedia</title>
|
||||
<description>visionontv globleIMC indymediaback pga indymedia hs2IMC indymediaIMC network roadsIMC omn tv roadstonowhereIMC UKIMC 4opens openmedianetwork</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>media</title>
|
||||
<description>livestreaming mainstreaming stream streaming weAreAllCrazy maiabeyrouti submedia theatlantic traditionalmedia videos railroads taina ai realmedia media</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>activism</title>
|
||||
<description>protestor grassroot g20 riseup sflc DanniVive reuse fsfe softwarefreedom ann activist xr directaction eff openrightsgroup protest JeffreySDukes actiondirecte kroymann HS2 ngo MarcWittmann fsf StopHS2 grassroots BLM changeisinyourhands conservancy JefferySaunders Kolektiva XR freeolabini announcement isolateByoblu annieleonard</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>questions</title>
|
||||
<description>askmastodon askfedi question askmasto askfediverse ask askfosstodon</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>birds</title>
|
||||
<description>RainbowBeeEater bird</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>ethics</title>
|
||||
<description>digitalethics ethics ethical</description>
|
||||
<description>digitalethics ethics ethicallicense ethical</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>linux</title>
|
||||
<description>cli kde Debian11 mobian openwrt distros DebianBullseye wireguard linuxaudio gtk debian trisquel gnome linuxposting ubuntu xubuntu unix fedora centos gentoo usergroup systemd linuxgaming Debian distro destinationlinux qubesos i3wm linux termux btrfs archlinux rhel debianinstaller</description>
|
||||
<title>disability</title>
|
||||
<description>ableism disabled</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>economics</title>
|
||||
<description>bitcoin theWorkshop feministeconomics WealthConcentration valuesovereignty funding value shop crowdfund startups HenryGeorge crowdfunding limitstogrowth micropatronage monetize smallbusiness GitPay gdp limits</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>art</title>
|
||||
<description>Linke urban glassart artvsartist2020 watercolor autisticartist barrigòtic art open krita urbanart queerart deviantart adultcolouring collage harmreductionart wallpaper streetart coverart fiberart MastoArt culture polArt ink painting opencoop digitalart comic artwork openbsd mandala xkcd comics santa mastoart illustration artopencall gnuimagemanipulationprogram os wireart cartoon webcomic furryart sticker artbreeder arttherapy TattoosOfTheFediverse artvsartist sculpture artist meme cultureshipnames concretepoetry artwithopensource opencallforartists commissionsopen peppertop blackartist zines zine furry opencollective JuliaHartleyBrewer artistsOfMastodon</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>podcasts</title>
|
||||
<description>podcasting IntergalacticWasabiHour podcast til tilderadio podcasts smallisbeautiful</description>
|
||||
<description>podcasting IntergalacticWasabiHour podcast tilde til tilderadio podcasts tildeverse smallisbeautiful tilvids</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>years</title>
|
||||
<description>Year2020 year 1yrago 5yrsago</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>pets</title>
|
||||
<description>catofmastodon mastodogs catbehaviour dogsofmastodon gentrification cats kittens pet dog caturday catsofmastodon cute dogs mastocats cat catcontent</description>
|
||||
<title>culture</title>
|
||||
<description>etiquette</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>identity</title>
|
||||
<description>boomer</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>political</title>
|
||||
<description>copservation linguisticProgramming</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>fashion</title>
|
||||
<description>brasil fashionistas fashionesta bras fashion socks patches</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>month</title>
|
||||
<description>april july march october november august june december september may feburary january month</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>funding</title>
|
||||
<description>disabledcrowdfund patreon</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>science</title>
|
||||
<description>womeninstem supercollider graphTheory biology paleontology</description>
|
||||
<title>books</title>
|
||||
<description>justhollythings earthsea ebooks book amreading bookwyrm bookreview theLibrary wayfarers books ebook epub cookbook</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>comedy</title>
|
||||
<description>laugh humour satire irony standup funny humor</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>techbros</title>
|
||||
<description>einfachredeneben hackernews red reddit</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>health</title>
|
||||
<description>medical burnout cannabis medicine treatment EmotionalFirstAid maryjane autistic health meds marijuana</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>seasons</title>
|
||||
<description>spring autumn winter summer</description>
|
||||
<description>spring autumn winter summer solstice wintersolstice</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>gender</title>
|
||||
<description>transwomen transcrowdfund female trans women estradiol woman transrights</description>
|
||||
<description>transwomen transcrowdfund womensart female nonbinary trans transphobia women estradiol queer genderQuiz woman transrights</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>art</title>
|
||||
<description>artvsartist2020 art krita urbanart adultcolouring collage streetart coverart MastoArt digitalart mandala xkcd mastoart webcomic furryart sticker artvsartist artist meme concretepoetry artwithopensource artix</description>
|
||||
<title>history</title>
|
||||
<description>history anarchisthistory</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>economics</title>
|
||||
<description>funding crowdfunding micropatronage GitPay</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>fiction</title>
|
||||
<description>cyberpunk thehobbit fiction cyberpunk2077</description>
|
||||
<description>cyberpunk thehobbit fiction microfiction genrefiction</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>legal</title>
|
||||
<description>hfgkarlsruhe amro GameSphere OnlineHarmsBill laipower gdpr intros Anticritique learning energyflow digitalservicesact geekproblem dmca</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>introductions</title>
|
||||
<description>newhere firsttoot recommends Introduction Introductions introduction intro introductions</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>audio</title>
|
||||
<description>audioproduction audiofeedback</description>
|
||||
<description>audioproduction audiofeedback audio</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>bots</title>
|
||||
<description>bot</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>climate</title>
|
||||
<description>clouds climateemergency climate weather climatecamp</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>books</title>
|
||||
<description>earthsea ebooks book theLibrary books ebook epub cookbook</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>scifi</title>
|
||||
<description>startrek starwars</description>
|
||||
<description>startrek starwars babylon5</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>pandemic</title>
|
||||
<description>CoronaWarnApp facemasks vaccines vaccine covid Lockdown codid19 COVID19 ContactTracing COVID</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>fashion</title>
|
||||
<description>fashionesta bras fashion patches</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>religion</title>
|
||||
<description>pagan</description>
|
||||
<description>neopagan pagan catholic</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>nature</title>
|
||||
<description>trees birds nature inaturalist</description>
|
||||
<title>astronomy</title>
|
||||
<description>amateurastronomy astronomy space jupiter BackYardAstronomy moon saturn milkyway</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>activism</title>
|
||||
<description>riseup sflc fsfe ann xr eff openrightsgroup protest fsf BLM conservancy annieleonard</description>
|
||||
<title>photography</title>
|
||||
<description>landscapephotography</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>techbros</title>
|
||||
<description>hackernews reddit</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>crafts</title>
|
||||
<description>hackerspaces knitting jewelry diy makers quilting quilt 3dmodel</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>moderation</title>
|
||||
<description>fedblock</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>languages</title>
|
||||
<description>lojban gaelic</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>election</title>
|
||||
<description>voted vote</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>#music</title>
|
||||
<description>trance</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>health</title>
|
||||
<description>medicine meds medical health treatment</description>
|
||||
<title>facts</title>
|
||||
<description>didyouknow lifehack</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>radio</title>
|
||||
<description>radiohost vantaradio radio hamradio radiobroadcasting radioshow radiobroadcast spazradio anonradio</description>
|
||||
<description>radiohost vantascape vantaradio ca radio healthcare listening hamradio FreeAllPoliticalPrisoners card10 radiobroadcasting 3dcad radioshow local california listeningtonow radiobroadcast spazradio anonradio io</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>licenses</title>
|
||||
<description>copyright creative common creativecommons</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>education</title>
|
||||
<description>education tutorial teach</description>
|
||||
<description>education teach tutorial</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>legal</title>
|
||||
<description>weAreAllCrazy redandanarchistskinheads r digitalservicesact</description>
|
||||
<title>privacy</title>
|
||||
<description>surveillancetech privacymatters surveillance dataprivacy privacy</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>employment</title>
|
||||
<description>hirefedi jobs work hire</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>microcontroller</title>
|
||||
<description>microcontroller arduino</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>astronomy</title>
|
||||
<description>space jupiter moon saturn milkyway</description>
|
||||
<title>people</title>
|
||||
<description>monbiot aldoushuxley relationships AskVanta</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>comedy</title>
|
||||
<description>satire irony standup humor humour funny laugh</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>privacy</title>
|
||||
<description>privacymatters dataprivacy privacy</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>sport</title>
|
||||
<description>Millwall snooker darts billiard football baseball skating hockey athletics skiing diving sailing motorsport sport marathon swim</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>environment</title>
|
||||
<description>climate climatechange climatechaos</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>scotland</title>
|
||||
<description>highlands edinburgh glasgow loch</description>
|
||||
<description>glasgow highlands edinburgh loch</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>entertainment</title>
|
||||
<description>watching Thundercat thisisthetypeofmemethatilikecauseitcontainsreptiles entertainment me meow un themandalorian</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>#software</title>
|
||||
<description>flatpak</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>microcontrollers</title>
|
||||
<description>esp8266 esp32</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>help</title>
|
||||
<description>helpful help</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>war</title>
|
||||
<description>weapons</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>philosophy</title>
|
||||
<description>stoic postmodernism</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>france</title>
|
||||
<description>Macronavirus</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>travel</title>
|
||||
<description>travel taxi</description>
|
||||
<link/>
|
||||
<pubDate>Sat, 12 Dec 2020 16:04:56 UT</pubDate>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>environment</title>
|
||||
<description>climatechaos</description>
|
||||
<link/>
|
||||
<pubDate>Tue, 29 Dec 2020 20:59:38 UT</pubDate>
|
||||
</item>
|
||||
</channel>
|
||||
</rss>
|
||||
|
|
138
delete.py
|
@ -8,86 +8,20 @@ __status__ = "Production"
|
|||
|
||||
import os
|
||||
from datetime import datetime
|
||||
from utils import hasUsersPath
|
||||
from utils import getFullDomain
|
||||
from utils import removeIdEnding
|
||||
from utils import getStatusNumber
|
||||
from utils import urlPermitted
|
||||
from utils import getNicknameFromActor
|
||||
from utils import getDomainFromActor
|
||||
from utils import locatePost
|
||||
from utils import deletePost
|
||||
from utils import removeModerationPostFromIndex
|
||||
from posts import sendSignedJson
|
||||
from session import postJson
|
||||
from webfinger import webfingerHandle
|
||||
from auth import createBasicAuthHeader
|
||||
from posts import getPersonBox
|
||||
|
||||
|
||||
def createDelete(session, baseDir: str, federationList: [],
|
||||
nickname: str, domain: str, port: int,
|
||||
toUrl: str, ccUrl: str, httpPrefix: str,
|
||||
objectUrl: str, clientToServer: bool,
|
||||
sendThreads: [], postLog: [],
|
||||
personCache: {}, cachedWebfingers: {},
|
||||
debug: bool) -> {}:
|
||||
"""Creates a delete message
|
||||
Typically toUrl will be https://www.w3.org/ns/activitystreams#Public
|
||||
and ccUrl might be a specific person whose post is to be deleted
|
||||
objectUrl is typically the url of the message, corresponding to url
|
||||
or atomUri in createPostBase
|
||||
"""
|
||||
if not urlPermitted(objectUrl, federationList):
|
||||
return None
|
||||
|
||||
if ':' in domain:
|
||||
domain = domain.split(':')[0]
|
||||
fullDomain = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
fullDomain = domain + ':' + str(port)
|
||||
|
||||
statusNumber, published = getStatusNumber()
|
||||
newDeleteId = \
|
||||
httpPrefix + '://' + fullDomain + '/users/' + \
|
||||
nickname + '/statuses/' + statusNumber
|
||||
newDelete = {
|
||||
"@context": "https://www.w3.org/ns/activitystreams",
|
||||
'actor': httpPrefix+'://'+fullDomain+'/users/'+nickname,
|
||||
'atomUri': newDeleteId,
|
||||
'cc': [],
|
||||
'id': newDeleteId + '/activity',
|
||||
'object': objectUrl,
|
||||
'published': published,
|
||||
'to': [toUrl],
|
||||
'type': 'Delete'
|
||||
}
|
||||
if ccUrl:
|
||||
if len(ccUrl) > 0:
|
||||
newDelete['cc'] = [ccUrl]
|
||||
|
||||
deleteNickname = None
|
||||
deleteDomain = None
|
||||
deletePort = None
|
||||
if '/users/' in objectUrl or \
|
||||
'/accounts/' in objectUrl or \
|
||||
'/channel/' in objectUrl or \
|
||||
'/profile/' in objectUrl:
|
||||
deleteNickname = getNicknameFromActor(objectUrl)
|
||||
deleteDomain, deletePort = getDomainFromActor(objectUrl)
|
||||
|
||||
if deleteNickname and deleteDomain:
|
||||
sendSignedJson(newDelete, session, baseDir,
|
||||
nickname, domain, port,
|
||||
deleteNickname, deleteDomain, deletePort,
|
||||
'https://www.w3.org/ns/activitystreams#Public',
|
||||
httpPrefix, True, clientToServer, federationList,
|
||||
sendThreads, postLog, cachedWebfingers,
|
||||
personCache, debug)
|
||||
|
||||
return newDelete
|
||||
|
||||
|
||||
def sendDeleteViaServer(baseDir: str, session,
|
||||
fromNickname: str, password: str,
|
||||
fromDomain: str, fromPort: int,
|
||||
|
@ -100,11 +34,7 @@ def sendDeleteViaServer(baseDir: str, session,
|
|||
print('WARN: No session for sendDeleteViaServer')
|
||||
return 6
|
||||
|
||||
fromDomainFull = fromDomain
|
||||
if fromPort:
|
||||
if fromPort != 80 and fromPort != 443:
|
||||
if ':' not in fromDomain:
|
||||
fromDomainFull = fromDomain + ':' + str(fromPort)
|
||||
fromDomainFull = getFullDomain(fromDomain, fromPort)
|
||||
|
||||
actor = httpPrefix + '://' + fromDomainFull + \
|
||||
'/users/' + fromNickname
|
||||
|
@ -142,7 +72,7 @@ def sendDeleteViaServer(baseDir: str, session,
|
|||
fromPersonId, sharedInbox, avatarUrl,
|
||||
displayName) = getPersonBox(baseDir, session, wfRequest, personCache,
|
||||
projectVersion, httpPrefix, fromNickname,
|
||||
fromDomain, postToBox)
|
||||
fromDomain, postToBox, 53036)
|
||||
|
||||
if not inboxUrl:
|
||||
if debug:
|
||||
|
@ -173,60 +103,6 @@ def sendDeleteViaServer(baseDir: str, session,
|
|||
return newDeleteJson
|
||||
|
||||
|
||||
def deletePublic(session, baseDir: str, federationList: [],
|
||||
nickname: str, domain: str, port: int, httpPrefix: str,
|
||||
objectUrl: str, clientToServer: bool,
|
||||
sendThreads: [], postLog: [],
|
||||
personCache: {}, cachedWebfingers: {},
|
||||
debug: bool) -> {}:
|
||||
"""Makes a public delete activity
|
||||
"""
|
||||
fromDomain = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
fromDomain = domain + ':' + str(port)
|
||||
|
||||
toUrl = 'https://www.w3.org/ns/activitystreams#Public'
|
||||
ccUrl = httpPrefix + '://' + fromDomain + \
|
||||
'/users/' + nickname + '/followers'
|
||||
return createDelete(session, baseDir, federationList,
|
||||
nickname, domain, port,
|
||||
toUrl, ccUrl, httpPrefix,
|
||||
objectUrl, clientToServer,
|
||||
sendThreads, postLog,
|
||||
personCache, cachedWebfingers,
|
||||
debug)
|
||||
|
||||
|
||||
def deletePostPub(session, baseDir: str, federationList: [],
|
||||
nickname: str, domain: str, port: int, httpPrefix: str,
|
||||
deleteNickname: str, deleteDomain: str,
|
||||
deletePort: int, deleteHttpsPrefix: str,
|
||||
deleteStatusNumber: int, clientToServer: bool,
|
||||
sendThreads: [], postLog: [],
|
||||
personCache: {}, cachedWebfingers: {},
|
||||
debug: bool) -> {}:
|
||||
"""Deletes a given status post
|
||||
"""
|
||||
deletedDomain = deleteDomain
|
||||
if deletePort:
|
||||
if deletePort != 80 and deletePort != 443:
|
||||
if ':' not in deletedDomain:
|
||||
deletedDomain = deletedDomain + ':' + str(deletePort)
|
||||
|
||||
objectUrl = \
|
||||
deleteHttpsPrefix + '://' + deletedDomain + '/users/' + \
|
||||
deleteNickname + '/statuses/' + str(deleteStatusNumber)
|
||||
|
||||
return deletePublic(session, baseDir, federationList,
|
||||
nickname, domain, port, httpPrefix,
|
||||
objectUrl, clientToServer,
|
||||
sendThreads, postLog,
|
||||
personCache, cachedWebfingers,
|
||||
debug)
|
||||
|
||||
|
||||
def outboxDelete(baseDir: str, httpPrefix: str,
|
||||
nickname: str, domain: str,
|
||||
messageJson: {}, debug: bool,
|
||||
|
@ -264,10 +140,7 @@ def outboxDelete(baseDir: str, httpPrefix: str,
|
|||
if debug:
|
||||
print('DEBUG: c2s delete object is not a status')
|
||||
return
|
||||
if '/users/' not in messageId and \
|
||||
'/accounts/' not in messageId and \
|
||||
'/channel/' not in messageId and \
|
||||
'/profile/' not in messageId:
|
||||
if not hasUsersPath(messageId):
|
||||
if debug:
|
||||
print('DEBUG: c2s delete object has no nickname')
|
||||
return
|
||||
|
@ -321,6 +194,7 @@ def removeOldHashtags(baseDir: str, maxMonths: int) -> str:
|
|||
# check of the file is too old
|
||||
if fileDaysSinceEpoch < maxDaysSinceEpoch:
|
||||
removeHashtags.append(tagsFilename)
|
||||
break
|
||||
|
||||
for removeFilename in removeHashtags:
|
||||
try:
|
||||
|
|
|
@ -152,6 +152,7 @@ def E2EEdevicesCollection(baseDir: str, nickname: str, domain: str,
|
|||
devJson = loadJson(deviceFilename)
|
||||
if devJson:
|
||||
deviceList.append(devJson)
|
||||
break
|
||||
|
||||
devicesDict = {
|
||||
'id': personId + '/collections/devices',
|
||||
|
|
|
@ -7,7 +7,7 @@ __email__ = "bob@freedombone.net"
|
|||
__status__ = "Production"
|
||||
|
||||
|
||||
def getDonationTypes() -> str:
|
||||
def _getDonationTypes() -> str:
|
||||
return ('patreon', 'paypal', 'gofundme', 'liberapay',
|
||||
'kickstarter', 'indiegogo', 'crowdsupply',
|
||||
'subscribestar')
|
||||
|
@ -18,7 +18,7 @@ def getDonationUrl(actorJson: {}) -> str:
|
|||
"""
|
||||
if not actorJson.get('attachment'):
|
||||
return ''
|
||||
donationType = getDonationTypes()
|
||||
donationType = _getDonationTypes()
|
||||
for propertyValue in actorJson['attachment']:
|
||||
if not propertyValue.get('name'):
|
||||
continue
|
||||
|
@ -54,7 +54,7 @@ def setDonationUrl(actorJson: {}, donateUrl: str) -> None:
|
|||
if not actorJson.get('attachment'):
|
||||
actorJson['attachment'] = []
|
||||
|
||||
donationType = getDonationTypes()
|
||||
donationType = _getDonationTypes()
|
||||
donateName = None
|
||||
for paymentService in donationType:
|
||||
if paymentService in donateUrl:
|
||||
|
|
|
@ -314,6 +314,9 @@ a:focus {
|
|||
.message {
|
||||
margin-left: 7%;
|
||||
width: 90%;
|
||||
hyphens: auto;
|
||||
text-wrap: pretty;
|
||||
text-align: justify;
|
||||
}
|
||||
|
||||
.gitpatch {
|
||||
|
|
|
@ -244,6 +244,10 @@ h1 {
|
|||
width: 10%;
|
||||
}
|
||||
|
||||
.containerSubmitNewPost {
|
||||
margin: 20px;
|
||||
}
|
||||
|
||||
.container img.timelineicon:hover {
|
||||
filter: brightness(150%);
|
||||
}
|
||||
|
@ -524,6 +528,7 @@ input[type=submit] {
|
|||
font-size: var(--font-size-header);
|
||||
font-family: Arial, Helvetica, sans-serif;
|
||||
width: 120px;
|
||||
float: right;
|
||||
}
|
||||
|
||||
.loginButton {
|
||||
|
|
|
@ -11,7 +11,7 @@
|
|||
# License
|
||||
# =======
|
||||
#
|
||||
# Copyright (C) 2020 Bob Mottram <bob@freedombone.net>
|
||||
# Copyright (C) 2020-2021 Bob Mottram <bob@freedombone.net>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
|
|
|
@ -4,8 +4,8 @@
|
|||
--options-bg-color: #282c37;
|
||||
--options-link-bg-color: transparent;
|
||||
--options-fg-color: #dddddd;
|
||||
--main-link-color: #999;
|
||||
--main-visited-color: #888;
|
||||
--options-main-link-color: #999;
|
||||
--options-main-visited-color: #888;
|
||||
--border-color: #505050;
|
||||
--font-size-header: 18px;
|
||||
--font-color-header: #ccc;
|
||||
|
@ -34,7 +34,7 @@
|
|||
--follow-text-entry-width: 90%;
|
||||
--focus-color: white;
|
||||
--petname-width-chars: 16ch;
|
||||
--main-link-color-hover: #bbb;
|
||||
--options-main-link-color-hover: #bbb;
|
||||
}
|
||||
|
||||
@font-face {
|
||||
|
@ -73,25 +73,25 @@ a, u {
|
|||
}
|
||||
|
||||
a:visited{
|
||||
color: var(--main-visited-color);
|
||||
color: var(--options-main-visited-color);
|
||||
background: var(--options-link-bg-color);
|
||||
font-weight: normal;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
a:link {
|
||||
color: var(--main-link-color);
|
||||
color: var(--options-main-link-color);
|
||||
background: var(--options-link-bg-color);
|
||||
font-weight: normal;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
a:link:hover {
|
||||
color: var(--main-link-color-hover);
|
||||
color: var(--options-main-link-color-hover);
|
||||
}
|
||||
|
||||
a:visited:hover {
|
||||
color: var(--main-link-color-hover);
|
||||
color: var(--options-main-link-color-hover);
|
||||
}
|
||||
|
||||
a:focus {
|
||||
|
@ -116,12 +116,12 @@ a:focus {
|
|||
|
||||
.imText {
|
||||
font-size: var(--font-size4);
|
||||
color: var(--main-link-color);
|
||||
color: var(--options-main-link-color);
|
||||
}
|
||||
|
||||
.pgp {
|
||||
font-size: var(--font-size5);
|
||||
color: var(--main-link-color);
|
||||
color: var(--options-main-link-color);
|
||||
background: var(--options-link-bg-color);
|
||||
}
|
||||
|
||||
|
|
|
@ -472,6 +472,12 @@ a:focus {
|
|||
border: 0;
|
||||
background-color: var(--main-bg-color);
|
||||
margin: var(--vertical-between-posts);
|
||||
float: right;
|
||||
}
|
||||
|
||||
.editColumnHeader {
|
||||
float: right;
|
||||
min-width: 10ch;
|
||||
}
|
||||
|
||||
.media {
|
||||
|
@ -485,6 +491,9 @@ a:focus {
|
|||
margin-left: 0%;
|
||||
margin-right: 0%;
|
||||
width: 100%;
|
||||
hyphens: auto;
|
||||
text-wrap: pretty;
|
||||
text-align: justify;
|
||||
}
|
||||
|
||||
.addedHashtag:link {
|
||||
|
@ -704,7 +713,8 @@ input[type=submit] {
|
|||
cursor: pointer;
|
||||
font-size: var(--font-size-header);
|
||||
font-family: Arial, Helvetica, sans-serif;
|
||||
width: 120px;
|
||||
width: 10ch;
|
||||
float: right;
|
||||
}
|
||||
|
||||
.loginButton {
|
||||
|
@ -950,6 +960,14 @@ div.container {
|
|||
font-size: var(--font-size);
|
||||
color: var(--title-color);
|
||||
}
|
||||
.accountsTable {
|
||||
width: 100%;
|
||||
border: 0;
|
||||
}
|
||||
.accountsTableCol {
|
||||
width: 20%;
|
||||
text-align: center;
|
||||
}
|
||||
.containerHeader {
|
||||
border: var(--border-width-header) solid var(--border-color);
|
||||
background-color: var(--header-bg-color);
|
||||
|
@ -984,11 +1002,17 @@ div.container {
|
|||
color: var(--column-right-fg-color);
|
||||
line-height: var(--line-spacing-newswire);
|
||||
}
|
||||
.newswireItem img {
|
||||
width: 20px;
|
||||
}
|
||||
.newswireItemModerated {
|
||||
font-size: var(--font-size-newswire);
|
||||
color: var(--newswire-item-moderated-color);
|
||||
line-height: var(--line-spacing-newswire);
|
||||
}
|
||||
.newswireItemModerated img {
|
||||
width: 20px;
|
||||
}
|
||||
.newswireDateModerated {
|
||||
font-size: var(--font-size-newswire);
|
||||
font-weight: bold;
|
||||
|
@ -1004,6 +1028,9 @@ div.container {
|
|||
color: var(--column-right-fg-color-voted-on);
|
||||
line-height: var(--line-spacing-newswire);
|
||||
}
|
||||
.newswireItemVotedOn img {
|
||||
width: 20px;
|
||||
}
|
||||
.newswireDate {
|
||||
font-size: var(--font-size-newswire);
|
||||
color: var(--newswire-date-color);
|
||||
|
@ -1025,6 +1052,8 @@ div.container {
|
|||
max-height: var(--banner-height);
|
||||
}
|
||||
.timeline {
|
||||
display: flex;
|
||||
flex-wrap: nowrap;
|
||||
border: 0;
|
||||
width: 100%;
|
||||
}
|
||||
|
@ -1043,7 +1072,6 @@ div.container {
|
|||
border: var(--column-left-border-width) solid var(--column-left-border-color);
|
||||
color: var(--column-left-fg-color);
|
||||
font-size: var(--font-size-links);
|
||||
float: left;
|
||||
width: var(--column-left-width);
|
||||
}
|
||||
.col-left img.leftColEditImage:hover {
|
||||
|
@ -1446,7 +1474,8 @@ div.container {
|
|||
cursor: pointer;
|
||||
font-size: var(--font-size);
|
||||
font-family: Arial, Helvetica, sans-serif;
|
||||
width: 20%;
|
||||
width: 10ch;
|
||||
float: right;
|
||||
}
|
||||
.question {
|
||||
font-size: var(--font-size);
|
||||
|
@ -1466,7 +1495,7 @@ div.container {
|
|||
input.vote[type=submit] {
|
||||
background-color: var(--button-background);
|
||||
color: var(--button-text);
|
||||
float: left;
|
||||
float: right;
|
||||
padding: var(--button-height-padding);
|
||||
margin: 15px;
|
||||
border: none;
|
||||
|
@ -1601,6 +1630,14 @@ div.container {
|
|||
font-size: var(--font-size-mobile);
|
||||
color: var(--title-color);
|
||||
}
|
||||
.accountsTable {
|
||||
width: 100%;
|
||||
border: 0;
|
||||
}
|
||||
.accountsTableCol {
|
||||
width: 20%;
|
||||
text-align: center;
|
||||
}
|
||||
.containerHeader {
|
||||
border: var(--border-width-header) solid var(--border-color);
|
||||
background-color: var(--header-bg-color);
|
||||
|
@ -1661,11 +1698,17 @@ div.container {
|
|||
color: var(--column-right-fg-color);
|
||||
line-height: var(--line-spacing-newswire);
|
||||
}
|
||||
.newswireItem img {
|
||||
width: 40px;
|
||||
}
|
||||
.newswireItemModerated {
|
||||
font-size: var(--font-size-newswire-mobile);
|
||||
color: var(--newswire-item-moderated-color);
|
||||
line-height: var(--line-spacing-newswire);
|
||||
}
|
||||
.newswireItemModerated img {
|
||||
width: 40px;
|
||||
}
|
||||
.newswireDateModerated {
|
||||
font-size: var(--font-size-newswire-mobile);
|
||||
font-weight: bold;
|
||||
|
@ -1681,6 +1724,9 @@ div.container {
|
|||
color: var(--column-right-fg-color-voted-on);
|
||||
line-height: var(--line-spacing-newswire);
|
||||
}
|
||||
.newswireItemVotedOn img {
|
||||
width: 40px;
|
||||
}
|
||||
.newswireDate {
|
||||
font-size: var(--font-size-newswire-mobile);
|
||||
color: var(--newswire-date-color);
|
||||
|
@ -1702,9 +1748,7 @@ div.container {
|
|||
max-height: var(--banner-height-mobile);
|
||||
}
|
||||
.timeline {
|
||||
border: 0;
|
||||
width: 100vw;
|
||||
table-layout: fixed;
|
||||
overflow: hidden;
|
||||
}
|
||||
.column-left {
|
||||
|
@ -1715,8 +1759,6 @@ div.container {
|
|||
margin-left: var(--column-left-mobile-margin);
|
||||
}
|
||||
.col-left {
|
||||
float: left;
|
||||
width: 0%;
|
||||
display: none;
|
||||
}
|
||||
.col-center {
|
||||
|
@ -1727,8 +1769,6 @@ div.container {
|
|||
margin-right: var(--column-left-mobile-margin);
|
||||
}
|
||||
.col-right {
|
||||
float: right;
|
||||
width: 0%;
|
||||
display: none;
|
||||
}
|
||||
.column-right {
|
||||
|
@ -2078,7 +2118,8 @@ div.container {
|
|||
cursor: pointer;
|
||||
font-size: var(--font-size3);
|
||||
font-family: Arial, Helvetica, sans-serif;
|
||||
width: 20%;
|
||||
width: 10ch;
|
||||
float: right;
|
||||
}
|
||||
.question {
|
||||
font-size: var(--font-size3);
|
||||
|
@ -2098,7 +2139,7 @@ div.container {
|
|||
input.vote[type=submit] {
|
||||
background-color: var(--button-background);
|
||||
color: var(--button-text);
|
||||
float: left;
|
||||
float: right;
|
||||
padding: var(--button-height-padding-mobile);
|
||||
margin: 15px;
|
||||
border: none;
|
||||
|
|
151
epicyon.py
|
@ -47,6 +47,8 @@ from tests import testClientToServer
|
|||
from tests import runAllTests
|
||||
from auth import storeBasicCredentials
|
||||
from auth import createPassword
|
||||
from utils import hasUsersPath
|
||||
from utils import getFullDomain
|
||||
from utils import setConfigParam
|
||||
from utils import getConfigParam
|
||||
from utils import getDomainFromActor
|
||||
|
@ -116,9 +118,19 @@ parser.add_argument('--postsPerSource',
|
|||
dest='maxNewswirePostsPerSource', type=int,
|
||||
default=4,
|
||||
help='Maximum newswire posts per feed or account')
|
||||
parser.add_argument('--dormantMonths',
|
||||
dest='dormantMonths', type=int,
|
||||
default=3,
|
||||
help='How many months does a followed account need to ' +
|
||||
'be unseen for before being considered dormant')
|
||||
parser.add_argument('--sendThreadsTimeoutMins',
|
||||
dest='sendThreadsTimeoutMins', type=int,
|
||||
default=30,
|
||||
help='How many minutes before a thread to send out ' +
|
||||
'posts expires')
|
||||
parser.add_argument('--maxNewswirePosts',
|
||||
dest='maxNewswirePosts', type=int,
|
||||
default=20,
|
||||
default=40,
|
||||
help='Maximum newswire posts in the right column')
|
||||
parser.add_argument('--maxFeedSize',
|
||||
dest='maxNewswireFeedSizeKb', type=int,
|
||||
|
@ -231,11 +243,6 @@ parser.add_argument("--fullWidthTimelineButtonHeader",
|
|||
help="Whether to show the timeline " +
|
||||
"button header containing inbox and outbox " +
|
||||
"as the full width of the screen")
|
||||
parser.add_argument("--allowNewsFollowers",
|
||||
dest='allowNewsFollowers',
|
||||
type=str2bool, nargs='?',
|
||||
const=True, default=False,
|
||||
help="Whether to allow the news account to be followed")
|
||||
parser.add_argument("--iconsAsButtons",
|
||||
dest='iconsAsButtons',
|
||||
type=str2bool, nargs='?',
|
||||
|
@ -471,7 +478,6 @@ if args.debug:
|
|||
if args.tests:
|
||||
runAllTests()
|
||||
sys.exit()
|
||||
|
||||
if args.testsnetwork:
|
||||
print('Network Tests')
|
||||
testPostMessageBetweenServers()
|
||||
|
@ -496,10 +502,8 @@ if args.posts:
|
|||
if '/users/' in args.posts:
|
||||
postsNickname = getNicknameFromActor(args.posts)
|
||||
postsDomain, postsPort = getDomainFromActor(args.posts)
|
||||
args.posts = postsNickname + '@' + postsDomain
|
||||
if postsPort:
|
||||
if postsPort != 80 and postsPort != 443:
|
||||
args.posts += ':' + str(postsPort)
|
||||
args.posts = \
|
||||
getFullDomain(postsNickname + '@' + postsDomain, postsPort)
|
||||
else:
|
||||
print('Syntax: --posts nickname@domain')
|
||||
sys.exit()
|
||||
|
@ -528,10 +532,8 @@ if args.postDomains:
|
|||
if '/users/' in args.postDomains:
|
||||
postsNickname = getNicknameFromActor(args.postDomains)
|
||||
postsDomain, postsPort = getDomainFromActor(args.postDomains)
|
||||
args.postDomains = postsNickname + '@' + postsDomain
|
||||
if postsPort:
|
||||
if postsPort != 80 and postsPort != 443:
|
||||
args.postDomains += ':' + str(postsPort)
|
||||
args.postDomains = \
|
||||
getFullDomain(postsNickname + '@' + postsDomain, postsPort)
|
||||
else:
|
||||
print('Syntax: --postDomains nickname@domain')
|
||||
sys.exit()
|
||||
|
@ -568,10 +570,8 @@ if args.postDomainsBlocked:
|
|||
postsNickname = getNicknameFromActor(args.postDomainsBlocked)
|
||||
postsDomain, postsPort = \
|
||||
getDomainFromActor(args.postDomainsBlocked)
|
||||
args.postDomainsBlocked = postsNickname + '@' + postsDomain
|
||||
if postsPort:
|
||||
if postsPort != 80 and postsPort != 443:
|
||||
args.postDomainsBlocked += ':' + str(postsPort)
|
||||
args.postDomainsBlocked = \
|
||||
getFullDomain(postsNickname + '@' + postsDomain, postsPort)
|
||||
else:
|
||||
print('Syntax: --postDomainsBlocked nickname@domain')
|
||||
sys.exit()
|
||||
|
@ -607,10 +607,8 @@ if args.checkDomains:
|
|||
if '/users/' in args.checkDomains:
|
||||
postsNickname = getNicknameFromActor(args.posts)
|
||||
postsDomain, postsPort = getDomainFromActor(args.posts)
|
||||
args.checkDomains = postsNickname + '@' + postsDomain
|
||||
if postsPort:
|
||||
if postsPort != 80 and postsPort != 443:
|
||||
args.checkDomains += ':' + str(postsPort)
|
||||
args.checkDomains = \
|
||||
getFullDomain(postsNickname + '@' + postsDomain, postsPort)
|
||||
else:
|
||||
print('Syntax: --checkDomains nickname@domain')
|
||||
sys.exit()
|
||||
|
@ -1121,7 +1119,7 @@ if args.undoItemName:
|
|||
cachedWebfingers = {}
|
||||
print('Sending undo of shared item: ' + args.undoItemName)
|
||||
|
||||
sendUndoShareViaServer(session,
|
||||
sendUndoShareViaServer(baseDir, session,
|
||||
args.nickname, args.password,
|
||||
domain, port,
|
||||
httpPrefix,
|
||||
|
@ -1315,10 +1313,7 @@ if args.actor:
|
|||
for prefix in prefixes:
|
||||
args.actor = args.actor.replace(prefix, '')
|
||||
args.actor = args.actor.replace('/@', '/users/')
|
||||
if '/users/' not in args.actor and \
|
||||
'/accounts/' not in args.actor and \
|
||||
'/channel/' not in args.actor and \
|
||||
'/profile/' not in args.actor:
|
||||
if not hasUsersPath(args.actor):
|
||||
print('Expected actor format: ' +
|
||||
'https://domain/@nick or https://domain/users/nick')
|
||||
sys.exit()
|
||||
|
@ -1388,10 +1383,7 @@ if args.actor:
|
|||
personUrl = None
|
||||
if wfRequest.get('errors'):
|
||||
print('wfRequest error: ' + str(wfRequest['errors']))
|
||||
if '/users/' in args.actor or \
|
||||
'/accounts/' in args.actor or \
|
||||
'/profile/' in args.actor or \
|
||||
'/channel/' in args.actor:
|
||||
if hasUsersPath(args.actor):
|
||||
personUrl = originalActor
|
||||
else:
|
||||
sys.exit()
|
||||
|
@ -1928,33 +1920,88 @@ if args.testdata:
|
|||
|
||||
deleteAllPosts(baseDir, nickname, domain, 'inbox')
|
||||
deleteAllPosts(baseDir, nickname, domain, 'outbox')
|
||||
|
||||
testFollowersOnly = False
|
||||
testSaveToFile = True
|
||||
testClientToServer = False
|
||||
testCommentsEnabled = True
|
||||
testAttachImageFilename = None
|
||||
testMediaType = None
|
||||
testImageDescription = None
|
||||
|
||||
createPublicPost(baseDir, nickname, domain, port, httpPrefix,
|
||||
"like, this is totally just a #test, man",
|
||||
False, True, False, True, None, None, useBlurhash)
|
||||
"like this is totally just a #test man",
|
||||
testFollowersOnly,
|
||||
testSaveToFile,
|
||||
testClientToServer,
|
||||
testCommentsEnabled,
|
||||
testAttachImageFilename,
|
||||
testMediaType, testImageDescription,
|
||||
useBlurhash)
|
||||
createPublicPost(baseDir, nickname, domain, port, httpPrefix,
|
||||
"Zoiks!!!",
|
||||
False, True, False, True, None, None, useBlurhash)
|
||||
testFollowersOnly,
|
||||
testSaveToFile,
|
||||
testClientToServer,
|
||||
testCommentsEnabled,
|
||||
testAttachImageFilename,
|
||||
testMediaType, testImageDescription,
|
||||
useBlurhash)
|
||||
createPublicPost(baseDir, nickname, domain, port, httpPrefix,
|
||||
"Hey scoob we need like a hundred more #milkshakes",
|
||||
False, True, False, True, None, None, useBlurhash)
|
||||
testFollowersOnly,
|
||||
testSaveToFile,
|
||||
testClientToServer,
|
||||
testCommentsEnabled,
|
||||
testAttachImageFilename,
|
||||
testMediaType, testImageDescription,
|
||||
useBlurhash)
|
||||
createPublicPost(baseDir, nickname, domain, port, httpPrefix,
|
||||
"Getting kinda spooky around here",
|
||||
False, True, False, True, None, None,
|
||||
testFollowersOnly,
|
||||
testSaveToFile,
|
||||
testClientToServer,
|
||||
testCommentsEnabled,
|
||||
testAttachImageFilename,
|
||||
testMediaType, testImageDescription,
|
||||
useBlurhash, 'someone')
|
||||
createPublicPost(baseDir, nickname, domain, port, httpPrefix,
|
||||
"And they would have gotten away with it too" +
|
||||
"if it wasn't for those pesky hackers",
|
||||
False, True, False, True, 'img/logo.png',
|
||||
'Description of image', useBlurhash)
|
||||
testFollowersOnly,
|
||||
testSaveToFile,
|
||||
testClientToServer,
|
||||
testCommentsEnabled,
|
||||
'img/logo.png', 'image/png',
|
||||
'Description of image',
|
||||
useBlurhash)
|
||||
createPublicPost(baseDir, nickname, domain, port, httpPrefix,
|
||||
"man, these centralized sites are, like, the worst!",
|
||||
False, True, False, True, None, None, useBlurhash)
|
||||
"man these centralized sites are like the worst!",
|
||||
testFollowersOnly,
|
||||
testSaveToFile,
|
||||
testClientToServer,
|
||||
testCommentsEnabled,
|
||||
testAttachImageFilename,
|
||||
testMediaType, testImageDescription,
|
||||
useBlurhash)
|
||||
createPublicPost(baseDir, nickname, domain, port, httpPrefix,
|
||||
"another mystery solved #test",
|
||||
False, True, False, True, None, None, useBlurhash)
|
||||
testFollowersOnly,
|
||||
testSaveToFile,
|
||||
testClientToServer,
|
||||
testCommentsEnabled,
|
||||
testAttachImageFilename,
|
||||
testMediaType, testImageDescription,
|
||||
useBlurhash)
|
||||
createPublicPost(baseDir, nickname, domain, port, httpPrefix,
|
||||
"let's go bowling",
|
||||
False, True, False, True, None, None, useBlurhash)
|
||||
testFollowersOnly,
|
||||
testSaveToFile,
|
||||
testClientToServer,
|
||||
testCommentsEnabled,
|
||||
testAttachImageFilename,
|
||||
testMediaType, testImageDescription,
|
||||
useBlurhash)
|
||||
|
||||
domainFull = domain + ':' + str(port)
|
||||
clearFollows(baseDir, nickname, domain)
|
||||
|
@ -2032,10 +2079,15 @@ maxFeedItemSizeKb = \
|
|||
if maxFeedItemSizeKb is not None:
|
||||
args.maxFeedItemSizeKb = int(maxFeedItemSizeKb)
|
||||
|
||||
allowNewsFollowers = \
|
||||
getConfigParam(baseDir, 'allowNewsFollowers')
|
||||
if allowNewsFollowers is not None:
|
||||
args.allowNewsFollowers = bool(allowNewsFollowers)
|
||||
dormantMonths = \
|
||||
getConfigParam(baseDir, 'dormantMonths')
|
||||
if dormantMonths is not None:
|
||||
args.dormantMonths = int(dormantMonths)
|
||||
|
||||
sendThreadsTimeoutMins = \
|
||||
getConfigParam(baseDir, 'sendThreadsTimeoutMins')
|
||||
if sendThreadsTimeoutMins is not None:
|
||||
args.sendThreadsTimeoutMins = int(sendThreadsTimeoutMins)
|
||||
|
||||
showPublishAsIcon = \
|
||||
getConfigParam(baseDir, 'showPublishAsIcon')
|
||||
|
@ -2080,7 +2132,9 @@ if setTheme(baseDir, themeName, domain, args.allowLocalNetworkAccess):
|
|||
print('Theme set to ' + themeName)
|
||||
|
||||
if __name__ == "__main__":
|
||||
runDaemon(args.maxNewswirePosts,
|
||||
runDaemon(args.sendThreadsTimeoutMins,
|
||||
args.dormantMonths,
|
||||
args.maxNewswirePosts,
|
||||
args.allowLocalNetworkAccess,
|
||||
args.maxFeedItemSizeKb,
|
||||
args.publishButtonAtTop,
|
||||
|
@ -2089,7 +2143,6 @@ if __name__ == "__main__":
|
|||
args.fullWidthTimelineButtonHeader,
|
||||
args.showPublishAsIcon,
|
||||
args.maxFollowers,
|
||||
args.allowNewsFollowers,
|
||||
args.maxNewsPosts,
|
||||
args.maxMirroredArticles,
|
||||
args.maxNewswireFeedSizeKb,
|
||||
|
|
90
filters.py
|
@ -23,6 +23,24 @@ def addFilter(baseDir: str, nickname: str, domain: str, words: str) -> bool:
|
|||
return True
|
||||
|
||||
|
||||
def addGlobalFilter(baseDir: str, words: str) -> bool:
|
||||
"""Adds a global filter for particular words within
|
||||
the content of a incoming posts
|
||||
"""
|
||||
if not words:
|
||||
return False
|
||||
if len(words) < 2:
|
||||
return False
|
||||
filtersFilename = baseDir + '/accounts/filters.txt'
|
||||
if os.path.isfile(filtersFilename):
|
||||
if words in open(filtersFilename).read():
|
||||
return False
|
||||
filtersFile = open(filtersFilename, "a+")
|
||||
filtersFile.write(words + '\n')
|
||||
filtersFile.close()
|
||||
return True
|
||||
|
||||
|
||||
def removeFilter(baseDir: str, nickname: str, domain: str,
|
||||
words: str) -> bool:
|
||||
"""Removes a word filter
|
||||
|
@ -43,7 +61,25 @@ def removeFilter(baseDir: str, nickname: str, domain: str,
|
|||
return False
|
||||
|
||||
|
||||
def isTwitterPost(content: str) -> bool:
|
||||
def removeGlobalFilter(baseDir: str, words: str) -> bool:
|
||||
"""Removes a global word filter
|
||||
"""
|
||||
filtersFilename = baseDir + '/accounts/filters.txt'
|
||||
if os.path.isfile(filtersFilename):
|
||||
if words in open(filtersFilename).read():
|
||||
with open(filtersFilename, 'r') as fp:
|
||||
with open(filtersFilename + '.new', 'w+') as fpnew:
|
||||
for line in fp:
|
||||
line = line.replace('\n', '')
|
||||
if line != words:
|
||||
fpnew.write(line + '\n')
|
||||
if os.path.isfile(filtersFilename + '.new'):
|
||||
os.rename(filtersFilename + '.new', filtersFilename)
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _isTwitterPost(content: str) -> bool:
|
||||
"""Returns true if the given post content is a retweet or twitter crosspost
|
||||
"""
|
||||
if '/twitter.' in content or '@twitter.' in content:
|
||||
|
@ -53,32 +89,52 @@ def isTwitterPost(content: str) -> bool:
|
|||
return False
|
||||
|
||||
|
||||
def _isFilteredBase(filename: str, content: str) -> bool:
|
||||
"""Uses the given file containing filtered words to check
|
||||
the given content
|
||||
"""
|
||||
if not os.path.isfile(filename):
|
||||
return False
|
||||
|
||||
with open(filename, 'r') as fp:
|
||||
for line in fp:
|
||||
filterStr = line.replace('\n', '').replace('\r', '')
|
||||
if not filterStr:
|
||||
continue
|
||||
if len(filterStr) < 2:
|
||||
continue
|
||||
if '+' not in filterStr:
|
||||
if filterStr in content:
|
||||
return True
|
||||
else:
|
||||
filterWords = filterStr.replace('"', '').split('+')
|
||||
for word in filterWords:
|
||||
if word not in content:
|
||||
return False
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def isFiltered(baseDir: str, nickname: str, domain: str, content: str) -> bool:
|
||||
"""Should the given content be filtered out?
|
||||
This is a simple type of filter which just matches words, not a regex
|
||||
You can add individual words or use word1+word2 to indicate that two
|
||||
words must be present although not necessarily adjacent
|
||||
"""
|
||||
globalFiltersFilename = baseDir + '/accounts/filters.txt'
|
||||
if _isFilteredBase(globalFiltersFilename, content):
|
||||
return True
|
||||
|
||||
if not nickname or not domain:
|
||||
return False
|
||||
|
||||
# optionally remove retweets
|
||||
removeTwitter = baseDir + '/accounts/' + \
|
||||
nickname + '@' + domain + '/.removeTwitter'
|
||||
if os.path.isfile(removeTwitter):
|
||||
if isTwitterPost(content):
|
||||
if _isTwitterPost(content):
|
||||
return True
|
||||
|
||||
filtersFilename = baseDir + '/accounts/' + \
|
||||
accountFiltersFilename = baseDir + '/accounts/' + \
|
||||
nickname + '@' + domain + '/filters.txt'
|
||||
if os.path.isfile(filtersFilename):
|
||||
with open(filtersFilename, 'r') as fp:
|
||||
for line in fp:
|
||||
filterStr = line.replace('\n', '').replace('\r', '')
|
||||
if '+' not in filterStr:
|
||||
if filterStr in content:
|
||||
return True
|
||||
else:
|
||||
filterWords = filterStr.replace('"', '').split('+')
|
||||
for word in filterWords:
|
||||
if word not in content:
|
||||
return False
|
||||
return True
|
||||
return False
|
||||
return _isFilteredBase(accountFiltersFilename, content)
|
||||
|
|
327
follow.py
|
@ -8,6 +8,8 @@ __status__ = "Production"
|
|||
|
||||
from pprint import pprint
|
||||
import os
|
||||
from utils import hasUsersPath
|
||||
from utils import getFullDomain
|
||||
from utils import isSystemAccount
|
||||
from utils import getFollowersList
|
||||
from utils import validNickname
|
||||
|
@ -27,16 +29,48 @@ from auth import createBasicAuthHeader
|
|||
from session import postJson
|
||||
|
||||
|
||||
def preApprovedFollower(baseDir: str,
|
||||
nickname: str, domain: str,
|
||||
approveHandle: str,
|
||||
allowNewsFollowers: bool) -> bool:
|
||||
def createInitialLastSeen(baseDir: str, httpPrefix: str) -> None:
|
||||
"""Creates initial lastseen files for all follows
|
||||
"""
|
||||
for subdir, dirs, files in os.walk(baseDir + '/accounts'):
|
||||
for acct in dirs:
|
||||
if '@' not in acct:
|
||||
continue
|
||||
if 'inbox@' in acct or 'news@' in acct:
|
||||
continue
|
||||
accountDir = os.path.join(baseDir + '/accounts', acct)
|
||||
followingFilename = accountDir + '/following.txt'
|
||||
if not os.path.isfile(followingFilename):
|
||||
continue
|
||||
lastSeenDir = accountDir + '/lastseen'
|
||||
if not os.path.isdir(lastSeenDir):
|
||||
os.mkdir(lastSeenDir)
|
||||
with open(followingFilename, 'r') as fp:
|
||||
followingHandles = fp.readlines()
|
||||
for handle in followingHandles:
|
||||
if '#' in handle:
|
||||
continue
|
||||
if '@' not in handle:
|
||||
continue
|
||||
handle = handle.replace('\n', '')
|
||||
nickname = handle.split('@')[0]
|
||||
domain = handle.split('@')[1]
|
||||
actor = \
|
||||
httpPrefix + '://' + domain + '/users/' + nickname
|
||||
lastSeenFilename = \
|
||||
lastSeenDir + '/' + actor.replace('/', '#') + '.txt'
|
||||
print('lastSeenFilename: ' + lastSeenFilename)
|
||||
if not os.path.isfile(lastSeenFilename):
|
||||
with open(lastSeenFilename, 'w+') as fp:
|
||||
fp.write(str(100))
|
||||
break
|
||||
|
||||
|
||||
def _preApprovedFollower(baseDir: str,
|
||||
nickname: str, domain: str,
|
||||
approveHandle: str) -> bool:
|
||||
"""Is the given handle an already manually approved follower?
|
||||
"""
|
||||
# optionally allow the news account to be followed
|
||||
if nickname == 'news' and allowNewsFollowers:
|
||||
return True
|
||||
|
||||
handle = nickname + '@' + domain
|
||||
accountDir = baseDir + '/accounts/' + handle
|
||||
approvedFilename = accountDir + '/approved.txt'
|
||||
|
@ -46,10 +80,10 @@ def preApprovedFollower(baseDir: str,
|
|||
return False
|
||||
|
||||
|
||||
def removeFromFollowBase(baseDir: str,
|
||||
nickname: str, domain: str,
|
||||
acceptOrDenyHandle: str, followFile: str,
|
||||
debug: bool) -> None:
|
||||
def _removeFromFollowBase(baseDir: str,
|
||||
nickname: str, domain: str,
|
||||
acceptOrDenyHandle: str, followFile: str,
|
||||
debug: bool) -> None:
|
||||
"""Removes a handle from follow requests or rejects file
|
||||
"""
|
||||
handle = nickname + '@' + domain
|
||||
|
@ -76,17 +110,17 @@ def removeFromFollowRequests(baseDir: str,
|
|||
denyHandle: str, debug: bool) -> None:
|
||||
"""Removes a handle from follow requests
|
||||
"""
|
||||
removeFromFollowBase(baseDir, nickname, domain,
|
||||
denyHandle, 'followrequests', debug)
|
||||
_removeFromFollowBase(baseDir, nickname, domain,
|
||||
denyHandle, 'followrequests', debug)
|
||||
|
||||
|
||||
def removeFromFollowRejects(baseDir: str,
|
||||
nickname: str, domain: str,
|
||||
acceptHandle: str, debug: bool) -> None:
|
||||
def _removeFromFollowRejects(baseDir: str,
|
||||
nickname: str, domain: str,
|
||||
acceptHandle: str, debug: bool) -> None:
|
||||
"""Removes a handle from follow rejects
|
||||
"""
|
||||
removeFromFollowBase(baseDir, nickname, domain,
|
||||
acceptHandle, 'followrejects', debug)
|
||||
_removeFromFollowBase(baseDir, nickname, domain,
|
||||
acceptHandle, 'followrejects', debug)
|
||||
|
||||
|
||||
def isFollowingActor(baseDir: str,
|
||||
|
@ -108,11 +142,8 @@ def isFollowingActor(baseDir: str,
|
|||
print('WARN: unable to find nickname in ' + actor)
|
||||
return False
|
||||
followingDomain, followingPort = getDomainFromActor(actor)
|
||||
followingHandle = followingNickname + '@' + followingDomain
|
||||
if followingPort:
|
||||
if followingPort != 80 and followingPort != 443:
|
||||
if ':' not in followingHandle:
|
||||
followingHandle += ':' + str(followingPort)
|
||||
followingHandle = \
|
||||
getFullDomain(followingNickname + '@' + followingDomain, followingPort)
|
||||
if followingHandle.lower() in open(followingFile).read().lower():
|
||||
return True
|
||||
return False
|
||||
|
@ -177,10 +208,10 @@ def isFollowerOfPerson(baseDir: str, nickname: str, domain: str,
|
|||
return alreadyFollowing
|
||||
|
||||
|
||||
def unfollowPerson(baseDir: str, nickname: str, domain: str,
|
||||
followNickname: str, followDomain: str,
|
||||
followFile='following.txt',
|
||||
debug=False) -> bool:
|
||||
def unfollowAccount(baseDir: str, nickname: str, domain: str,
|
||||
followNickname: str, followDomain: str,
|
||||
followFile='following.txt',
|
||||
debug=False) -> bool:
|
||||
"""Removes a person to the follow list
|
||||
"""
|
||||
if ':' in domain:
|
||||
|
@ -226,14 +257,14 @@ def unfollowPerson(baseDir: str, nickname: str, domain: str,
|
|||
return True
|
||||
|
||||
|
||||
def unfollowerOfPerson(baseDir: str, nickname: str, domain: str,
|
||||
followerNickname: str, followerDomain: str,
|
||||
debug=False) -> bool:
|
||||
def unfollowerOfAccount(baseDir: str, nickname: str, domain: str,
|
||||
followerNickname: str, followerDomain: str,
|
||||
debug=False) -> bool:
|
||||
"""Remove a follower of a person
|
||||
"""
|
||||
return unfollowPerson(baseDir, nickname, domain,
|
||||
followerNickname, followerDomain,
|
||||
'followers.txt', debug)
|
||||
return unfollowAccount(baseDir, nickname, domain,
|
||||
followerNickname, followerDomain,
|
||||
'followers.txt', debug)
|
||||
|
||||
|
||||
def clearFollows(baseDir: str, nickname: str, domain: str,
|
||||
|
@ -256,9 +287,9 @@ def clearFollowers(baseDir: str, nickname: str, domain: str) -> None:
|
|||
clearFollows(baseDir, nickname, domain, 'followers.txt')
|
||||
|
||||
|
||||
def getNoOfFollows(baseDir: str, nickname: str, domain: str,
|
||||
authenticated: bool,
|
||||
followFile='following.txt') -> int:
|
||||
def _getNoOfFollows(baseDir: str, nickname: str, domain: str,
|
||||
authenticated: bool,
|
||||
followFile='following.txt') -> int:
|
||||
"""Returns the number of follows or followers
|
||||
"""
|
||||
# only show number of followers to authenticated
|
||||
|
@ -281,20 +312,17 @@ def getNoOfFollows(baseDir: str, nickname: str, domain: str,
|
|||
ctr += 1
|
||||
elif ((line.startswith('http') or
|
||||
line.startswith('dat')) and
|
||||
('/users/' in line or
|
||||
'/profile/' in line or
|
||||
'/accounts/' in line or
|
||||
'/channel/' in line)):
|
||||
hasUsersPath(line)):
|
||||
ctr += 1
|
||||
return ctr
|
||||
|
||||
|
||||
def getNoOfFollowers(baseDir: str,
|
||||
nickname: str, domain: str, authenticated: bool) -> int:
|
||||
def _getNoOfFollowers(baseDir: str,
|
||||
nickname: str, domain: str, authenticated: bool) -> int:
|
||||
"""Returns the number of followers of the given person
|
||||
"""
|
||||
return getNoOfFollows(baseDir, nickname, domain,
|
||||
authenticated, 'followers.txt')
|
||||
return _getNoOfFollows(baseDir, nickname, domain,
|
||||
authenticated, 'followers.txt')
|
||||
|
||||
|
||||
def getFollowingFeed(baseDir: str, domain: str, port: int, path: str,
|
||||
|
@ -337,10 +365,7 @@ def getFollowingFeed(baseDir: str, domain: str, port: int, path: str,
|
|||
if not validNickname(domain, nickname):
|
||||
return None
|
||||
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
domain = domain + ':' + str(port)
|
||||
domain = getFullDomain(domain, port)
|
||||
|
||||
if headerOnly:
|
||||
firstStr = \
|
||||
|
@ -350,7 +375,7 @@ def getFollowingFeed(baseDir: str, domain: str, port: int, path: str,
|
|||
httpPrefix + '://' + domain + '/users/' + \
|
||||
nickname + '/' + followFile
|
||||
totalStr = \
|
||||
getNoOfFollows(baseDir, nickname, domain, authenticated)
|
||||
_getNoOfFollows(baseDir, nickname, domain, authenticated)
|
||||
following = {
|
||||
'@context': 'https://www.w3.org/ns/activitystreams',
|
||||
'first': firstStr,
|
||||
|
@ -406,10 +431,7 @@ def getFollowingFeed(baseDir: str, domain: str, port: int, path: str,
|
|||
following['orderedItems'].append(url)
|
||||
elif ((line.startswith('http') or
|
||||
line.startswith('dat')) and
|
||||
('/users/' in line or
|
||||
'/profile/' in line or
|
||||
'/accounts/' in line or
|
||||
'/channel/' in line)):
|
||||
hasUsersPath(line)):
|
||||
# https://domain/users/nickname
|
||||
pageCtr += 1
|
||||
totalCtr += 1
|
||||
|
@ -431,15 +453,14 @@ def getFollowingFeed(baseDir: str, domain: str, port: int, path: str,
|
|||
return following
|
||||
|
||||
|
||||
def followApprovalRequired(baseDir: str, nicknameToFollow: str,
|
||||
domainToFollow: str, debug: bool,
|
||||
followRequestHandle: str,
|
||||
allowNewsFollowers: bool) -> bool:
|
||||
def _followApprovalRequired(baseDir: str, nicknameToFollow: str,
|
||||
domainToFollow: str, debug: bool,
|
||||
followRequestHandle: str) -> bool:
|
||||
""" Returns the policy for follower approvals
|
||||
"""
|
||||
# has this handle already been manually approved?
|
||||
if preApprovedFollower(baseDir, nicknameToFollow, domainToFollow,
|
||||
followRequestHandle, allowNewsFollowers):
|
||||
if _preApprovedFollower(baseDir, nicknameToFollow, domainToFollow,
|
||||
followRequestHandle):
|
||||
return False
|
||||
|
||||
manuallyApproveFollows = False
|
||||
|
@ -462,10 +483,10 @@ def followApprovalRequired(baseDir: str, nicknameToFollow: str,
|
|||
return manuallyApproveFollows
|
||||
|
||||
|
||||
def noOfFollowRequests(baseDir: str,
|
||||
nicknameToFollow: str, domainToFollow: str,
|
||||
nickname: str, domain: str, fromPort: int,
|
||||
followType: str) -> int:
|
||||
def _noOfFollowRequests(baseDir: str,
|
||||
nicknameToFollow: str, domainToFollow: str,
|
||||
nickname: str, domain: str, fromPort: int,
|
||||
followType: str) -> int:
|
||||
"""Returns the current number of follow requests
|
||||
"""
|
||||
accountsDir = baseDir + '/accounts/' + \
|
||||
|
@ -489,11 +510,11 @@ def noOfFollowRequests(baseDir: str,
|
|||
return ctr
|
||||
|
||||
|
||||
def storeFollowRequest(baseDir: str,
|
||||
nicknameToFollow: str, domainToFollow: str, port: int,
|
||||
nickname: str, domain: str, fromPort: int,
|
||||
followJson: {},
|
||||
debug: bool, personUrl: str) -> bool:
|
||||
def _storeFollowRequest(baseDir: str,
|
||||
nicknameToFollow: str, domainToFollow: str, port: int,
|
||||
nickname: str, domain: str, fromPort: int,
|
||||
followJson: {},
|
||||
debug: bool, personUrl: str) -> bool:
|
||||
"""Stores the follow request for later use
|
||||
"""
|
||||
accountsDir = baseDir + '/accounts/' + \
|
||||
|
@ -502,12 +523,8 @@ def storeFollowRequest(baseDir: str,
|
|||
return False
|
||||
|
||||
approveHandle = nickname + '@' + domain
|
||||
domainFull = domain
|
||||
if fromPort:
|
||||
if fromPort != 80 and fromPort != 443:
|
||||
if ':' not in domain:
|
||||
approveHandle = nickname + '@' + domain + ':' + str(fromPort)
|
||||
domainFull = domain + ':' + str(fromPort)
|
||||
domainFull = getFullDomain(domain, fromPort)
|
||||
approveHandle = getFullDomain(nickname + '@' + domain, fromPort)
|
||||
|
||||
followersFilename = accountsDir + '/followers.txt'
|
||||
if os.path.isfile(followersFilename):
|
||||
|
@ -577,7 +594,6 @@ def receiveFollowRequest(session, baseDir: str, httpPrefix: str,
|
|||
cachedWebfingers: {}, personCache: {},
|
||||
messageJson: {}, federationList: [],
|
||||
debug: bool, projectVersion: str,
|
||||
allowNewsFollowers: bool,
|
||||
maxFollowers: int) -> bool:
|
||||
"""Receives a follow request within the POST section of HTTPServer
|
||||
"""
|
||||
|
@ -588,21 +604,15 @@ def receiveFollowRequest(session, baseDir: str, httpPrefix: str,
|
|||
if debug:
|
||||
print('DEBUG: follow request has no actor')
|
||||
return False
|
||||
if '/users/' not in messageJson['actor'] and \
|
||||
'/accounts/' not in messageJson['actor'] and \
|
||||
'/channel/' not in messageJson['actor'] and \
|
||||
'/profile/' not in messageJson['actor']:
|
||||
if not hasUsersPath(messageJson['actor']):
|
||||
if debug:
|
||||
print('DEBUG: users/profile/accounts/channel missing from actor')
|
||||
return False
|
||||
domain, tempPort = getDomainFromActor(messageJson['actor'])
|
||||
fromPort = port
|
||||
domainFull = domain
|
||||
domainFull = getFullDomain(domain, tempPort)
|
||||
if tempPort:
|
||||
fromPort = tempPort
|
||||
if tempPort != 80 and tempPort != 443:
|
||||
if ':' not in domain:
|
||||
domainFull = domain + ':' + str(tempPort)
|
||||
if not domainPermitted(domain, federationList):
|
||||
if debug:
|
||||
print('DEBUG: follower from domain not permitted - ' + domain)
|
||||
|
@ -616,10 +626,7 @@ def receiveFollowRequest(session, baseDir: str, httpPrefix: str,
|
|||
'nickname. Assuming single user instance.')
|
||||
if not messageJson.get('to'):
|
||||
messageJson['to'] = messageJson['object']
|
||||
if '/users/' not in messageJson['object'] and \
|
||||
'/accounts/' not in messageJson['object'] and \
|
||||
'/channel/' not in messageJson['object'] and \
|
||||
'/profile/' not in messageJson['object']:
|
||||
if not hasUsersPath(messageJson['object']):
|
||||
if debug:
|
||||
print('DEBUG: users/profile/channel/accounts ' +
|
||||
'not found within object')
|
||||
|
@ -629,11 +636,7 @@ def receiveFollowRequest(session, baseDir: str, httpPrefix: str,
|
|||
if debug:
|
||||
print('DEBUG: follow domain not permitted ' + domainToFollow)
|
||||
return True
|
||||
domainToFollowFull = domainToFollow
|
||||
if tempPort:
|
||||
if tempPort != 80 and tempPort != 443:
|
||||
if ':' not in domainToFollow:
|
||||
domainToFollowFull = domainToFollow + ':' + str(tempPort)
|
||||
domainToFollowFull = getFullDomain(domainToFollow, tempPort)
|
||||
nicknameToFollow = getNicknameFromActor(messageJson['object'])
|
||||
if not nicknameToFollow:
|
||||
if debug:
|
||||
|
@ -641,15 +644,14 @@ def receiveFollowRequest(session, baseDir: str, httpPrefix: str,
|
|||
'nickname for the account followed')
|
||||
return True
|
||||
if isSystemAccount(nicknameToFollow):
|
||||
if not (nicknameToFollow == 'news' and allowNewsFollowers):
|
||||
if debug:
|
||||
print('DEBUG: Cannot follow system account - ' +
|
||||
nicknameToFollow)
|
||||
return True
|
||||
if debug:
|
||||
print('DEBUG: Cannot follow system account - ' +
|
||||
nicknameToFollow)
|
||||
return True
|
||||
if maxFollowers > 0:
|
||||
if getNoOfFollowers(baseDir,
|
||||
nicknameToFollow, domainToFollow,
|
||||
True) > maxFollowers:
|
||||
if _getNoOfFollowers(baseDir,
|
||||
nicknameToFollow, domainToFollow,
|
||||
True) > maxFollowers:
|
||||
print('WARN: ' + nicknameToFollow +
|
||||
' has reached their maximum number of followers')
|
||||
return True
|
||||
|
@ -672,37 +674,36 @@ def receiveFollowRequest(session, baseDir: str, httpPrefix: str,
|
|||
|
||||
# what is the followers policy?
|
||||
approveHandle = nickname + '@' + domainFull
|
||||
if followApprovalRequired(baseDir, nicknameToFollow,
|
||||
domainToFollow, debug, approveHandle,
|
||||
allowNewsFollowers):
|
||||
if _followApprovalRequired(baseDir, nicknameToFollow,
|
||||
domainToFollow, debug, approveHandle):
|
||||
print('Follow approval is required')
|
||||
if domain.endswith('.onion'):
|
||||
if noOfFollowRequests(baseDir,
|
||||
nicknameToFollow, domainToFollow,
|
||||
nickname, domain, fromPort,
|
||||
'onion') > 5:
|
||||
if _noOfFollowRequests(baseDir,
|
||||
nicknameToFollow, domainToFollow,
|
||||
nickname, domain, fromPort,
|
||||
'onion') > 5:
|
||||
print('Too many follow requests from onion addresses')
|
||||
return False
|
||||
elif domain.endswith('.i2p'):
|
||||
if noOfFollowRequests(baseDir,
|
||||
nicknameToFollow, domainToFollow,
|
||||
nickname, domain, fromPort,
|
||||
'i2p') > 5:
|
||||
if _noOfFollowRequests(baseDir,
|
||||
nicknameToFollow, domainToFollow,
|
||||
nickname, domain, fromPort,
|
||||
'i2p') > 5:
|
||||
print('Too many follow requests from i2p addresses')
|
||||
return False
|
||||
else:
|
||||
if noOfFollowRequests(baseDir,
|
||||
nicknameToFollow, domainToFollow,
|
||||
nickname, domain, fromPort,
|
||||
'') > 10:
|
||||
if _noOfFollowRequests(baseDir,
|
||||
nicknameToFollow, domainToFollow,
|
||||
nickname, domain, fromPort,
|
||||
'') > 10:
|
||||
print('Too many follow requests')
|
||||
return False
|
||||
|
||||
print('Storing follow request for approval')
|
||||
return storeFollowRequest(baseDir,
|
||||
nicknameToFollow, domainToFollow, port,
|
||||
nickname, domain, fromPort,
|
||||
messageJson, debug, messageJson['actor'])
|
||||
return _storeFollowRequest(baseDir,
|
||||
nicknameToFollow, domainToFollow, port,
|
||||
nickname, domain, fromPort,
|
||||
messageJson, debug, messageJson['actor'])
|
||||
else:
|
||||
print('Follow request does not require approval')
|
||||
# update the followers
|
||||
|
@ -724,8 +725,10 @@ def receiveFollowRequest(session, baseDir: str, httpPrefix: str,
|
|||
try:
|
||||
with open(followersFilename, 'r+') as followersFile:
|
||||
content = followersFile.read()
|
||||
followersFile.seek(0, 0)
|
||||
followersFile.write(approveHandle + '\n' + content)
|
||||
if approveHandle + '\n' not in content:
|
||||
followersFile.seek(0, 0)
|
||||
followersFile.write(approveHandle + '\n' +
|
||||
content)
|
||||
except Exception as e:
|
||||
print('WARN: ' +
|
||||
'Failed to write entry to followers file ' +
|
||||
|
@ -841,10 +844,7 @@ def followedAccountRejects(session, baseDir: str, httpPrefix: str,
|
|||
' port ' + str(port) + ' to ' +
|
||||
nickname + '@' + domain + ' port ' + str(fromPort))
|
||||
clientToServer = False
|
||||
denyHandle = nickname + '@' + domain
|
||||
if fromPort:
|
||||
if fromPort != 80 and fromPort != 443:
|
||||
denyHandle = denyHandle + ':' + str(fromPort)
|
||||
denyHandle = getFullDomain(nickname + '@' + domain, fromPort)
|
||||
# remove from the follow requests file
|
||||
removeFromFollowRequests(baseDir, nicknameToFollow, domainToFollow,
|
||||
denyHandle, debug)
|
||||
|
@ -870,26 +870,16 @@ def sendFollowRequest(session, baseDir: str,
|
|||
clientToServer: bool, federationList: [],
|
||||
sendThreads: [], postLog: [], cachedWebfingers: {},
|
||||
personCache: {}, debug: bool,
|
||||
projectVersion: str, allowNewsFollowers: bool) -> {}:
|
||||
projectVersion: str) -> {}:
|
||||
"""Gets the json object for sending a follow request
|
||||
"""
|
||||
if not domainPermitted(followDomain, federationList):
|
||||
return None
|
||||
|
||||
fullDomain = domain
|
||||
followActor = httpPrefix + '://' + domain + '/users/' + nickname
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
fullDomain = domain + ':' + str(port)
|
||||
followActor = httpPrefix + '://' + \
|
||||
fullDomain + '/users/' + nickname
|
||||
fullDomain = getFullDomain(domain, port)
|
||||
followActor = httpPrefix + '://' + fullDomain + '/users/' + nickname
|
||||
|
||||
requestDomain = followDomain
|
||||
if followPort:
|
||||
if followPort != 80 and followPort != 443:
|
||||
if ':' not in followDomain:
|
||||
requestDomain = followDomain + ':' + str(followPort)
|
||||
requestDomain = getFullDomain(followDomain, followPort)
|
||||
|
||||
statusNumber, published = getStatusNumber()
|
||||
|
||||
|
@ -912,15 +902,15 @@ def sendFollowRequest(session, baseDir: str,
|
|||
'object': followedId
|
||||
}
|
||||
|
||||
if followApprovalRequired(baseDir, nickname, domain, debug,
|
||||
followHandle, allowNewsFollowers):
|
||||
if _followApprovalRequired(baseDir, nickname, domain, debug,
|
||||
followHandle):
|
||||
# Remove any follow requests rejected for the account being followed.
|
||||
# It's assumed that if you are following someone then you are
|
||||
# ok with them following back. If this isn't the case then a rejected
|
||||
# follow request will block them again.
|
||||
removeFromFollowRejects(baseDir,
|
||||
nickname, domain,
|
||||
followHandle, debug)
|
||||
_removeFromFollowRejects(baseDir,
|
||||
nickname, domain,
|
||||
followHandle, debug)
|
||||
|
||||
sendSignedJson(newFollowJson, session, baseDir, nickname, domain, port,
|
||||
followNickname, followDomain, followPort,
|
||||
|
@ -947,17 +937,9 @@ def sendFollowRequestViaServer(baseDir: str, session,
|
|||
print('WARN: No session for sendFollowRequestViaServer')
|
||||
return 6
|
||||
|
||||
fromDomainFull = fromDomain
|
||||
if fromPort:
|
||||
if fromPort != 80 and fromPort != 443:
|
||||
if ':' not in fromDomain:
|
||||
fromDomainFull = fromDomain + ':' + str(fromPort)
|
||||
fromDomainFull = getFullDomain(fromDomain, fromPort)
|
||||
|
||||
followDomainFull = followDomain
|
||||
if followPort:
|
||||
if followPort != 80 and followPort != 443:
|
||||
if ':' not in followDomain:
|
||||
followDomainFull = followDomain + ':' + str(followPort)
|
||||
followDomainFull = getFullDomain(followDomain, followPort)
|
||||
|
||||
followActor = httpPrefix + '://' + \
|
||||
fromDomainFull + '/users/' + fromNickname
|
||||
|
@ -995,7 +977,7 @@ def sendFollowRequestViaServer(baseDir: str, session,
|
|||
fromPersonId, sharedInbox, avatarUrl,
|
||||
displayName) = getPersonBox(baseDir, session, wfRequest, personCache,
|
||||
projectVersion, httpPrefix, fromNickname,
|
||||
fromDomain, postToBox)
|
||||
fromDomain, postToBox, 52025)
|
||||
|
||||
if not inboxUrl:
|
||||
if debug:
|
||||
|
@ -1040,16 +1022,8 @@ def sendUnfollowRequestViaServer(baseDir: str, session,
|
|||
print('WARN: No session for sendUnfollowRequestViaServer')
|
||||
return 6
|
||||
|
||||
fromDomainFull = fromDomain
|
||||
if fromPort:
|
||||
if fromPort != 80 and fromPort != 443:
|
||||
if ':' not in fromDomain:
|
||||
fromDomainFull = fromDomain + ':' + str(fromPort)
|
||||
followDomainFull = followDomain
|
||||
if followPort:
|
||||
if followPort != 80 and followPort != 443:
|
||||
if ':' not in followDomain:
|
||||
followDomainFull = followDomain + ':' + str(followPort)
|
||||
fromDomainFull = getFullDomain(fromDomain, fromPort)
|
||||
followDomainFull = getFullDomain(followDomain, followPort)
|
||||
|
||||
followActor = httpPrefix + '://' + \
|
||||
fromDomainFull + '/users/' + fromNickname
|
||||
|
@ -1094,7 +1068,8 @@ def sendUnfollowRequestViaServer(baseDir: str, session,
|
|||
wfRequest, personCache,
|
||||
projectVersion, httpPrefix,
|
||||
fromNickname,
|
||||
fromDomain, postToBox)
|
||||
fromDomain, postToBox,
|
||||
76536)
|
||||
|
||||
if not inboxUrl:
|
||||
if debug:
|
||||
|
@ -1167,6 +1142,7 @@ def getFollowersOfActor(baseDir: str, actor: str, debug: bool) -> {}:
|
|||
print('DEBUG: ' + account +
|
||||
' follows ' + actorHandle)
|
||||
recipientsDict[account] = None
|
||||
break
|
||||
return recipientsDict
|
||||
|
||||
|
||||
|
@ -1203,11 +1179,7 @@ def outboxUndoFollow(baseDir: str, messageJson: {}, debug: bool) -> None:
|
|||
return
|
||||
domainFollower, portFollower = \
|
||||
getDomainFromActor(messageJson['object']['actor'])
|
||||
domainFollowerFull = domainFollower
|
||||
if portFollower:
|
||||
if portFollower != 80 and portFollower != 443:
|
||||
if ':' not in domainFollower:
|
||||
domainFollowerFull = domainFollower + ':' + str(portFollower)
|
||||
domainFollowerFull = getFullDomain(domainFollower, portFollower)
|
||||
|
||||
nicknameFollowing = getNicknameFromActor(messageJson['object']['object'])
|
||||
if not nicknameFollowing:
|
||||
|
@ -1216,15 +1188,10 @@ def outboxUndoFollow(baseDir: str, messageJson: {}, debug: bool) -> None:
|
|||
return
|
||||
domainFollowing, portFollowing = \
|
||||
getDomainFromActor(messageJson['object']['object'])
|
||||
domainFollowingFull = domainFollowing
|
||||
if portFollowing:
|
||||
if portFollowing != 80 and portFollowing != 443:
|
||||
if ':' not in domainFollowing:
|
||||
domainFollowingFull = \
|
||||
domainFollowing + ':' + str(portFollowing)
|
||||
domainFollowingFull = getFullDomain(domainFollowing, portFollowing)
|
||||
|
||||
if unfollowPerson(baseDir, nicknameFollower, domainFollowerFull,
|
||||
nicknameFollowing, domainFollowingFull):
|
||||
if unfollowAccount(baseDir, nicknameFollower, domainFollowerFull,
|
||||
nicknameFollowing, domainFollowingFull):
|
||||
if debug:
|
||||
print('DEBUG: ' + nicknameFollower + ' unfollowed ' +
|
||||
nicknameFollowing + '@' + domainFollowingFull)
|
||||
|
|
|
@ -34,10 +34,10 @@ def receivingCalendarEvents(baseDir: str, nickname: str, domain: str,
|
|||
return handle + '\n' in open(calendarFilename).read()
|
||||
|
||||
|
||||
def receiveCalendarEvents(baseDir: str, nickname: str, domain: str,
|
||||
followingNickname: str,
|
||||
followingDomain: str,
|
||||
add: bool) -> None:
|
||||
def _receiveCalendarEvents(baseDir: str, nickname: str, domain: str,
|
||||
followingNickname: str,
|
||||
followingDomain: str,
|
||||
add: bool) -> None:
|
||||
"""Adds or removes a handle from the following.txt list into a list
|
||||
indicating whether to receive calendar events from that account
|
||||
"""
|
||||
|
@ -100,12 +100,12 @@ def receiveCalendarEvents(baseDir: str, nickname: str, domain: str,
|
|||
def addPersonToCalendar(baseDir: str, nickname: str, domain: str,
|
||||
followingNickname: str,
|
||||
followingDomain: str) -> None:
|
||||
receiveCalendarEvents(baseDir, nickname, domain,
|
||||
followingNickname, followingDomain, True)
|
||||
_receiveCalendarEvents(baseDir, nickname, domain,
|
||||
followingNickname, followingDomain, True)
|
||||
|
||||
|
||||
def removePersonFromCalendar(baseDir: str, nickname: str, domain: str,
|
||||
followingNickname: str,
|
||||
followingDomain: str) -> None:
|
||||
receiveCalendarEvents(baseDir, nickname, domain,
|
||||
followingNickname, followingDomain, False)
|
||||
_receiveCalendarEvents(baseDir, nickname, domain,
|
||||
followingNickname, followingDomain, False)
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
# Font Licenses
|
||||
|
||||
Absortile is under GPL. See https://www.ffonts.net/Absortile.font
|
||||
Barlow-Regular is under SIL Open Font License. See https://tribby.com/fonts/barlow
|
||||
Bedstead is under CC0. See below and https://fontlibrary.org/en/font/bedstead
|
||||
bgrove is under GPL. See http://www.free-fonts-download.com/basic/bloominggrove-font
|
||||
CheGuevaraTextSans-Regular is under CC0. See https://fonts2u.com/cheguevara-text-sans-regular.font
|
||||
|
|
26
git.py
|
@ -10,7 +10,7 @@ import os
|
|||
import html
|
||||
|
||||
|
||||
def gitFormatContent(content: str) -> str:
|
||||
def _gitFormatContent(content: str) -> str:
|
||||
""" replace html formatting, so that it's more
|
||||
like the original patch file
|
||||
"""
|
||||
|
@ -22,8 +22,8 @@ def gitFormatContent(content: str) -> str:
|
|||
return patchStr
|
||||
|
||||
|
||||
def getGitProjectName(baseDir: str, nickname: str, domain: str,
|
||||
subject: str) -> str:
|
||||
def _getGitProjectName(baseDir: str, nickname: str, domain: str,
|
||||
subject: str) -> str:
|
||||
"""Returns the project name for a git patch
|
||||
The project name should be contained within the subject line
|
||||
and should match against a list of projects which the account
|
||||
|
@ -71,13 +71,13 @@ def isGitPatch(baseDir: str, nickname: str, domain: str,
|
|||
return False
|
||||
if checkProjectName:
|
||||
projectName = \
|
||||
getGitProjectName(baseDir, nickname, domain, subject)
|
||||
_getGitProjectName(baseDir, nickname, domain, subject)
|
||||
if not projectName:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def getGitHash(patchStr: str) -> str:
|
||||
def _getGitHash(patchStr: str) -> str:
|
||||
"""Returns the commit hash from a given patch
|
||||
"""
|
||||
patchLines = patchStr.split('\n')
|
||||
|
@ -91,7 +91,7 @@ def getGitHash(patchStr: str) -> str:
|
|||
return None
|
||||
|
||||
|
||||
def getPatchDescription(patchStr: str) -> str:
|
||||
def _getPatchDescription(patchStr: str) -> str:
|
||||
"""Returns the description from a given patch
|
||||
"""
|
||||
patchLines = patchStr.split('\n')
|
||||
|
@ -134,8 +134,8 @@ def convertPostToPatch(baseDir: str, nickname: str, domain: str,
|
|||
postJsonObject['object']['content'],
|
||||
False):
|
||||
return False
|
||||
patchStr = gitFormatContent(postJsonObject['object']['content'])
|
||||
commitHash = getGitHash(patchStr)
|
||||
patchStr = _gitFormatContent(postJsonObject['object']['content'])
|
||||
commitHash = _getGitHash(patchStr)
|
||||
if not commitHash:
|
||||
return False
|
||||
postJsonObject['object']['type'] = 'Patch'
|
||||
|
@ -146,7 +146,7 @@ def convertPostToPatch(baseDir: str, nickname: str, domain: str,
|
|||
postJsonObject['object']['hash'] = commitHash
|
||||
postJsonObject['object']['description'] = {
|
||||
"mediaType": "text/plain",
|
||||
"content": getPatchDescription(patchStr)
|
||||
"content": _getPatchDescription(patchStr)
|
||||
}
|
||||
# remove content map
|
||||
if postJsonObject['object'].get('contentMap'):
|
||||
|
@ -155,7 +155,7 @@ def convertPostToPatch(baseDir: str, nickname: str, domain: str,
|
|||
return True
|
||||
|
||||
|
||||
def gitAddFromHandle(patchStr: str, handle: str) -> str:
|
||||
def _gitAddFromHandle(patchStr: str, handle: str) -> str:
|
||||
"""Adds the activitypub handle of the sender to the patch
|
||||
"""
|
||||
fromStr = 'AP-signed-off-by: '
|
||||
|
@ -181,7 +181,7 @@ def receiveGitPatch(baseDir: str, nickname: str, domain: str,
|
|||
messageType, subject, content):
|
||||
return False
|
||||
|
||||
patchStr = gitFormatContent(content)
|
||||
patchStr = _gitFormatContent(content)
|
||||
|
||||
patchLines = patchStr.split('\n')
|
||||
patchFilename = None
|
||||
|
@ -197,7 +197,7 @@ def receiveGitPatch(baseDir: str, nickname: str, domain: str,
|
|||
patchSubject = patchSubject.replace('[PATCH]', '').strip()
|
||||
patchSubject = patchSubject.replace(' ', '_')
|
||||
projectName = \
|
||||
getGitProjectName(baseDir, nickname, domain, subject)
|
||||
_getGitProjectName(baseDir, nickname, domain, subject)
|
||||
if not os.path.isdir(patchesDir):
|
||||
os.mkdir(patchesDir)
|
||||
projectDir = patchesDir + '/' + projectName
|
||||
|
@ -209,7 +209,7 @@ def receiveGitPatch(baseDir: str, nickname: str, domain: str,
|
|||
if not patchFilename:
|
||||
return False
|
||||
patchStr = \
|
||||
gitAddFromHandle(patchStr, '@' + fromNickname + '@' + fromDomain)
|
||||
_gitAddFromHandle(patchStr, '@' + fromNickname + '@' + fromDomain)
|
||||
with open(patchFilename, 'w+') as patchFile:
|
||||
patchFile.write(patchStr)
|
||||
patchNotifyFilename = \
|
||||
|
|
37
happening.py
|
@ -17,7 +17,7 @@ from utils import daysInMonth
|
|||
from utils import mergeDicts
|
||||
|
||||
|
||||
def validUuid(testUuid: str, version=4):
|
||||
def _validUuid(testUuid: str, version=4):
|
||||
"""Check if uuid_to_test is a valid UUID
|
||||
"""
|
||||
try:
|
||||
|
@ -28,7 +28,7 @@ def validUuid(testUuid: str, version=4):
|
|||
return str(uuid_obj) == testUuid
|
||||
|
||||
|
||||
def removeEventFromTimeline(eventId: str, tlEventsFilename: str) -> None:
|
||||
def _removeEventFromTimeline(eventId: str, tlEventsFilename: str) -> None:
|
||||
"""Removes the given event Id from the timeline
|
||||
"""
|
||||
if eventId + '\n' not in open(tlEventsFilename).read():
|
||||
|
@ -71,7 +71,7 @@ def saveEventPost(baseDir: str, handle: str, postId: str,
|
|||
|
||||
if eventJson.get('name') and eventJson.get('actor') and \
|
||||
eventJson.get('uuid') and eventJson.get('content'):
|
||||
if not validUuid(eventJson['uuid']):
|
||||
if not _validUuid(eventJson['uuid']):
|
||||
return False
|
||||
print('Mobilizon type event')
|
||||
# if this is a full description of an event then save it
|
||||
|
@ -92,12 +92,13 @@ def saveEventPost(baseDir: str, handle: str, postId: str,
|
|||
tlEventsFilename = baseDir + '/accounts/' + handle + '/events.txt'
|
||||
|
||||
if os.path.isfile(tlEventsFilename):
|
||||
removeEventFromTimeline(eventId, tlEventsFilename)
|
||||
_removeEventFromTimeline(eventId, tlEventsFilename)
|
||||
try:
|
||||
with open(tlEventsFilename, 'r+') as tlEventsFile:
|
||||
content = tlEventsFile.read()
|
||||
tlEventsFile.seek(0, 0)
|
||||
tlEventsFile.write(eventId + '\n' + content)
|
||||
if eventId + '\n' not in content:
|
||||
tlEventsFile.seek(0, 0)
|
||||
tlEventsFile.write(eventId + '\n' + content)
|
||||
except Exception as e:
|
||||
print('WARN: Failed to write entry to events file ' +
|
||||
tlEventsFilename + ' ' + str(e))
|
||||
|
@ -146,7 +147,7 @@ def saveEventPost(baseDir: str, handle: str, postId: str,
|
|||
return True
|
||||
|
||||
|
||||
def isHappeningEvent(tag: {}) -> bool:
|
||||
def _isHappeningEvent(tag: {}) -> bool:
|
||||
"""Is this tag an Event or Place ActivityStreams type?
|
||||
"""
|
||||
if not tag.get('type'):
|
||||
|
@ -156,7 +157,7 @@ def isHappeningEvent(tag: {}) -> bool:
|
|||
return True
|
||||
|
||||
|
||||
def isHappeningPost(postJsonObject: {}) -> bool:
|
||||
def _isHappeningPost(postJsonObject: {}) -> bool:
|
||||
"""Is this a post with tags?
|
||||
"""
|
||||
if not postJsonObject:
|
||||
|
@ -208,13 +209,13 @@ def getTodaysEvents(baseDir: str, nickname: str, domain: str,
|
|||
continue
|
||||
|
||||
postJsonObject = loadJson(postFilename)
|
||||
if not isHappeningPost(postJsonObject):
|
||||
if not _isHappeningPost(postJsonObject):
|
||||
continue
|
||||
|
||||
postEvent = []
|
||||
dayOfMonth = None
|
||||
for tag in postJsonObject['object']['tag']:
|
||||
if not isHappeningEvent(tag):
|
||||
if not _isHappeningEvent(tag):
|
||||
continue
|
||||
# this tag is an event or a place
|
||||
if tag['type'] == 'Event':
|
||||
|
@ -275,11 +276,11 @@ def todaysEventsCheck(baseDir: str, nickname: str, domain: str) -> bool:
|
|||
continue
|
||||
|
||||
postJsonObject = loadJson(postFilename)
|
||||
if not isHappeningPost(postJsonObject):
|
||||
if not _isHappeningPost(postJsonObject):
|
||||
continue
|
||||
|
||||
for tag in postJsonObject['object']['tag']:
|
||||
if not isHappeningEvent(tag):
|
||||
if not _isHappeningEvent(tag):
|
||||
continue
|
||||
# this tag is an event or a place
|
||||
if tag['type'] != 'Event':
|
||||
|
@ -322,11 +323,11 @@ def thisWeeksEventsCheck(baseDir: str, nickname: str, domain: str) -> bool:
|
|||
continue
|
||||
|
||||
postJsonObject = loadJson(postFilename)
|
||||
if not isHappeningPost(postJsonObject):
|
||||
if not _isHappeningPost(postJsonObject):
|
||||
continue
|
||||
|
||||
for tag in postJsonObject['object']['tag']:
|
||||
if not isHappeningEvent(tag):
|
||||
if not _isHappeningEvent(tag):
|
||||
continue
|
||||
# this tag is an event or a place
|
||||
if tag['type'] != 'Event':
|
||||
|
@ -377,14 +378,14 @@ def getThisWeeksEvents(baseDir: str, nickname: str, domain: str) -> {}:
|
|||
continue
|
||||
|
||||
postJsonObject = loadJson(postFilename)
|
||||
if not isHappeningPost(postJsonObject):
|
||||
if not _isHappeningPost(postJsonObject):
|
||||
continue
|
||||
|
||||
postEvent = []
|
||||
dayOfMonth = None
|
||||
weekDayIndex = None
|
||||
for tag in postJsonObject['object']['tag']:
|
||||
if not isHappeningEvent(tag):
|
||||
if not _isHappeningEvent(tag):
|
||||
continue
|
||||
# this tag is an event or a place
|
||||
if tag['type'] == 'Event':
|
||||
|
@ -462,13 +463,13 @@ def getCalendarEvents(baseDir: str, nickname: str, domain: str,
|
|||
continue
|
||||
|
||||
postJsonObject = loadJson(postFilename)
|
||||
if not isHappeningPost(postJsonObject):
|
||||
if not _isHappeningPost(postJsonObject):
|
||||
continue
|
||||
|
||||
postEvent = []
|
||||
dayOfMonth = None
|
||||
for tag in postJsonObject['object']['tag']:
|
||||
if not isHappeningEvent(tag):
|
||||
if not _isHappeningEvent(tag):
|
||||
continue
|
||||
# this tag is an event or a place
|
||||
if tag['type'] == 'Event':
|
||||
|
|
24
httpsig.py
|
@ -22,6 +22,7 @@ except ImportError:
|
|||
import base64
|
||||
from time import gmtime, strftime
|
||||
import datetime
|
||||
from utils import getFullDomain
|
||||
|
||||
|
||||
def messageContentDigest(messageBodyJsonStr: str) -> str:
|
||||
|
@ -40,15 +41,9 @@ def signPostHeaders(dateStr: str, privateKeyPem: str,
|
|||
"""Returns a raw signature string that can be plugged into a header and
|
||||
used to verify the authenticity of an HTTP transmission.
|
||||
"""
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
domain = domain + ':' + str(port)
|
||||
domain = getFullDomain(domain, port)
|
||||
|
||||
if toPort:
|
||||
if toPort != 80 and toPort != 443:
|
||||
if ':' not in toDomain:
|
||||
toDomain = toDomain + ':' + str(port)
|
||||
toDomain = getFullDomain(toDomain, toPort)
|
||||
|
||||
if not dateStr:
|
||||
dateStr = strftime("%a, %d %b %Y %H:%M:%S %Z", gmtime())
|
||||
|
@ -107,12 +102,7 @@ def createSignedHeader(privateKeyPem: str, nickname: str,
|
|||
"""Note that the domain is the destination, not the sender
|
||||
"""
|
||||
contentType = 'application/activity+json'
|
||||
headerDomain = toDomain
|
||||
|
||||
if toPort:
|
||||
if toPort != 80 and toPort != 443:
|
||||
if ':' not in headerDomain:
|
||||
headerDomain = headerDomain + ':' + str(toPort)
|
||||
headerDomain = getFullDomain(toDomain, toPort)
|
||||
|
||||
dateStr = strftime("%a, %d %b %Y %H:%M:%S %Z", gmtime())
|
||||
if not withDigest:
|
||||
|
@ -145,7 +135,7 @@ def createSignedHeader(privateKeyPem: str, nickname: str,
|
|||
return headers
|
||||
|
||||
|
||||
def verifyRecentSignature(signedDateStr: str) -> bool:
|
||||
def _verifyRecentSignature(signedDateStr: str) -> bool:
|
||||
"""Checks whether the given time taken from the header is within
|
||||
12 hours of the current time
|
||||
"""
|
||||
|
@ -229,7 +219,7 @@ def verifyPostHeaders(httpPrefix: str, publicKeyPem: str, headers: dict,
|
|||
else:
|
||||
if headers.get(signedHeader):
|
||||
if signedHeader == 'date':
|
||||
if not verifyRecentSignature(headers[signedHeader]):
|
||||
if not _verifyRecentSignature(headers[signedHeader]):
|
||||
if debug:
|
||||
print('DEBUG: ' +
|
||||
'verifyPostHeaders date is not recent ' +
|
||||
|
@ -240,7 +230,7 @@ def verifyPostHeaders(httpPrefix: str, publicKeyPem: str, headers: dict,
|
|||
else:
|
||||
signedHeaderCap = signedHeader.capitalize()
|
||||
if signedHeaderCap == 'Date':
|
||||
if not verifyRecentSignature(headers[signedHeaderCap]):
|
||||
if not _verifyRecentSignature(headers[signedHeaderCap]):
|
||||
if debug:
|
||||
print('DEBUG: ' +
|
||||
'verifyPostHeaders date is not recent ' +
|
||||
|
|
52
jsonldsig.py
|
@ -28,21 +28,21 @@ import base64
|
|||
import json
|
||||
|
||||
|
||||
def b64safeEncode(payload: {}) -> str:
|
||||
def _b64safeEncode(payload: {}) -> str:
|
||||
"""
|
||||
b64 url safe encoding with the padding removed.
|
||||
"""
|
||||
return base64.urlsafe_b64encode(payload).rstrip(b'=')
|
||||
|
||||
|
||||
def b64safeDecode(payload: {}) -> str:
|
||||
def _b64safeDecode(payload: {}) -> str:
|
||||
"""
|
||||
b64 url safe decoding with the padding added.
|
||||
"""
|
||||
return base64.urlsafe_b64decode(payload + b'=' * (4 - len(payload) % 4))
|
||||
|
||||
|
||||
def normalizeJson(payload: {}) -> str:
|
||||
def _normalizeJson(payload: {}) -> str:
|
||||
"""
|
||||
Normalize with URDNA2015
|
||||
"""
|
||||
|
@ -50,7 +50,7 @@ def normalizeJson(payload: {}) -> str:
|
|||
sort_keys=True).encode('utf-8')
|
||||
|
||||
|
||||
def signRs256(payload: {}, privateKeyPem: str) -> str:
|
||||
def _signRs256(payload: {}, privateKeyPem: str) -> str:
|
||||
"""
|
||||
Produce a RS256 signature of the payload
|
||||
"""
|
||||
|
@ -60,7 +60,7 @@ def signRs256(payload: {}, privateKeyPem: str) -> str:
|
|||
return signature
|
||||
|
||||
|
||||
def verifyRs256(payload: {}, signature: str, publicKeyPem: str) -> bool:
|
||||
def _verifyRs256(payload: {}, signature: str, publicKeyPem: str) -> bool:
|
||||
"""
|
||||
Verifies a RS256 signature
|
||||
"""
|
||||
|
@ -69,7 +69,7 @@ def verifyRs256(payload: {}, signature: str, publicKeyPem: str) -> bool:
|
|||
return verifier.verify(SHA256.new(payload), signature)
|
||||
|
||||
|
||||
def signJws(payload: {}, privateKeyPem: str) -> str:
|
||||
def _signJws(payload: {}, privateKeyPem: str) -> str:
|
||||
"""
|
||||
Prepare payload to sign
|
||||
"""
|
||||
|
@ -78,28 +78,32 @@ def signJws(payload: {}, privateKeyPem: str) -> str:
|
|||
'b64': False,
|
||||
'crit': ['b64']
|
||||
}
|
||||
normalizedJson = normalizeJson(header)
|
||||
encodedHeader = b64safeEncode(normalizedJson)
|
||||
normalizedJson = _normalizeJson(header)
|
||||
encodedHeader = _b64safeEncode(normalizedJson)
|
||||
preparedPayload = b'.'.join([encodedHeader, payload])
|
||||
|
||||
signature = signRs256(preparedPayload, privateKeyPem)
|
||||
encodedSignature = b64safeEncode(signature)
|
||||
signature = _signRs256(preparedPayload, privateKeyPem)
|
||||
encodedSignature = _b64safeEncode(signature)
|
||||
jwsSignature = b'..'.join([encodedHeader, encodedSignature])
|
||||
|
||||
return jwsSignature
|
||||
|
||||
|
||||
def verifyJws(payload: {}, jwsSignature: str, publicKeyPem: str) -> bool:
|
||||
def _verifyJws(payload: {}, jwsSignature: str, publicKeyPem: str) -> bool:
|
||||
"""
|
||||
Verifies a signature using the given public key
|
||||
"""
|
||||
encodedHeader, encodedSignature = jwsSignature.split(b'..')
|
||||
signature = b64safeDecode(encodedSignature)
|
||||
payload = b'.'.join([encodedHeader, payload])
|
||||
return verifyRs256(payload, signature, publicKeyPem)
|
||||
if b'..' in jwsSignature:
|
||||
encodedHeader, encodedSignature = jwsSignature.split(b'..')
|
||||
signature = _b64safeDecode(encodedSignature)
|
||||
payload = b'.'.join([encodedHeader, payload])
|
||||
else:
|
||||
signature = _b64safeDecode(jwsSignature)
|
||||
payload = b'.'.join([payload])
|
||||
return _verifyRs256(payload, signature, publicKeyPem)
|
||||
|
||||
|
||||
def jsonldNormalize(jldDocument: str):
|
||||
def _jsonldNormalize(jldDocument: str):
|
||||
"""
|
||||
Normalize and hash the json-ld document
|
||||
"""
|
||||
|
@ -116,9 +120,12 @@ def jsonldSign(jldDocument: {}, privateKeyPem: str) -> {}:
|
|||
"""
|
||||
Produces a signed JSON-LD document with a Json Web Signature
|
||||
"""
|
||||
if not jldDocument.get('@context'):
|
||||
print('WARN: json document must have @context to sign')
|
||||
return jldDocument
|
||||
jldDocument = deepcopy(jldDocument)
|
||||
normalizedJldHash = jsonldNormalize(jldDocument)
|
||||
jwsSignature = signJws(normalizedJldHash, privateKeyPem)
|
||||
normalizedJldHash = _jsonldNormalize(jldDocument)
|
||||
jwsSignature = _signJws(normalizedJldHash, privateKeyPem)
|
||||
|
||||
# construct the signature document and add it to jsonld
|
||||
signature = {
|
||||
|
@ -135,12 +142,17 @@ def jsonldVerify(signedJldDocument: {}, publicKeyPem: str) -> bool:
|
|||
"""
|
||||
Verifies the Json Web Signature of a signed JSON-LD Document
|
||||
"""
|
||||
if not isinstance(signedJldDocument, dict):
|
||||
return False
|
||||
if not signedJldDocument.get('@context'):
|
||||
print('json document must have @context')
|
||||
return False
|
||||
signedJldDocument = deepcopy(signedJldDocument)
|
||||
signature = signedJldDocument.pop('signature')
|
||||
jwsSignature = signature['signatureValue'].encode('utf-8')
|
||||
normalizedJldHash = jsonldNormalize(signedJldDocument)
|
||||
normalizedJldHash = _jsonldNormalize(signedJldDocument)
|
||||
|
||||
return verifyJws(normalizedJldHash, jwsSignature, publicKeyPem)
|
||||
return _verifyJws(normalizedJldHash, jwsSignature, publicKeyPem)
|
||||
|
||||
|
||||
def testSignJsonld(jldDocument: {}, privateKeyPem: str) -> {}:
|
||||
|
|
138
like.py
|
@ -6,6 +6,8 @@ __maintainer__ = "Bob Mottram"
|
|||
__email__ = "bob@freedombone.net"
|
||||
__status__ = "Production"
|
||||
|
||||
from utils import hasUsersPath
|
||||
from utils import getFullDomain
|
||||
from utils import removeIdEnding
|
||||
from utils import urlPermitted
|
||||
from utils import getNicknameFromActor
|
||||
|
@ -49,15 +51,15 @@ def noOfLikes(postJsonObject: {}) -> int:
|
|||
return len(postJsonObject['object']['likes']['items'])
|
||||
|
||||
|
||||
def like(recentPostsCache: {},
|
||||
session, baseDir: str, federationList: [],
|
||||
nickname: str, domain: str, port: int,
|
||||
ccList: [], httpPrefix: str,
|
||||
objectUrl: str, actorLiked: str,
|
||||
clientToServer: bool,
|
||||
sendThreads: [], postLog: [],
|
||||
personCache: {}, cachedWebfingers: {},
|
||||
debug: bool, projectVersion: str) -> {}:
|
||||
def _like(recentPostsCache: {},
|
||||
session, baseDir: str, federationList: [],
|
||||
nickname: str, domain: str, port: int,
|
||||
ccList: [], httpPrefix: str,
|
||||
objectUrl: str, actorLiked: str,
|
||||
clientToServer: bool,
|
||||
sendThreads: [], postLog: [],
|
||||
personCache: {}, cachedWebfingers: {},
|
||||
debug: bool, projectVersion: str) -> {}:
|
||||
"""Creates a like
|
||||
actor is the person doing the liking
|
||||
'to' might be a specific person (actor) whose post was liked
|
||||
|
@ -66,11 +68,7 @@ def like(recentPostsCache: {},
|
|||
if not urlPermitted(objectUrl, federationList):
|
||||
return None
|
||||
|
||||
fullDomain = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
fullDomain = domain + ':' + str(port)
|
||||
fullDomain = getFullDomain(domain, port)
|
||||
|
||||
newLikeJson = {
|
||||
"@context": "https://www.w3.org/ns/activitystreams",
|
||||
|
@ -90,10 +88,7 @@ def like(recentPostsCache: {},
|
|||
likedPostNickname = getNicknameFromActor(actorLiked)
|
||||
likedPostDomain, likedPostPort = getDomainFromActor(actorLiked)
|
||||
else:
|
||||
if '/users/' in objectUrl or \
|
||||
'/accounts/' in objectUrl or \
|
||||
'/channel/' in objectUrl or \
|
||||
'/profile/' in objectUrl:
|
||||
if hasUsersPath(objectUrl):
|
||||
likedPostNickname = getNicknameFromActor(objectUrl)
|
||||
likedPostDomain, likedPostPort = getDomainFromActor(objectUrl)
|
||||
|
||||
|
@ -132,94 +127,16 @@ def likePost(recentPostsCache: {},
|
|||
debug: bool, projectVersion: str) -> {}:
|
||||
"""Likes a given status post. This is only used by unit tests
|
||||
"""
|
||||
likeDomain = likeDomain
|
||||
if likePort:
|
||||
if likePort != 80 and likePort != 443:
|
||||
if ':' not in likeDomain:
|
||||
likeDomain = likeDomain + ':' + str(likePort)
|
||||
likeDomain = getFullDomain(likeDomain, likePort)
|
||||
|
||||
actorLiked = httpPrefix + '://' + likeDomain + '/users/' + likeNickname
|
||||
objectUrl = actorLiked + '/statuses/' + str(likeStatusNumber)
|
||||
|
||||
return like(recentPostsCache,
|
||||
session, baseDir, federationList, nickname, domain, port,
|
||||
ccList, httpPrefix, objectUrl, actorLiked, clientToServer,
|
||||
sendThreads, postLog, personCache, cachedWebfingers,
|
||||
debug, projectVersion)
|
||||
|
||||
|
||||
def undolike(recentPostsCache: {},
|
||||
session, baseDir: str, federationList: [],
|
||||
nickname: str, domain: str, port: int,
|
||||
ccList: [], httpPrefix: str,
|
||||
objectUrl: str, actorLiked: str,
|
||||
clientToServer: bool,
|
||||
sendThreads: [], postLog: [],
|
||||
personCache: {}, cachedWebfingers: {},
|
||||
debug: bool, projectVersion: str) -> {}:
|
||||
"""Removes a like
|
||||
actor is the person doing the liking
|
||||
'to' might be a specific person (actor) whose post was liked
|
||||
object is typically the url of the message which was liked
|
||||
"""
|
||||
if not urlPermitted(objectUrl, federationList):
|
||||
return None
|
||||
|
||||
fullDomain = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
fullDomain = domain + ':' + str(port)
|
||||
|
||||
newUndoLikeJson = {
|
||||
"@context": "https://www.w3.org/ns/activitystreams",
|
||||
'type': 'Undo',
|
||||
'actor': httpPrefix + '://' + fullDomain + '/users/' + nickname,
|
||||
'object': {
|
||||
'type': 'Like',
|
||||
'actor': httpPrefix + '://' + fullDomain + '/users/' + nickname,
|
||||
'object': objectUrl
|
||||
}
|
||||
}
|
||||
if ccList:
|
||||
if len(ccList) > 0:
|
||||
newUndoLikeJson['cc'] = ccList
|
||||
newUndoLikeJson['object']['cc'] = ccList
|
||||
|
||||
# Extract the domain and nickname from a statuses link
|
||||
likedPostNickname = None
|
||||
likedPostDomain = None
|
||||
likedPostPort = None
|
||||
if actorLiked:
|
||||
likedPostNickname = getNicknameFromActor(actorLiked)
|
||||
likedPostDomain, likedPostPort = getDomainFromActor(actorLiked)
|
||||
else:
|
||||
if '/users/' in objectUrl or \
|
||||
'/accounts/' in objectUrl or \
|
||||
'/channel/' in objectUrl or \
|
||||
'/profile/' in objectUrl:
|
||||
likedPostNickname = getNicknameFromActor(objectUrl)
|
||||
likedPostDomain, likedPostPort = getDomainFromActor(objectUrl)
|
||||
|
||||
if likedPostNickname:
|
||||
postFilename = locatePost(baseDir, nickname, domain, objectUrl)
|
||||
if not postFilename:
|
||||
return None
|
||||
|
||||
undoLikesCollectionEntry(baseDir, postFilename, objectUrl,
|
||||
newUndoLikeJson['actor'], domain, debug)
|
||||
|
||||
sendSignedJson(newUndoLikeJson, session, baseDir,
|
||||
nickname, domain, port,
|
||||
likedPostNickname, likedPostDomain, likedPostPort,
|
||||
'https://www.w3.org/ns/activitystreams#Public',
|
||||
httpPrefix, True, clientToServer, federationList,
|
||||
sendThreads, postLog, cachedWebfingers, personCache,
|
||||
debug, projectVersion)
|
||||
else:
|
||||
return None
|
||||
|
||||
return newUndoLikeJson
|
||||
return _like(recentPostsCache,
|
||||
session, baseDir, federationList, nickname, domain, port,
|
||||
ccList, httpPrefix, objectUrl, actorLiked, clientToServer,
|
||||
sendThreads, postLog, personCache, cachedWebfingers,
|
||||
debug, projectVersion)
|
||||
|
||||
|
||||
def sendLikeViaServer(baseDir: str, session,
|
||||
|
@ -234,11 +151,7 @@ def sendLikeViaServer(baseDir: str, session,
|
|||
print('WARN: No session for sendLikeViaServer')
|
||||
return 6
|
||||
|
||||
fromDomainFull = fromDomain
|
||||
if fromPort:
|
||||
if fromPort != 80 and fromPort != 443:
|
||||
if ':' not in fromDomain:
|
||||
fromDomainFull = fromDomain + ':' + str(fromPort)
|
||||
fromDomainFull = getFullDomain(fromDomain, fromPort)
|
||||
|
||||
actor = httpPrefix + '://' + fromDomainFull + '/users/' + fromNickname
|
||||
|
||||
|
@ -272,7 +185,7 @@ def sendLikeViaServer(baseDir: str, session,
|
|||
personCache,
|
||||
projectVersion, httpPrefix,
|
||||
fromNickname, fromDomain,
|
||||
postToBox)
|
||||
postToBox, 72873)
|
||||
|
||||
if not inboxUrl:
|
||||
if debug:
|
||||
|
@ -313,11 +226,7 @@ def sendUndoLikeViaServer(baseDir: str, session,
|
|||
print('WARN: No session for sendUndoLikeViaServer')
|
||||
return 6
|
||||
|
||||
fromDomainFull = fromDomain
|
||||
if fromPort:
|
||||
if fromPort != 80 and fromPort != 443:
|
||||
if ':' not in fromDomain:
|
||||
fromDomainFull = fromDomain + ':' + str(fromPort)
|
||||
fromDomainFull = getFullDomain(fromDomain, fromPort)
|
||||
|
||||
actor = httpPrefix + '://' + fromDomainFull + '/users/' + fromNickname
|
||||
|
||||
|
@ -354,7 +263,8 @@ def sendUndoLikeViaServer(baseDir: str, session,
|
|||
avatarUrl, displayName) = getPersonBox(baseDir, session, wfRequest,
|
||||
personCache, projectVersion,
|
||||
httpPrefix, fromNickname,
|
||||
fromDomain, postToBox)
|
||||
fromDomain, postToBox,
|
||||
72625)
|
||||
|
||||
if not inboxUrl:
|
||||
if debug:
|
||||
|
|
|
@ -62,7 +62,7 @@ def manualDenyFollowRequest(session, baseDir: str,
|
|||
print('Follow request from ' + denyHandle + ' was denied.')
|
||||
|
||||
|
||||
def approveFollowerHandle(accountDir: str, approveHandle: str) -> None:
|
||||
def _approveFollowerHandle(accountDir: str, approveHandle: str) -> None:
|
||||
""" Record manually approved handles so that if they unfollow and then
|
||||
re-follow later then they don't need to be manually approved again
|
||||
"""
|
||||
|
@ -184,8 +184,10 @@ def manualApproveFollowRequest(session, baseDir: str,
|
|||
try:
|
||||
with open(followersFilename, 'r+') as followersFile:
|
||||
content = followersFile.read()
|
||||
followersFile.seek(0, 0)
|
||||
followersFile.write(approveHandleFull + '\n' + content)
|
||||
if approveHandleFull + '\n' not in content:
|
||||
followersFile.seek(0, 0)
|
||||
followersFile.write(approveHandleFull + '\n' +
|
||||
content)
|
||||
except Exception as e:
|
||||
print('WARN: Manual follow accept. ' +
|
||||
'Failed to write entry to followers file ' + str(e))
|
||||
|
@ -203,7 +205,7 @@ def manualApproveFollowRequest(session, baseDir: str,
|
|||
# in followers.txt
|
||||
if approveHandleFull in open(followersFilename).read():
|
||||
# mark this handle as approved for following
|
||||
approveFollowerHandle(accountDir, approveHandle)
|
||||
_approveFollowerHandle(accountDir, approveHandle)
|
||||
# update the follow requests with the handles not yet approved
|
||||
os.rename(approveFollowsFilename + '.new', approveFollowsFilename)
|
||||
# remove the .follow file
|
||||
|
|
24
media.py
|
@ -6,13 +6,14 @@ __maintainer__ = "Bob Mottram"
|
|||
__email__ = "bob@freedombone.net"
|
||||
__status__ = "Production"
|
||||
|
||||
from blurhash import blurhash_encode as blurencode
|
||||
from blurhash import blurhash_encode
|
||||
from PIL import Image
|
||||
import numpy
|
||||
import os
|
||||
import datetime
|
||||
from hashlib import sha1
|
||||
from auth import createPassword
|
||||
from utils import getFullDomain
|
||||
from utils import getImageExtensions
|
||||
from utils import getVideoExtensions
|
||||
from utils import getAudioExtensions
|
||||
|
@ -55,11 +56,12 @@ def removeMetaData(imageFilename: str, outputFilename: str) -> None:
|
|||
os.system('/usr/bin/mogrify -strip ' + outputFilename) # nosec
|
||||
|
||||
|
||||
def getImageHash(imageFilename: str) -> str:
|
||||
return blurencode(numpy.array(Image.open(imageFilename).convert("RGB")))
|
||||
def _getImageHash(imageFilename: str) -> str:
|
||||
value = numpy.array(Image.open(imageFilename).convert("RGB"))
|
||||
return blurhash_encode(value)
|
||||
|
||||
|
||||
def isMedia(imageFilename: str) -> bool:
|
||||
def _isMedia(imageFilename: str) -> bool:
|
||||
permittedMedia = getMediaExtensions()
|
||||
for m in permittedMedia:
|
||||
if imageFilename.endswith('.' + m):
|
||||
|
@ -101,7 +103,7 @@ def getAttachmentMediaType(filename: str) -> str:
|
|||
return mediaType
|
||||
|
||||
|
||||
def updateEtag(mediaFilename: str) -> None:
|
||||
def _updateEtag(mediaFilename: str) -> None:
|
||||
""" calculate the etag, which is a sha1 of the data
|
||||
"""
|
||||
# only create etags for media
|
||||
|
@ -141,7 +143,7 @@ def attachMedia(baseDir: str, httpPrefix: str, domain: str, port: int,
|
|||
Blurhash is optional, since low power systems may take a long
|
||||
time to calculate it
|
||||
"""
|
||||
if not isMedia(imageFilename):
|
||||
if not _isMedia(imageFilename):
|
||||
return postJson
|
||||
|
||||
fileExtension = None
|
||||
|
@ -163,10 +165,7 @@ def attachMedia(baseDir: str, httpPrefix: str, domain: str, port: int,
|
|||
if mediaType == 'audio/mpeg':
|
||||
fileExtension = 'mp3'
|
||||
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
domain = domain + ':' + str(port)
|
||||
domain = getFullDomain(domain, port)
|
||||
|
||||
mPath = getMediaPath()
|
||||
mediaPath = mPath + '/' + createPassword(32) + '.' + fileExtension
|
||||
|
@ -183,7 +182,7 @@ def attachMedia(baseDir: str, httpPrefix: str, domain: str, port: int,
|
|||
if mediaType.startswith('image/'):
|
||||
attachmentJson['focialPoint'] = [0.0, 0.0]
|
||||
if useBlurhash:
|
||||
attachmentJson['blurhash'] = getImageHash(imageFilename)
|
||||
attachmentJson['blurhash'] = _getImageHash(imageFilename)
|
||||
postJson['attachment'] = [attachmentJson]
|
||||
|
||||
if baseDir:
|
||||
|
@ -191,7 +190,7 @@ def attachMedia(baseDir: str, httpPrefix: str, domain: str, port: int,
|
|||
removeMetaData(imageFilename, mediaFilename)
|
||||
else:
|
||||
copyfile(imageFilename, mediaFilename)
|
||||
updateEtag(mediaFilename)
|
||||
_updateEtag(mediaFilename)
|
||||
|
||||
return postJson
|
||||
|
||||
|
@ -221,3 +220,4 @@ def archiveMedia(baseDir: str, archiveDirectory: str, maxWeeks=4) -> None:
|
|||
else:
|
||||
# archive to /dev/null
|
||||
rmtree(os.path.join(baseDir + '/media', weekDir))
|
||||
break
|
||||
|
|
|
@ -9,8 +9,8 @@ __status__ = "Production"
|
|||
import os
|
||||
|
||||
|
||||
def migrateFollows(followFilename: str, oldHandle: str,
|
||||
newHandle: str) -> None:
|
||||
def _migrateFollows(followFilename: str, oldHandle: str,
|
||||
newHandle: str) -> None:
|
||||
"""Changes a handle within following or followers list
|
||||
"""
|
||||
if not os.path.isfile(followFilename):
|
||||
|
@ -48,6 +48,7 @@ def migrateAccount(baseDir: str, oldHandle: str, newHandle: str) -> None:
|
|||
if '@' in handle:
|
||||
accountDir = baseDir + '/accounts/' + handle
|
||||
followFilename = accountDir + '/following.txt'
|
||||
migrateFollows(followFilename, oldHandle, newHandle)
|
||||
_migrateFollows(followFilename, oldHandle, newHandle)
|
||||
followFilename = accountDir + '/followers.txt'
|
||||
migrateFollows(followFilename, oldHandle, newHandle)
|
||||
_migrateFollows(followFilename, oldHandle, newHandle)
|
||||
break
|
||||
|
|
128
newsdaemon.py
|
@ -26,6 +26,7 @@ from posts import archivePostsForPerson
|
|||
from content import removeHtmlTag
|
||||
from content import dangerousMarkup
|
||||
from content import validHashTag
|
||||
from utils import getFullDomain
|
||||
from utils import loadJson
|
||||
from utils import saveJson
|
||||
from utils import getStatusNumber
|
||||
|
@ -34,7 +35,7 @@ from inbox import storeHashTags
|
|||
from session import createSession
|
||||
|
||||
|
||||
def updateFeedsOutboxIndex(baseDir: str, domain: str, postId: str) -> None:
|
||||
def _updateFeedsOutboxIndex(baseDir: str, domain: str, postId: str) -> None:
|
||||
"""Updates the index used for imported RSS feeds
|
||||
"""
|
||||
basePath = baseDir + '/accounts/news@' + domain
|
||||
|
@ -45,9 +46,10 @@ def updateFeedsOutboxIndex(baseDir: str, domain: str, postId: str) -> None:
|
|||
try:
|
||||
with open(indexFilename, 'r+') as feedsFile:
|
||||
content = feedsFile.read()
|
||||
feedsFile.seek(0, 0)
|
||||
feedsFile.write(postId + '\n' + content)
|
||||
print('DEBUG: feeds post added to index')
|
||||
if postId + '\n' not in content:
|
||||
feedsFile.seek(0, 0)
|
||||
feedsFile.write(postId + '\n' + content)
|
||||
print('DEBUG: feeds post added to index')
|
||||
except Exception as e:
|
||||
print('WARN: Failed to write entry to feeds posts index ' +
|
||||
indexFilename + ' ' + str(e))
|
||||
|
@ -58,7 +60,7 @@ def updateFeedsOutboxIndex(baseDir: str, domain: str, postId: str) -> None:
|
|||
feedsFile.close()
|
||||
|
||||
|
||||
def saveArrivedTime(baseDir: str, postFilename: str, arrived: str) -> None:
|
||||
def _saveArrivedTime(baseDir: str, postFilename: str, arrived: str) -> None:
|
||||
"""Saves the time when an rss post arrived to a file
|
||||
"""
|
||||
arrivedFile = open(postFilename + '.arrived', 'w+')
|
||||
|
@ -67,7 +69,7 @@ def saveArrivedTime(baseDir: str, postFilename: str, arrived: str) -> None:
|
|||
arrivedFile.close()
|
||||
|
||||
|
||||
def removeControlCharacters(content: str) -> str:
|
||||
def _removeControlCharacters(content: str) -> str:
|
||||
"""Remove escaped html
|
||||
"""
|
||||
if '&' in content:
|
||||
|
@ -226,14 +228,14 @@ def hashtagRuleTree(operators: [],
|
|||
return tree
|
||||
|
||||
|
||||
def newswireHashtagProcessing(session, baseDir: str, postJsonObject: {},
|
||||
hashtags: [], httpPrefix: str,
|
||||
domain: str, port: int,
|
||||
personCache: {},
|
||||
cachedWebfingers: {},
|
||||
federationList: [],
|
||||
sendThreads: [], postLog: [],
|
||||
moderated: bool, url: str) -> bool:
|
||||
def _newswireHashtagProcessing(session, baseDir: str, postJsonObject: {},
|
||||
hashtags: [], httpPrefix: str,
|
||||
domain: str, port: int,
|
||||
personCache: {},
|
||||
cachedWebfingers: {},
|
||||
federationList: [],
|
||||
sendThreads: [], postLog: [],
|
||||
moderated: bool, url: str) -> bool:
|
||||
"""Applies hashtag rules to a news post.
|
||||
Returns true if the post should be saved to the news timeline
|
||||
of this instance
|
||||
|
@ -245,10 +247,7 @@ def newswireHashtagProcessing(session, baseDir: str, postJsonObject: {},
|
|||
with open(rulesFilename, "r") as f:
|
||||
rules = f.readlines()
|
||||
|
||||
domainFull = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
domainFull = domain + ':' + str(port)
|
||||
domainFull = getFullDomain(domain, port)
|
||||
|
||||
# get the full text content of the post
|
||||
content = ''
|
||||
|
@ -358,9 +357,9 @@ def newswireHashtagProcessing(session, baseDir: str, postJsonObject: {},
|
|||
return True
|
||||
|
||||
|
||||
def createNewsMirror(baseDir: str, domain: str,
|
||||
postIdNumber: str, url: str,
|
||||
maxMirroredArticles: int) -> bool:
|
||||
def _createNewsMirror(baseDir: str, domain: str,
|
||||
postIdNumber: str, url: str,
|
||||
maxMirroredArticles: int) -> bool:
|
||||
"""Creates a local mirror of a news article
|
||||
"""
|
||||
if '|' in url or '>' in url:
|
||||
|
@ -448,17 +447,17 @@ def createNewsMirror(baseDir: str, domain: str,
|
|||
return True
|
||||
|
||||
|
||||
def convertRSStoActivityPub(baseDir: str, httpPrefix: str,
|
||||
domain: str, port: int,
|
||||
newswire: {},
|
||||
translate: {},
|
||||
recentPostsCache: {}, maxRecentPosts: int,
|
||||
session, cachedWebfingers: {},
|
||||
personCache: {},
|
||||
federationList: [],
|
||||
sendThreads: [], postLog: [],
|
||||
maxMirroredArticles: int,
|
||||
allowLocalNetworkAccess: bool) -> None:
|
||||
def _convertRSStoActivityPub(baseDir: str, httpPrefix: str,
|
||||
domain: str, port: int,
|
||||
newswire: {},
|
||||
translate: {},
|
||||
recentPostsCache: {}, maxRecentPosts: int,
|
||||
session, cachedWebfingers: {},
|
||||
personCache: {},
|
||||
federationList: [],
|
||||
sendThreads: [], postLog: [],
|
||||
maxMirroredArticles: int,
|
||||
allowLocalNetworkAccess: bool) -> None:
|
||||
"""Converts rss items in a newswire into posts
|
||||
"""
|
||||
if not newswire:
|
||||
|
@ -499,7 +498,7 @@ def convertRSStoActivityPub(baseDir: str, httpPrefix: str,
|
|||
newswire[originalDateStr][3] = filename
|
||||
continue
|
||||
|
||||
rssTitle = removeControlCharacters(item[0])
|
||||
rssTitle = _removeControlCharacters(item[0])
|
||||
url = item[1]
|
||||
if dangerousMarkup(url, allowLocalNetworkAccess) or \
|
||||
dangerousMarkup(rssTitle, allowLocalNetworkAccess):
|
||||
|
@ -507,7 +506,7 @@ def convertRSStoActivityPub(baseDir: str, httpPrefix: str,
|
|||
rssDescription = ''
|
||||
|
||||
# get the rss description if it exists
|
||||
rssDescription = removeControlCharacters(item[4])
|
||||
rssDescription = _removeControlCharacters(item[4])
|
||||
if rssDescription.startswith('<![CDATA['):
|
||||
rssDescription = rssDescription.replace('<![CDATA[', '')
|
||||
rssDescription = rssDescription.replace(']]>', '')
|
||||
|
@ -557,8 +556,8 @@ def convertRSStoActivityPub(baseDir: str, httpPrefix: str,
|
|||
continue
|
||||
|
||||
if mirrored:
|
||||
if not createNewsMirror(baseDir, domain, statusNumber,
|
||||
url, maxMirroredArticles):
|
||||
if not _createNewsMirror(baseDir, domain, statusNumber,
|
||||
url, maxMirroredArticles):
|
||||
continue
|
||||
|
||||
idStr = \
|
||||
|
@ -584,10 +583,7 @@ def convertRSStoActivityPub(baseDir: str, httpPrefix: str,
|
|||
blog['object']['content'] = rssDescription
|
||||
blog['object']['contentMap']['en'] = rssDescription
|
||||
|
||||
domainFull = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
domainFull = domain + ':' + str(port)
|
||||
domainFull = getFullDomain(domain, port)
|
||||
|
||||
hashtags = item[6]
|
||||
|
||||
|
@ -595,12 +591,12 @@ def convertRSStoActivityPub(baseDir: str, httpPrefix: str,
|
|||
|
||||
moderated = item[5]
|
||||
|
||||
savePost = newswireHashtagProcessing(session, baseDir, blog, hashtags,
|
||||
httpPrefix, domain, port,
|
||||
personCache, cachedWebfingers,
|
||||
federationList,
|
||||
sendThreads, postLog,
|
||||
moderated, url)
|
||||
savePost = _newswireHashtagProcessing(session, baseDir, blog, hashtags,
|
||||
httpPrefix, domain, port,
|
||||
personCache, cachedWebfingers,
|
||||
federationList,
|
||||
sendThreads, postLog,
|
||||
moderated, url)
|
||||
|
||||
# save the post and update the index
|
||||
if savePost:
|
||||
|
@ -633,7 +629,7 @@ def convertRSStoActivityPub(baseDir: str, httpPrefix: str,
|
|||
blog['object']['content'] = content
|
||||
|
||||
# update the newswire tags if new ones have been found by
|
||||
# newswireHashtagProcessing
|
||||
# _newswireHashtagProcessing
|
||||
for tag in hashtags:
|
||||
if tag not in newswire[originalDateStr][6]:
|
||||
newswire[originalDateStr][6].append(tag)
|
||||
|
@ -642,14 +638,14 @@ def convertRSStoActivityPub(baseDir: str, httpPrefix: str,
|
|||
|
||||
clearFromPostCaches(baseDir, recentPostsCache, postId)
|
||||
if saveJson(blog, filename):
|
||||
updateFeedsOutboxIndex(baseDir, domain, postId + '.json')
|
||||
_updateFeedsOutboxIndex(baseDir, domain, postId + '.json')
|
||||
|
||||
# Save a file containing the time when the post arrived
|
||||
# this can then later be used to construct the news timeline
|
||||
# excluding items during the voting period
|
||||
if moderated:
|
||||
saveArrivedTime(baseDir, filename,
|
||||
blog['object']['arrived'])
|
||||
_saveArrivedTime(baseDir, filename,
|
||||
blog['object']['arrived'])
|
||||
else:
|
||||
if os.path.isfile(filename + '.arrived'):
|
||||
os.remove(filename + '.arrived')
|
||||
|
@ -663,7 +659,7 @@ def convertRSStoActivityPub(baseDir: str, httpPrefix: str,
|
|||
newswire[originalDateStr][3] = filename
|
||||
|
||||
|
||||
def mergeWithPreviousNewswire(oldNewswire: {}, newNewswire: {}) -> None:
|
||||
def _mergeWithPreviousNewswire(oldNewswire: {}, newNewswire: {}) -> None:
|
||||
"""Preserve any votes or generated activitypub post filename
|
||||
as rss feeds are updated
|
||||
"""
|
||||
|
@ -712,26 +708,26 @@ def runNewswireDaemon(baseDir: str, httpd,
|
|||
if os.path.isfile(newswireStateFilename):
|
||||
httpd.newswire = loadJson(newswireStateFilename)
|
||||
|
||||
mergeWithPreviousNewswire(httpd.newswire, newNewswire)
|
||||
_mergeWithPreviousNewswire(httpd.newswire, newNewswire)
|
||||
|
||||
httpd.newswire = newNewswire
|
||||
if newNewswire:
|
||||
saveJson(httpd.newswire, newswireStateFilename)
|
||||
print('Newswire updated')
|
||||
|
||||
convertRSStoActivityPub(baseDir,
|
||||
httpPrefix, domain, port,
|
||||
newNewswire, translate,
|
||||
httpd.recentPostsCache,
|
||||
httpd.maxRecentPosts,
|
||||
httpd.session,
|
||||
httpd.cachedWebfingers,
|
||||
httpd.personCache,
|
||||
httpd.federationList,
|
||||
httpd.sendThreads,
|
||||
httpd.postLog,
|
||||
httpd.maxMirroredArticles,
|
||||
httpd.allowLocalNetworkAccess)
|
||||
_convertRSStoActivityPub(baseDir,
|
||||
httpPrefix, domain, port,
|
||||
newNewswire, translate,
|
||||
httpd.recentPostsCache,
|
||||
httpd.maxRecentPosts,
|
||||
httpd.session,
|
||||
httpd.cachedWebfingers,
|
||||
httpd.personCache,
|
||||
httpd.federationList,
|
||||
httpd.sendThreads,
|
||||
httpd.postLog,
|
||||
httpd.maxMirroredArticles,
|
||||
httpd.allowLocalNetworkAccess)
|
||||
print('Newswire feed converted to ActivityPub')
|
||||
|
||||
if httpd.maxNewsPosts > 0:
|
||||
|
@ -757,7 +753,7 @@ def runNewswireWatchdog(projectVersion: str, httpd) -> None:
|
|||
httpd.thrNewswireDaemon.start()
|
||||
while True:
|
||||
time.sleep(50)
|
||||
if not httpd.thrNewswireDaemon.isAlive():
|
||||
if not httpd.thrNewswireDaemon.is_alive():
|
||||
httpd.thrNewswireDaemon.kill()
|
||||
httpd.thrNewswireDaemon = \
|
||||
newswireOriginal.clone(runNewswireDaemon)
|
||||
|
|
336
newswire.py
|
@ -14,7 +14,8 @@ from datetime import datetime
|
|||
from datetime import timedelta
|
||||
from datetime import timezone
|
||||
from collections import OrderedDict
|
||||
from utils import setHashtagCategory
|
||||
from utils import validPostDate
|
||||
from categories import setHashtagCategory
|
||||
from utils import firstParagraphFromString
|
||||
from utils import isPublicPost
|
||||
from utils import locatePost
|
||||
|
@ -28,7 +29,7 @@ from blocking import isBlockedHashtag
|
|||
from filters import isFiltered
|
||||
|
||||
|
||||
def removeCDATA(text: str) -> str:
|
||||
def _removeCDATA(text: str) -> str:
|
||||
"""Removes any CDATA from the given text
|
||||
"""
|
||||
if 'CDATA[' in text:
|
||||
|
@ -94,13 +95,13 @@ def getNewswireTags(text: str, maxTags: int) -> []:
|
|||
return tags
|
||||
|
||||
|
||||
def addNewswireDictEntry(baseDir: str, domain: str,
|
||||
newswire: {}, dateStr: str,
|
||||
title: str, link: str,
|
||||
votesStatus: str, postFilename: str,
|
||||
description: str, moderated: bool,
|
||||
mirrored: bool,
|
||||
tags=[], maxTags=32) -> None:
|
||||
def _addNewswireDictEntry(baseDir: str, domain: str,
|
||||
newswire: {}, dateStr: str,
|
||||
title: str, link: str,
|
||||
votesStatus: str, postFilename: str,
|
||||
description: str, moderated: bool,
|
||||
mirrored: bool,
|
||||
tags=[], maxTags=32) -> None:
|
||||
"""Update the newswire dictionary
|
||||
"""
|
||||
# remove any markup
|
||||
|
@ -110,7 +111,7 @@ def addNewswireDictEntry(baseDir: str, domain: str,
|
|||
allText = title + ' ' + description
|
||||
|
||||
# check that none of the text is filtered against
|
||||
if isFiltered(baseDir, 'news', domain, allText):
|
||||
if isFiltered(baseDir, None, None, allText):
|
||||
return
|
||||
|
||||
if tags is None:
|
||||
|
@ -142,6 +143,13 @@ def addNewswireDictEntry(baseDir: str, domain: str,
|
|||
]
|
||||
|
||||
|
||||
def _validFeedDate(pubDate: str) -> bool:
|
||||
# convert from YY-MM-DD HH:MM:SS+00:00 to
|
||||
# YY-MM-DDTHH:MM:SSZ
|
||||
postDate = pubDate.replace(' ', 'T').replace('+00:00', 'Z')
|
||||
return validPostDate(postDate, 30)
|
||||
|
||||
|
||||
def parseFeedDate(pubDate: str) -> str:
|
||||
"""Returns a UTC date string based on the given date string
|
||||
This tries a number of formats to see which work
|
||||
|
@ -211,12 +219,12 @@ def loadHashtagCategories(baseDir: str, language: str) -> None:
|
|||
|
||||
with open(hashtagCategoriesFilename, 'r') as fp:
|
||||
xmlStr = fp.read()
|
||||
xml2StrToHashtagCategories(baseDir, xmlStr, 1024, True)
|
||||
_xml2StrToHashtagCategories(baseDir, xmlStr, 1024, True)
|
||||
|
||||
|
||||
def xml2StrToHashtagCategories(baseDir: str, xmlStr: str,
|
||||
maxCategoriesFeedItemSizeKb: int,
|
||||
force=False) -> None:
|
||||
def _xml2StrToHashtagCategories(baseDir: str, xmlStr: str,
|
||||
maxCategoriesFeedItemSizeKb: int,
|
||||
force=False) -> None:
|
||||
"""Updates hashtag categories based upon an rss feed
|
||||
"""
|
||||
rssItems = xmlStr.split('<item>')
|
||||
|
@ -253,12 +261,12 @@ def xml2StrToHashtagCategories(baseDir: str, xmlStr: str,
|
|||
setHashtagCategory(baseDir, hashtag, categoryStr, force)
|
||||
|
||||
|
||||
def xml2StrToDict(baseDir: str, domain: str, xmlStr: str,
|
||||
moderated: bool, mirrored: bool,
|
||||
maxPostsPerSource: int,
|
||||
maxFeedItemSizeKb: int,
|
||||
maxCategoriesFeedItemSizeKb: int) -> {}:
|
||||
"""Converts an xml 2.0 string to a dictionary
|
||||
def _xml2StrToDict(baseDir: str, domain: str, xmlStr: str,
|
||||
moderated: bool, mirrored: bool,
|
||||
maxPostsPerSource: int,
|
||||
maxFeedItemSizeKb: int,
|
||||
maxCategoriesFeedItemSizeKb: int) -> {}:
|
||||
"""Converts an xml RSS 2.0 string to a dictionary
|
||||
"""
|
||||
if '<item>' not in xmlStr:
|
||||
return {}
|
||||
|
@ -266,8 +274,8 @@ def xml2StrToDict(baseDir: str, domain: str, xmlStr: str,
|
|||
|
||||
# is this an rss feed containing hashtag categories?
|
||||
if '<title>#categories</title>' in xmlStr:
|
||||
xml2StrToHashtagCategories(baseDir, xmlStr,
|
||||
maxCategoriesFeedItemSizeKb)
|
||||
_xml2StrToHashtagCategories(baseDir, xmlStr,
|
||||
maxCategoriesFeedItemSizeKb)
|
||||
return {}
|
||||
|
||||
rssItems = xmlStr.split('<item>')
|
||||
|
@ -292,17 +300,17 @@ def xml2StrToDict(baseDir: str, domain: str, xmlStr: str,
|
|||
if '</pubDate>' not in rssItem:
|
||||
continue
|
||||
title = rssItem.split('<title>')[1]
|
||||
title = removeCDATA(title.split('</title>')[0])
|
||||
title = _removeCDATA(title.split('</title>')[0])
|
||||
description = ''
|
||||
if '<description>' in rssItem and '</description>' in rssItem:
|
||||
description = rssItem.split('<description>')[1]
|
||||
description = removeCDATA(description.split('</description>')[0])
|
||||
description = _removeCDATA(description.split('</description>')[0])
|
||||
else:
|
||||
if '<media:description>' in rssItem and \
|
||||
'</media:description>' in rssItem:
|
||||
description = rssItem.split('<media:description>')[1]
|
||||
description = description.split('</media:description>')[0]
|
||||
description = removeCDATA(description)
|
||||
description = _removeCDATA(description)
|
||||
link = rssItem.split('<link>')[1]
|
||||
link = link.split('</link>')[0]
|
||||
if '://' not in link:
|
||||
|
@ -317,25 +325,110 @@ def xml2StrToDict(baseDir: str, domain: str, xmlStr: str,
|
|||
|
||||
pubDateStr = parseFeedDate(pubDate)
|
||||
if pubDateStr:
|
||||
postFilename = ''
|
||||
votesStatus = []
|
||||
addNewswireDictEntry(baseDir, domain,
|
||||
result, pubDateStr,
|
||||
title, link,
|
||||
votesStatus, postFilename,
|
||||
description, moderated, mirrored)
|
||||
postCtr += 1
|
||||
if postCtr >= maxPostsPerSource:
|
||||
break
|
||||
if _validFeedDate(pubDateStr):
|
||||
postFilename = ''
|
||||
votesStatus = []
|
||||
_addNewswireDictEntry(baseDir, domain,
|
||||
result, pubDateStr,
|
||||
title, link,
|
||||
votesStatus, postFilename,
|
||||
description, moderated, mirrored)
|
||||
postCtr += 1
|
||||
if postCtr >= maxPostsPerSource:
|
||||
break
|
||||
if postCtr > 0:
|
||||
print('Added ' + str(postCtr) + ' rss feed items to newswire')
|
||||
print('Added ' + str(postCtr) + ' rss 2.0 feed items to newswire')
|
||||
return result
|
||||
|
||||
|
||||
def atomFeedToDict(baseDir: str, domain: str, xmlStr: str,
|
||||
def _xml1StrToDict(baseDir: str, domain: str, xmlStr: str,
|
||||
moderated: bool, mirrored: bool,
|
||||
maxPostsPerSource: int,
|
||||
maxFeedItemSizeKb: int) -> {}:
|
||||
maxFeedItemSizeKb: int,
|
||||
maxCategoriesFeedItemSizeKb: int) -> {}:
|
||||
"""Converts an xml RSS 1.0 string to a dictionary
|
||||
https://validator.w3.org/feed/docs/rss1.html
|
||||
"""
|
||||
itemStr = '<item'
|
||||
if itemStr not in xmlStr:
|
||||
return {}
|
||||
result = {}
|
||||
|
||||
# is this an rss feed containing hashtag categories?
|
||||
if '<title>#categories</title>' in xmlStr:
|
||||
_xml2StrToHashtagCategories(baseDir, xmlStr,
|
||||
maxCategoriesFeedItemSizeKb)
|
||||
return {}
|
||||
|
||||
rssItems = xmlStr.split(itemStr)
|
||||
postCtr = 0
|
||||
maxBytes = maxFeedItemSizeKb * 1024
|
||||
for rssItem in rssItems:
|
||||
if not rssItem:
|
||||
continue
|
||||
if len(rssItem) > maxBytes:
|
||||
print('WARN: rss 1.0 feed item is too big')
|
||||
continue
|
||||
if rssItem.startswith('s>'):
|
||||
continue
|
||||
if '<title>' not in rssItem:
|
||||
continue
|
||||
if '</title>' not in rssItem:
|
||||
continue
|
||||
if '<link>' not in rssItem:
|
||||
continue
|
||||
if '</link>' not in rssItem:
|
||||
continue
|
||||
if '<dc:date>' not in rssItem:
|
||||
continue
|
||||
if '</dc:date>' not in rssItem:
|
||||
continue
|
||||
title = rssItem.split('<title>')[1]
|
||||
title = _removeCDATA(title.split('</title>')[0])
|
||||
description = ''
|
||||
if '<description>' in rssItem and '</description>' in rssItem:
|
||||
description = rssItem.split('<description>')[1]
|
||||
description = _removeCDATA(description.split('</description>')[0])
|
||||
else:
|
||||
if '<media:description>' in rssItem and \
|
||||
'</media:description>' in rssItem:
|
||||
description = rssItem.split('<media:description>')[1]
|
||||
description = description.split('</media:description>')[0]
|
||||
description = _removeCDATA(description)
|
||||
link = rssItem.split('<link>')[1]
|
||||
link = link.split('</link>')[0]
|
||||
if '://' not in link:
|
||||
continue
|
||||
itemDomain = link.split('://')[1]
|
||||
if '/' in itemDomain:
|
||||
itemDomain = itemDomain.split('/')[0]
|
||||
if isBlockedDomain(baseDir, itemDomain):
|
||||
continue
|
||||
pubDate = rssItem.split('<dc:date>')[1]
|
||||
pubDate = pubDate.split('</dc:date>')[0]
|
||||
|
||||
pubDateStr = parseFeedDate(pubDate)
|
||||
if pubDateStr:
|
||||
if _validFeedDate(pubDateStr):
|
||||
postFilename = ''
|
||||
votesStatus = []
|
||||
_addNewswireDictEntry(baseDir, domain,
|
||||
result, pubDateStr,
|
||||
title, link,
|
||||
votesStatus, postFilename,
|
||||
description, moderated, mirrored)
|
||||
postCtr += 1
|
||||
if postCtr >= maxPostsPerSource:
|
||||
break
|
||||
if postCtr > 0:
|
||||
print('Added ' + str(postCtr) + ' rss 1.0 feed items to newswire')
|
||||
return result
|
||||
|
||||
|
||||
def _atomFeedToDict(baseDir: str, domain: str, xmlStr: str,
|
||||
moderated: bool, mirrored: bool,
|
||||
maxPostsPerSource: int,
|
||||
maxFeedItemSizeKb: int) -> {}:
|
||||
"""Converts an atom feed string to a dictionary
|
||||
"""
|
||||
if '<entry>' not in xmlStr:
|
||||
|
@ -363,17 +456,17 @@ def atomFeedToDict(baseDir: str, domain: str, xmlStr: str,
|
|||
if '</updated>' not in atomItem:
|
||||
continue
|
||||
title = atomItem.split('<title>')[1]
|
||||
title = removeCDATA(title.split('</title>')[0])
|
||||
title = _removeCDATA(title.split('</title>')[0])
|
||||
description = ''
|
||||
if '<summary>' in atomItem and '</summary>' in atomItem:
|
||||
description = atomItem.split('<summary>')[1]
|
||||
description = removeCDATA(description.split('</summary>')[0])
|
||||
description = _removeCDATA(description.split('</summary>')[0])
|
||||
else:
|
||||
if '<media:description>' in atomItem and \
|
||||
'</media:description>' in atomItem:
|
||||
description = atomItem.split('<media:description>')[1]
|
||||
description = description.split('</media:description>')[0]
|
||||
description = removeCDATA(description)
|
||||
description = _removeCDATA(description)
|
||||
link = atomItem.split('<link>')[1]
|
||||
link = link.split('</link>')[0]
|
||||
if '://' not in link:
|
||||
|
@ -388,25 +481,26 @@ def atomFeedToDict(baseDir: str, domain: str, xmlStr: str,
|
|||
|
||||
pubDateStr = parseFeedDate(pubDate)
|
||||
if pubDateStr:
|
||||
postFilename = ''
|
||||
votesStatus = []
|
||||
addNewswireDictEntry(baseDir, domain,
|
||||
result, pubDateStr,
|
||||
title, link,
|
||||
votesStatus, postFilename,
|
||||
description, moderated, mirrored)
|
||||
postCtr += 1
|
||||
if postCtr >= maxPostsPerSource:
|
||||
break
|
||||
if _validFeedDate(pubDateStr):
|
||||
postFilename = ''
|
||||
votesStatus = []
|
||||
_addNewswireDictEntry(baseDir, domain,
|
||||
result, pubDateStr,
|
||||
title, link,
|
||||
votesStatus, postFilename,
|
||||
description, moderated, mirrored)
|
||||
postCtr += 1
|
||||
if postCtr >= maxPostsPerSource:
|
||||
break
|
||||
if postCtr > 0:
|
||||
print('Added ' + str(postCtr) + ' atom feed items to newswire')
|
||||
return result
|
||||
|
||||
|
||||
def atomFeedYTToDict(baseDir: str, domain: str, xmlStr: str,
|
||||
moderated: bool, mirrored: bool,
|
||||
maxPostsPerSource: int,
|
||||
maxFeedItemSizeKb: int) -> {}:
|
||||
def _atomFeedYTToDict(baseDir: str, domain: str, xmlStr: str,
|
||||
moderated: bool, mirrored: bool,
|
||||
maxPostsPerSource: int,
|
||||
maxFeedItemSizeKb: int) -> {}:
|
||||
"""Converts an atom-style YouTube feed string to a dictionary
|
||||
"""
|
||||
if '<entry>' not in xmlStr:
|
||||
|
@ -438,17 +532,17 @@ def atomFeedYTToDict(baseDir: str, domain: str, xmlStr: str,
|
|||
if '</yt:videoId>' not in atomItem:
|
||||
continue
|
||||
title = atomItem.split('<title>')[1]
|
||||
title = removeCDATA(title.split('</title>')[0])
|
||||
title = _removeCDATA(title.split('</title>')[0])
|
||||
description = ''
|
||||
if '<media:description>' in atomItem and \
|
||||
'</media:description>' in atomItem:
|
||||
description = atomItem.split('<media:description>')[1]
|
||||
description = description.split('</media:description>')[0]
|
||||
description = removeCDATA(description)
|
||||
description = _removeCDATA(description)
|
||||
elif '<summary>' in atomItem and '</summary>' in atomItem:
|
||||
description = atomItem.split('<summary>')[1]
|
||||
description = description.split('</summary>')[0]
|
||||
description = removeCDATA(description)
|
||||
description = _removeCDATA(description)
|
||||
link = atomItem.split('<yt:videoId>')[1]
|
||||
link = link.split('</yt:videoId>')[0]
|
||||
link = 'https://www.youtube.com/watch?v=' + link.strip()
|
||||
|
@ -457,46 +551,52 @@ def atomFeedYTToDict(baseDir: str, domain: str, xmlStr: str,
|
|||
|
||||
pubDateStr = parseFeedDate(pubDate)
|
||||
if pubDateStr:
|
||||
postFilename = ''
|
||||
votesStatus = []
|
||||
addNewswireDictEntry(baseDir, domain,
|
||||
result, pubDateStr,
|
||||
title, link,
|
||||
votesStatus, postFilename,
|
||||
description, moderated, mirrored)
|
||||
postCtr += 1
|
||||
if postCtr >= maxPostsPerSource:
|
||||
break
|
||||
if _validFeedDate(pubDateStr):
|
||||
postFilename = ''
|
||||
votesStatus = []
|
||||
_addNewswireDictEntry(baseDir, domain,
|
||||
result, pubDateStr,
|
||||
title, link,
|
||||
votesStatus, postFilename,
|
||||
description, moderated, mirrored)
|
||||
postCtr += 1
|
||||
if postCtr >= maxPostsPerSource:
|
||||
break
|
||||
if postCtr > 0:
|
||||
print('Added ' + str(postCtr) + ' YouTube feed items to newswire')
|
||||
return result
|
||||
|
||||
|
||||
def xmlStrToDict(baseDir: str, domain: str, xmlStr: str,
|
||||
moderated: bool, mirrored: bool,
|
||||
maxPostsPerSource: int,
|
||||
maxFeedItemSizeKb: int,
|
||||
maxCategoriesFeedItemSizeKb: int) -> {}:
|
||||
def _xmlStrToDict(baseDir: str, domain: str, xmlStr: str,
|
||||
moderated: bool, mirrored: bool,
|
||||
maxPostsPerSource: int,
|
||||
maxFeedItemSizeKb: int,
|
||||
maxCategoriesFeedItemSizeKb: int) -> {}:
|
||||
"""Converts an xml string to a dictionary
|
||||
"""
|
||||
if '<yt:videoId>' in xmlStr and '<yt:channelId>' in xmlStr:
|
||||
print('YouTube feed: reading')
|
||||
return atomFeedYTToDict(baseDir, domain,
|
||||
xmlStr, moderated, mirrored,
|
||||
maxPostsPerSource, maxFeedItemSizeKb)
|
||||
return _atomFeedYTToDict(baseDir, domain,
|
||||
xmlStr, moderated, mirrored,
|
||||
maxPostsPerSource, maxFeedItemSizeKb)
|
||||
elif 'rss version="2.0"' in xmlStr:
|
||||
return xml2StrToDict(baseDir, domain,
|
||||
xmlStr, moderated, mirrored,
|
||||
maxPostsPerSource, maxFeedItemSizeKb,
|
||||
maxCategoriesFeedItemSizeKb)
|
||||
elif 'xmlns="http://www.w3.org/2005/Atom"' in xmlStr:
|
||||
return atomFeedToDict(baseDir, domain,
|
||||
return _xml2StrToDict(baseDir, domain,
|
||||
xmlStr, moderated, mirrored,
|
||||
maxPostsPerSource, maxFeedItemSizeKb)
|
||||
maxPostsPerSource, maxFeedItemSizeKb,
|
||||
maxCategoriesFeedItemSizeKb)
|
||||
elif '<?xml version="1.0"' in xmlStr:
|
||||
return _xml1StrToDict(baseDir, domain,
|
||||
xmlStr, moderated, mirrored,
|
||||
maxPostsPerSource, maxFeedItemSizeKb,
|
||||
maxCategoriesFeedItemSizeKb)
|
||||
elif 'xmlns="http://www.w3.org/2005/Atom"' in xmlStr:
|
||||
return _atomFeedToDict(baseDir, domain,
|
||||
xmlStr, moderated, mirrored,
|
||||
maxPostsPerSource, maxFeedItemSizeKb)
|
||||
return {}
|
||||
|
||||
|
||||
def YTchannelToAtomFeed(url: str) -> str:
|
||||
def _YTchannelToAtomFeed(url: str) -> str:
|
||||
"""Converts a YouTube channel url into an atom feed url
|
||||
"""
|
||||
if 'youtube.com/channel/' not in url:
|
||||
|
@ -520,7 +620,7 @@ def getRSS(baseDir: str, domain: str, session, url: str,
|
|||
print('ERROR: getRSS url should be a string')
|
||||
return None
|
||||
headers = {
|
||||
'Accept': 'text/xml; charset=UTF-8'
|
||||
'Accept': 'text/xml, application/xml; charset=UTF-8'
|
||||
}
|
||||
params = None
|
||||
sessionParams = {}
|
||||
|
@ -533,17 +633,17 @@ def getRSS(baseDir: str, domain: str, session, url: str,
|
|||
'Mozilla/5.0 (X11; Linux x86_64; rv:81.0) Gecko/20100101 Firefox/81.0'
|
||||
if not session:
|
||||
print('WARN: no session specified for getRSS')
|
||||
url = YTchannelToAtomFeed(url)
|
||||
url = _YTchannelToAtomFeed(url)
|
||||
try:
|
||||
result = session.get(url, headers=sessionHeaders, params=sessionParams)
|
||||
if result:
|
||||
if int(len(result.text) / 1024) < maxFeedSizeKb and \
|
||||
not containsInvalidChars(result.text):
|
||||
return xmlStrToDict(baseDir, domain, result.text,
|
||||
moderated, mirrored,
|
||||
maxPostsPerSource,
|
||||
maxFeedItemSizeKb,
|
||||
maxCategoriesFeedItemSizeKb)
|
||||
return _xmlStrToDict(baseDir, domain, result.text,
|
||||
moderated, mirrored,
|
||||
maxPostsPerSource,
|
||||
maxFeedItemSizeKb,
|
||||
maxCategoriesFeedItemSizeKb)
|
||||
else:
|
||||
print('WARN: feed is too large, ' +
|
||||
'or contains invalid characters: ' + url)
|
||||
|
@ -592,7 +692,7 @@ def getRSSfromDict(baseDir: str, newswire: {},
|
|||
continue
|
||||
rssStr += '<item>\n'
|
||||
rssStr += ' <title>' + fields[0] + '</title>\n'
|
||||
description = removeCDATA(firstParagraphFromString(fields[4]))
|
||||
description = _removeCDATA(firstParagraphFromString(fields[4]))
|
||||
rssStr += ' <description>' + description + '</description>\n'
|
||||
url = fields[1]
|
||||
if '://' not in url:
|
||||
|
@ -607,7 +707,7 @@ def getRSSfromDict(baseDir: str, newswire: {},
|
|||
return rssStr
|
||||
|
||||
|
||||
def isNewswireBlogPost(postJsonObject: {}) -> bool:
|
||||
def _isNewswireBlogPost(postJsonObject: {}) -> bool:
|
||||
"""Is the given object a blog post?
|
||||
There isn't any difference between a blog post and a newswire blog post
|
||||
but we may here need to check for different properties than
|
||||
|
@ -627,7 +727,7 @@ def isNewswireBlogPost(postJsonObject: {}) -> bool:
|
|||
return False
|
||||
|
||||
|
||||
def getHashtagsFromPost(postJsonObject: {}) -> []:
|
||||
def _getHashtagsFromPost(postJsonObject: {}) -> []:
|
||||
"""Returns a list of any hashtags within a post
|
||||
"""
|
||||
if not postJsonObject.get('object'):
|
||||
|
@ -653,11 +753,11 @@ def getHashtagsFromPost(postJsonObject: {}) -> []:
|
|||
return tags
|
||||
|
||||
|
||||
def addAccountBlogsToNewswire(baseDir: str, nickname: str, domain: str,
|
||||
newswire: {},
|
||||
maxBlogsPerAccount: int,
|
||||
indexFilename: str,
|
||||
maxTags: int) -> None:
|
||||
def _addAccountBlogsToNewswire(baseDir: str, nickname: str, domain: str,
|
||||
newswire: {},
|
||||
maxBlogsPerAccount: int,
|
||||
indexFilename: str,
|
||||
maxTags: int) -> None:
|
||||
"""Adds blogs for the given account to the newswire
|
||||
"""
|
||||
if not os.path.isfile(indexFilename):
|
||||
|
@ -703,7 +803,7 @@ def addAccountBlogsToNewswire(baseDir: str, nickname: str, domain: str,
|
|||
postJsonObject = None
|
||||
if fullPostFilename:
|
||||
postJsonObject = loadJson(fullPostFilename)
|
||||
if isNewswireBlogPost(postJsonObject):
|
||||
if _isNewswireBlogPost(postJsonObject):
|
||||
published = postJsonObject['object']['published']
|
||||
published = published.replace('T', ' ')
|
||||
published = published.replace('Z', '+00:00')
|
||||
|
@ -712,24 +812,25 @@ def addAccountBlogsToNewswire(baseDir: str, nickname: str, domain: str,
|
|||
votes = loadJson(fullPostFilename + '.votes')
|
||||
content = postJsonObject['object']['content']
|
||||
description = firstParagraphFromString(content)
|
||||
description = removeCDATA(description)
|
||||
addNewswireDictEntry(baseDir, domain,
|
||||
newswire, published,
|
||||
postJsonObject['object']['summary'],
|
||||
postJsonObject['object']['url'],
|
||||
votes, fullPostFilename,
|
||||
description, moderated, False,
|
||||
getHashtagsFromPost(postJsonObject),
|
||||
maxTags)
|
||||
description = _removeCDATA(description)
|
||||
tagsFromPost = _getHashtagsFromPost(postJsonObject)
|
||||
_addNewswireDictEntry(baseDir, domain,
|
||||
newswire, published,
|
||||
postJsonObject['object']['summary'],
|
||||
postJsonObject['object']['url'],
|
||||
votes, fullPostFilename,
|
||||
description, moderated, False,
|
||||
tagsFromPost,
|
||||
maxTags)
|
||||
|
||||
ctr += 1
|
||||
if ctr >= maxBlogsPerAccount:
|
||||
break
|
||||
|
||||
|
||||
def addBlogsToNewswire(baseDir: str, domain: str, newswire: {},
|
||||
maxBlogsPerAccount: int,
|
||||
maxTags: int) -> None:
|
||||
def _addBlogsToNewswire(baseDir: str, domain: str, newswire: {},
|
||||
maxBlogsPerAccount: int,
|
||||
maxTags: int) -> None:
|
||||
"""Adds blogs from each user account into the newswire
|
||||
"""
|
||||
moderationDict = {}
|
||||
|
@ -757,9 +858,10 @@ def addBlogsToNewswire(baseDir: str, domain: str, newswire: {},
|
|||
blogsIndex = accountDir + '/tlblogs.index'
|
||||
if os.path.isfile(blogsIndex):
|
||||
domain = handle.split('@')[1]
|
||||
addAccountBlogsToNewswire(baseDir, nickname, domain,
|
||||
newswire, maxBlogsPerAccount,
|
||||
blogsIndex, maxTags)
|
||||
_addAccountBlogsToNewswire(baseDir, nickname, domain,
|
||||
newswire, maxBlogsPerAccount,
|
||||
blogsIndex, maxTags)
|
||||
break
|
||||
|
||||
# sort the moderation dict into chronological order, latest first
|
||||
sortedModerationDict = \
|
||||
|
@ -825,8 +927,8 @@ def getDictFromNewswire(session, baseDir: str, domain: str,
|
|||
result[dateStr] = item
|
||||
|
||||
# add blogs from each user account
|
||||
addBlogsToNewswire(baseDir, domain, result,
|
||||
maxPostsPerSource, maxTags)
|
||||
_addBlogsToNewswire(baseDir, domain, result,
|
||||
maxPostsPerSource, maxTags)
|
||||
|
||||
# sort into chronological order, latest first
|
||||
sortedResult = OrderedDict(sorted(result.items(), reverse=True))
|
||||
|
|
|
@ -14,6 +14,7 @@ from posts import outboxMessageCreateWrap
|
|||
from posts import savePostToBox
|
||||
from posts import sendToFollowersThread
|
||||
from posts import sendToNamedAddresses
|
||||
from utils import getFullDomain
|
||||
from utils import removeIdEnding
|
||||
from utils import getDomainFromActor
|
||||
from blocking import isBlockedDomain
|
||||
|
@ -113,9 +114,7 @@ def postMessageToOutbox(messageJson: {}, postToNickname: str,
|
|||
str(messageJson))
|
||||
return False
|
||||
testDomain, testPort = getDomainFromActor(messageJson['actor'])
|
||||
if testPort:
|
||||
if testPort != 80 and testPort != 443:
|
||||
testDomain = testDomain + ':' + str(testPort)
|
||||
testDomain = getFullDomain(testDomain, testPort)
|
||||
if isBlockedDomain(baseDir, testDomain):
|
||||
if debug:
|
||||
print('DEBUG: domain is blocked: ' + messageJson['actor'])
|
||||
|
|
119
person.py
|
@ -35,6 +35,7 @@ from auth import storeBasicCredentials
|
|||
from auth import removePassword
|
||||
from roles import setRole
|
||||
from media import removeMetaData
|
||||
from utils import getFullDomain
|
||||
from utils import validNickname
|
||||
from utils import loadJson
|
||||
from utils import saveJson
|
||||
|
@ -68,11 +69,7 @@ def setProfileImage(baseDir: str, httpPrefix: str, nickname: str, domain: str,
|
|||
|
||||
if ':' in domain:
|
||||
domain = domain.split(':')[0]
|
||||
fullDomain = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
fullDomain = domain + ':' + str(port)
|
||||
fullDomain = getFullDomain(domain, port)
|
||||
|
||||
handle = nickname + '@' + domain
|
||||
personFilename = baseDir + '/accounts/' + handle + '.json'
|
||||
|
@ -137,7 +134,7 @@ def setOrganizationScheme(baseDir: str, nickname: str, domain: str,
|
|||
return True
|
||||
|
||||
|
||||
def accountExists(baseDir: str, nickname: str, domain: str) -> bool:
|
||||
def _accountExists(baseDir: str, nickname: str, domain: str) -> bool:
|
||||
"""Returns true if the given account exists
|
||||
"""
|
||||
if ':' in domain:
|
||||
|
@ -193,14 +190,21 @@ def getDefaultPersonContext() -> str:
|
|||
'identityKey': {'@id': 'toot:identityKey', '@type': '@id'},
|
||||
'fingerprintKey': {'@id': 'toot:fingerprintKey', '@type': '@id'},
|
||||
'messageFranking': 'toot:messageFranking',
|
||||
'publicKeyBase64': 'toot:publicKeyBase64'
|
||||
'publicKeyBase64': 'toot:publicKeyBase64',
|
||||
'discoverable': 'toot:discoverable',
|
||||
'orgSchema': 'toot:orgSchema',
|
||||
'shares': 'toot:shares',
|
||||
'skills': 'toot:skills',
|
||||
'roles': 'toot:roles',
|
||||
'availability': 'toot:availability',
|
||||
'nomadicLocations': 'toot:nomadicLocations'
|
||||
}
|
||||
|
||||
|
||||
def createPersonBase(baseDir: str, nickname: str, domain: str, port: int,
|
||||
httpPrefix: str, saveToFile: bool,
|
||||
manualFollowerApproval: bool,
|
||||
password=None) -> (str, str, {}, {}):
|
||||
def _createPersonBase(baseDir: str, nickname: str, domain: str, port: int,
|
||||
httpPrefix: str, saveToFile: bool,
|
||||
manualFollowerApproval: bool,
|
||||
password=None) -> (str, str, {}, {}):
|
||||
"""Returns the private key, public key, actor and webfinger endpoint
|
||||
"""
|
||||
privateKeyPem, publicKeyPem = generateRSAKey()
|
||||
|
@ -213,10 +217,7 @@ def createPersonBase(baseDir: str, nickname: str, domain: str, port: int,
|
|||
|
||||
handle = nickname + '@' + domain
|
||||
originalDomain = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
domain = domain + ':' + str(port)
|
||||
domain = getFullDomain(domain, port)
|
||||
|
||||
personType = 'Person'
|
||||
# Enable follower approval by default
|
||||
|
@ -285,6 +286,7 @@ def createPersonBase(baseDir: str, nickname: str, domain: str, port: int,
|
|||
},
|
||||
'inbox': inboxStr,
|
||||
'manuallyApprovesFollowers': approveFollowers,
|
||||
'discoverable': False,
|
||||
'name': personName,
|
||||
'outbox': personId+'/outbox',
|
||||
'preferredUsername': personName,
|
||||
|
@ -375,7 +377,7 @@ def registerAccount(baseDir: str, httpPrefix: str, domain: str, port: int,
|
|||
manualFollowerApproval: bool) -> bool:
|
||||
"""Registers a new account from the web interface
|
||||
"""
|
||||
if accountExists(baseDir, nickname, domain):
|
||||
if _accountExists(baseDir, nickname, domain):
|
||||
return False
|
||||
if not validNickname(domain, nickname):
|
||||
print('REGISTER: Nickname ' + nickname + ' is invalid')
|
||||
|
@ -418,10 +420,7 @@ def savePersonQrcode(baseDir: str,
|
|||
nickname + '@' + domain + '/qrcode.png'
|
||||
if os.path.isfile(qrcodeFilename):
|
||||
return
|
||||
handle = '@' + nickname + '@' + domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
handle = handle + ':' + str(port)
|
||||
handle = getFullDomain('@' + nickname + '@' + domain, port)
|
||||
url = pyqrcode.create(handle)
|
||||
url.png(qrcodeFilename, scale)
|
||||
|
||||
|
@ -450,12 +449,12 @@ def createPerson(baseDir: str, nickname: str, domain: str, port: int,
|
|||
return None, None, None, None
|
||||
|
||||
(privateKeyPem, publicKeyPem,
|
||||
newPerson, webfingerEndpoint) = createPersonBase(baseDir, nickname,
|
||||
domain, port,
|
||||
httpPrefix,
|
||||
saveToFile,
|
||||
manualFollowerApproval,
|
||||
password)
|
||||
newPerson, webfingerEndpoint) = _createPersonBase(baseDir, nickname,
|
||||
domain, port,
|
||||
httpPrefix,
|
||||
saveToFile,
|
||||
manualFollowerApproval,
|
||||
password)
|
||||
if not getConfigParam(baseDir, 'admin'):
|
||||
if nickname != 'news':
|
||||
# print(nickname+' becomes the instance admin and a moderator')
|
||||
|
@ -526,8 +525,8 @@ def createSharedInbox(baseDir: str, nickname: str, domain: str, port: int,
|
|||
httpPrefix: str) -> (str, str, {}, {}):
|
||||
"""Generates the shared inbox
|
||||
"""
|
||||
return createPersonBase(baseDir, nickname, domain, port, httpPrefix,
|
||||
True, True, None)
|
||||
return _createPersonBase(baseDir, nickname, domain, port, httpPrefix,
|
||||
True, True, None)
|
||||
|
||||
|
||||
def createNewsInbox(baseDir: str, domain: str, port: int,
|
||||
|
@ -723,48 +722,6 @@ def personBoxJson(recentPostsCache: {},
|
|||
return None
|
||||
|
||||
|
||||
def personInboxJson(recentPostsCache: {},
|
||||
baseDir: str, domain: str, port: int, path: str,
|
||||
httpPrefix: str, noOfItems: int) -> []:
|
||||
"""Obtain the inbox feed for the given person
|
||||
Authentication is expected to have already happened
|
||||
"""
|
||||
if '/inbox' not in path:
|
||||
return None
|
||||
|
||||
# Only show the header by default
|
||||
headerOnly = True
|
||||
|
||||
# handle page numbers
|
||||
pageNumber = None
|
||||
if '?page=' in path:
|
||||
pageNumber = path.split('?page=')[1]
|
||||
if pageNumber == 'true':
|
||||
pageNumber = 1
|
||||
else:
|
||||
try:
|
||||
pageNumber = int(pageNumber)
|
||||
except BaseException:
|
||||
pass
|
||||
path = path.split('?page=')[0]
|
||||
headerOnly = False
|
||||
|
||||
if not path.endswith('/inbox'):
|
||||
return None
|
||||
nickname = None
|
||||
if path.startswith('/users/'):
|
||||
nickname = path.replace('/users/', '', 1).replace('/inbox', '')
|
||||
if path.startswith('/@'):
|
||||
nickname = path.replace('/@', '', 1).replace('/inbox', '')
|
||||
if not nickname:
|
||||
return None
|
||||
if not validNickname(domain, nickname):
|
||||
return None
|
||||
return createInbox(recentPostsCache, baseDir, nickname,
|
||||
domain, port, httpPrefix,
|
||||
noOfItems, headerOnly, pageNumber)
|
||||
|
||||
|
||||
def setDisplayNickname(baseDir: str, nickname: str, domain: str,
|
||||
displayName: str) -> bool:
|
||||
if len(displayName) > 32:
|
||||
|
@ -801,7 +758,7 @@ def setBio(baseDir: str, nickname: str, domain: str, bio: str) -> bool:
|
|||
return True
|
||||
|
||||
|
||||
def unsuspendAccount(baseDir: str, nickname: str) -> None:
|
||||
def reenableAccount(baseDir: str, nickname: str) -> None:
|
||||
"""Removes an account suspention
|
||||
"""
|
||||
suspendedFilename = baseDir + '/accounts/suspended.txt'
|
||||
|
@ -868,11 +825,7 @@ def canRemovePost(baseDir: str, nickname: str,
|
|||
if '/statuses/' not in postId:
|
||||
return False
|
||||
|
||||
domainFull = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
domainFull = domain + ':' + str(port)
|
||||
domainFull = getFullDomain(domain, port)
|
||||
|
||||
# is the post by the admin?
|
||||
adminNickname = getConfigParam(baseDir, 'admin')
|
||||
|
@ -892,17 +845,13 @@ def canRemovePost(baseDir: str, nickname: str,
|
|||
return True
|
||||
|
||||
|
||||
def removeTagsForNickname(baseDir: str, nickname: str,
|
||||
domain: str, port: int) -> None:
|
||||
def _removeTagsForNickname(baseDir: str, nickname: str,
|
||||
domain: str, port: int) -> None:
|
||||
"""Removes tags for a nickname
|
||||
"""
|
||||
if not os.path.isdir(baseDir + '/tags'):
|
||||
return
|
||||
domainFull = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
domainFull = domain + ':' + str(port)
|
||||
domainFull = getFullDomain(domain, port)
|
||||
matchStr = domainFull + '/users/' + nickname + '/'
|
||||
directory = os.fsencode(baseDir + '/tags/')
|
||||
for f in os.scandir(directory):
|
||||
|
@ -948,10 +897,10 @@ def removeAccount(baseDir: str, nickname: str,
|
|||
if moderator.strip('\n') == nickname:
|
||||
return False
|
||||
|
||||
unsuspendAccount(baseDir, nickname)
|
||||
reenableAccount(baseDir, nickname)
|
||||
handle = nickname + '@' + domain
|
||||
removePassword(baseDir, nickname)
|
||||
removeTagsForNickname(baseDir, nickname, domain, port)
|
||||
_removeTagsForNickname(baseDir, nickname, domain, port)
|
||||
if os.path.isdir(baseDir + '/deactivated/' + handle):
|
||||
shutil.rmtree(baseDir + '/deactivated/' + handle)
|
||||
if os.path.isdir(baseDir + '/accounts/' + handle):
|
||||
|
|
|
@ -77,8 +77,8 @@ def getPetName(baseDir: str, nickname: str, domain: str,
|
|||
return ''
|
||||
|
||||
|
||||
def getPetNameHandle(baseDir: str, nickname: str, domain: str,
|
||||
petname: str) -> str:
|
||||
def _getPetNameHandle(baseDir: str, nickname: str, domain: str,
|
||||
petname: str) -> str:
|
||||
"""Given a petname returns the handle
|
||||
"""
|
||||
if petname.startswith('@'):
|
||||
|
@ -113,7 +113,7 @@ def resolvePetnames(baseDir: str, nickname: str, domain: str,
|
|||
if not wrd.startswith('@'):
|
||||
break
|
||||
# does a petname handle exist for this?
|
||||
handle = getPetNameHandle(baseDir, nickname, domain, wrd)
|
||||
handle = _getPetNameHandle(baseDir, nickname, domain, wrd)
|
||||
if not handle:
|
||||
continue
|
||||
# replace the petname with the handle
|
||||
|
|
|
@ -234,7 +234,7 @@ def link(input_, ctx, options=None):
|
|||
return frame(input, frame, options)
|
||||
|
||||
|
||||
def normalize(input_, options=None):
|
||||
def normalize(input_: {}, options=None):
|
||||
"""
|
||||
Performs JSON-LD normalization.
|
||||
|
||||
|
|
33
roles.py
|
@ -11,6 +11,7 @@ from webfinger import webfingerHandle
|
|||
from auth import createBasicAuthHeader
|
||||
from posts import getPersonBox
|
||||
from session import postJson
|
||||
from utils import getFullDomain
|
||||
from utils import getNicknameFromActor
|
||||
from utils import getDomainFromActor
|
||||
from utils import loadJson
|
||||
|
@ -62,7 +63,7 @@ def clearEditorStatus(baseDir: str) -> None:
|
|||
saveJson(actorJson, filename)
|
||||
|
||||
|
||||
def addModerator(baseDir: str, nickname: str, domain: str) -> None:
|
||||
def _addModerator(baseDir: str, nickname: str, domain: str) -> None:
|
||||
"""Adds a moderator nickname to the file
|
||||
"""
|
||||
if ':' in domain:
|
||||
|
@ -91,7 +92,7 @@ def addModerator(baseDir: str, nickname: str, domain: str) -> None:
|
|||
f.write(nickname + '\n')
|
||||
|
||||
|
||||
def removeModerator(baseDir: str, nickname: str):
|
||||
def _removeModerator(baseDir: str, nickname: str):
|
||||
"""Removes a moderator nickname from the file
|
||||
"""
|
||||
moderatorsFile = baseDir + '/accounts/moderators.txt'
|
||||
|
@ -124,7 +125,7 @@ def setRole(baseDir: str, nickname: str, domain: str,
|
|||
if role:
|
||||
# add the role
|
||||
if project == 'instance' and 'role' == 'moderator':
|
||||
addModerator(baseDir, nickname, domain)
|
||||
_addModerator(baseDir, nickname, domain)
|
||||
if actorJson['roles'].get(project):
|
||||
if role not in actorJson['roles'][project]:
|
||||
actorJson['roles'][project].append(role)
|
||||
|
@ -133,7 +134,7 @@ def setRole(baseDir: str, nickname: str, domain: str,
|
|||
else:
|
||||
# remove the role
|
||||
if project == 'instance':
|
||||
removeModerator(baseDir, nickname)
|
||||
_removeModerator(baseDir, nickname)
|
||||
if actorJson['roles'].get(project):
|
||||
actorJson['roles'][project].remove(role)
|
||||
# if the project contains no roles then remove it
|
||||
|
@ -143,8 +144,8 @@ def setRole(baseDir: str, nickname: str, domain: str,
|
|||
return True
|
||||
|
||||
|
||||
def getRoles(baseDir: str, nickname: str, domain: str,
|
||||
project: str) -> []:
|
||||
def _getRoles(baseDir: str, nickname: str, domain: str,
|
||||
project: str) -> []:
|
||||
"""Returns the roles for a given person on a given project
|
||||
"""
|
||||
actorFilename = baseDir + '/accounts/' + \
|
||||
|
@ -197,8 +198,8 @@ def outboxDelegate(baseDir: str, authenticatedNickname: str,
|
|||
# instance delegators can delagate to other projects
|
||||
# than their own
|
||||
canDelegate = False
|
||||
delegatorRoles = getRoles(baseDir, delegatorNickname,
|
||||
domain, 'instance')
|
||||
delegatorRoles = _getRoles(baseDir, delegatorNickname,
|
||||
domain, 'instance')
|
||||
if delegatorRoles:
|
||||
if 'delegator' in delegatorRoles:
|
||||
canDelegate = True
|
||||
|
@ -206,8 +207,8 @@ def outboxDelegate(baseDir: str, authenticatedNickname: str,
|
|||
if not canDelegate:
|
||||
canDelegate = True
|
||||
# non-instance delegators can only delegate within their project
|
||||
delegatorRoles = getRoles(baseDir, delegatorNickname,
|
||||
domain, project)
|
||||
delegatorRoles = _getRoles(baseDir, delegatorNickname,
|
||||
domain, project)
|
||||
if delegatorRoles:
|
||||
if 'delegator' not in delegatorRoles:
|
||||
return False
|
||||
|
@ -229,7 +230,7 @@ def outboxDelegate(baseDir: str, authenticatedNickname: str,
|
|||
return True
|
||||
|
||||
# what roles is this person already assigned to?
|
||||
existingRoles = getRoles(baseDir, nickname, domain, project)
|
||||
existingRoles = _getRoles(baseDir, nickname, domain, project)
|
||||
if existingRoles:
|
||||
if role in existingRoles:
|
||||
if debug:
|
||||
|
@ -259,12 +260,7 @@ def sendRoleViaServer(baseDir: str, session,
|
|||
print('WARN: No session for sendRoleViaServer')
|
||||
return 6
|
||||
|
||||
delegatorDomainFull = delegatorDomain
|
||||
if delegatorPort:
|
||||
if delegatorPort != 80 and delegatorPort != 443:
|
||||
if ':' not in delegatorDomain:
|
||||
delegatorDomainFull = \
|
||||
delegatorDomain + ':' + str(delegatorPort)
|
||||
delegatorDomainFull = getFullDomain(delegatorDomain, delegatorPort)
|
||||
|
||||
toUrl = \
|
||||
httpPrefix + '://' + delegatorDomainFull + '/users/' + nickname
|
||||
|
@ -320,7 +316,8 @@ def sendRoleViaServer(baseDir: str, session,
|
|||
wfRequest, personCache,
|
||||
projectVersion, httpPrefix,
|
||||
delegatorNickname,
|
||||
delegatorDomain, postToBox)
|
||||
delegatorDomain, postToBox,
|
||||
765672)
|
||||
|
||||
if not inboxUrl:
|
||||
if debug:
|
||||
|
|
13
schedule.py
|
@ -14,8 +14,8 @@ from utils import loadJson
|
|||
from outbox import postMessageToOutbox
|
||||
|
||||
|
||||
def updatePostSchedule(baseDir: str, handle: str, httpd,
|
||||
maxScheduledPosts: int) -> None:
|
||||
def _updatePostSchedule(baseDir: str, handle: str, httpd,
|
||||
maxScheduledPosts: int) -> None:
|
||||
"""Checks if posts are due to be delivered and if so moves them to the outbox
|
||||
"""
|
||||
scheduleIndexFilename = baseDir + '/accounts/' + handle + '/schedule.index'
|
||||
|
@ -104,7 +104,9 @@ def updatePostSchedule(baseDir: str, handle: str, httpd,
|
|||
httpd.proxyType,
|
||||
httpd.projectVersion,
|
||||
httpd.debug,
|
||||
httpd.YTReplacementDomain):
|
||||
httpd.YTReplacementDomain,
|
||||
httpd.showPublishedDateOnly,
|
||||
httpd.allowLocalNetworkAccess):
|
||||
indexLines.remove(line)
|
||||
os.remove(postFilename)
|
||||
continue
|
||||
|
@ -145,7 +147,8 @@ def runPostSchedule(baseDir: str, httpd, maxScheduledPosts: int):
|
|||
baseDir + '/accounts/' + account + '/schedule.index'
|
||||
if not os.path.isfile(scheduleIndexFilename):
|
||||
continue
|
||||
updatePostSchedule(baseDir, account, httpd, maxScheduledPosts)
|
||||
_updatePostSchedule(baseDir, account, httpd, maxScheduledPosts)
|
||||
break
|
||||
|
||||
|
||||
def runPostScheduleWatchdog(projectVersion: str, httpd) -> None:
|
||||
|
@ -157,7 +160,7 @@ def runPostScheduleWatchdog(projectVersion: str, httpd) -> None:
|
|||
httpd.thrPostSchedule.start()
|
||||
while True:
|
||||
time.sleep(20)
|
||||
if not httpd.thrPostSchedule.isAlive():
|
||||
if not httpd.thrPostSchedule.is_alive():
|
||||
httpd.thrPostSchedule.kill()
|
||||
httpd.thrPostSchedule = \
|
||||
postScheduleOriginal.clone(runPostSchedule)
|
||||
|
|
|
@ -0,0 +1,2 @@
|
|||
#!/bin/bash
|
||||
journalctl -u epicyon -r | grep "Sending profile update to\|a shared inbox"
|
|
@ -0,0 +1,2 @@
|
|||
#!/bin/bash
|
||||
journalctl -u epicyon -r | grep "Sending post to followers"
|
18
session.py
|
@ -58,7 +58,7 @@ def getJson(session, url: str, headers: {}, params: {},
|
|||
domain='testdomain') -> {}:
|
||||
if not isinstance(url, str):
|
||||
print('url: ' + str(url))
|
||||
print('ERROR: getJson url should be a string')
|
||||
print('ERROR: getJson failed, url should be a string')
|
||||
return None
|
||||
sessionParams = {}
|
||||
sessionHeaders = {}
|
||||
|
@ -71,23 +71,23 @@ def getJson(session, url: str, headers: {}, params: {},
|
|||
sessionHeaders['User-Agent'] += \
|
||||
'; +' + httpPrefix + '://' + domain + '/'
|
||||
if not session:
|
||||
print('WARN: no session specified for getJson')
|
||||
print('WARN: getJson failed, no session specified for getJson')
|
||||
try:
|
||||
result = session.get(url, headers=sessionHeaders, params=sessionParams)
|
||||
return result.json()
|
||||
except requests.exceptions.RequestException as e:
|
||||
print('ERROR: getJson failed\nurl: ' + str(url) + '\n' +
|
||||
'headers: ' + str(sessionHeaders) + '\n' +
|
||||
'params: ' + str(sessionParams) + '\n')
|
||||
print('ERROR: getJson failed\nurl: ' + str(url) + ' ' +
|
||||
'headers: ' + str(sessionHeaders) + ' ' +
|
||||
'params: ' + str(sessionParams))
|
||||
print(e)
|
||||
except ValueError as e:
|
||||
print('ERROR: getJson failed\nurl: ' + str(url) + '\n' +
|
||||
'headers: ' + str(sessionHeaders) + '\n' +
|
||||
'params: ' + str(sessionParams) + '\n')
|
||||
print('ERROR: getJson failed\nurl: ' + str(url) + ' ' +
|
||||
'headers: ' + str(sessionHeaders) + ' ' +
|
||||
'params: ' + str(sessionParams) + ' ')
|
||||
print(e)
|
||||
except SocketError as e:
|
||||
if e.errno == errno.ECONNRESET:
|
||||
print('WARN: connection was reset during getJson')
|
||||
print('WARN: getJson failed, connection was reset during getJson')
|
||||
print(e)
|
||||
return None
|
||||
|
||||
|
|
36
shares.py
|
@ -13,6 +13,7 @@ from auth import createBasicAuthHeader
|
|||
from posts import getPersonBox
|
||||
from session import postJson
|
||||
from session import postImage
|
||||
from utils import getFullDomain
|
||||
from utils import validNickname
|
||||
from utils import loadJson
|
||||
from utils import saveJson
|
||||
|
@ -115,11 +116,7 @@ def addShare(baseDir: str,
|
|||
imageFilename = sharesImageFilename + '.' + ext
|
||||
moveImage = True
|
||||
|
||||
domainFull = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
domainFull = domain + ':' + str(port)
|
||||
domainFull = getFullDomain(domain, port)
|
||||
|
||||
# copy or move the image for the shared item to its destination
|
||||
if imageFilename:
|
||||
|
@ -167,6 +164,7 @@ def addShare(baseDir: str,
|
|||
'/users/' + nickname + '/tlshares')
|
||||
except BaseException:
|
||||
pass
|
||||
break
|
||||
|
||||
|
||||
def expireShares(baseDir: str) -> None:
|
||||
|
@ -178,10 +176,11 @@ def expireShares(baseDir: str) -> None:
|
|||
continue
|
||||
nickname = account.split('@')[0]
|
||||
domain = account.split('@')[1]
|
||||
expireSharesForAccount(baseDir, nickname, domain)
|
||||
_expireSharesForAccount(baseDir, nickname, domain)
|
||||
break
|
||||
|
||||
|
||||
def expireSharesForAccount(baseDir: str, nickname: str, domain: str) -> None:
|
||||
def _expireSharesForAccount(baseDir: str, nickname: str, domain: str) -> None:
|
||||
"""Removes expired items from shares for a particular account
|
||||
"""
|
||||
handleDomain = domain
|
||||
|
@ -245,10 +244,7 @@ def getSharesFeedForPerson(baseDir: str,
|
|||
if not validNickname(domain, nickname):
|
||||
return None
|
||||
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
domain = domain + ':' + str(port)
|
||||
domain = getFullDomain(domain, port)
|
||||
|
||||
handleDomain = domain
|
||||
if ':' in handleDomain:
|
||||
|
@ -329,11 +325,7 @@ def sendShareViaServer(baseDir, session,
|
|||
print('WARN: No session for sendShareViaServer')
|
||||
return 6
|
||||
|
||||
fromDomainFull = fromDomain
|
||||
if fromPort:
|
||||
if fromPort != 80 and fromPort != 443:
|
||||
if ':' not in fromDomain:
|
||||
fromDomainFull = fromDomain + ':' + str(fromPort)
|
||||
fromDomainFull = getFullDomain(fromDomain, fromPort)
|
||||
|
||||
toUrl = 'https://www.w3.org/ns/activitystreams#Public'
|
||||
ccUrl = httpPrefix + '://' + fromDomainFull + \
|
||||
|
@ -384,7 +376,8 @@ def sendShareViaServer(baseDir, session,
|
|||
avatarUrl, displayName) = getPersonBox(baseDir, session, wfRequest,
|
||||
personCache, projectVersion,
|
||||
httpPrefix, fromNickname,
|
||||
fromDomain, postToBox)
|
||||
fromDomain, postToBox,
|
||||
83653)
|
||||
|
||||
if not inboxUrl:
|
||||
if debug:
|
||||
|
@ -437,11 +430,7 @@ def sendUndoShareViaServer(baseDir: str, session,
|
|||
print('WARN: No session for sendUndoShareViaServer')
|
||||
return 6
|
||||
|
||||
fromDomainFull = fromDomain
|
||||
if fromPort:
|
||||
if fromPort != 80 and fromPort != 443:
|
||||
if ':' not in fromDomain:
|
||||
fromDomainFull = fromDomain + ':' + str(fromPort)
|
||||
fromDomainFull = getFullDomain(fromDomain, fromPort)
|
||||
|
||||
toUrl = 'https://www.w3.org/ns/activitystreams#Public'
|
||||
ccUrl = httpPrefix + '://' + fromDomainFull + \
|
||||
|
@ -486,7 +475,8 @@ def sendUndoShareViaServer(baseDir: str, session,
|
|||
avatarUrl, displayName) = getPersonBox(baseDir, session, wfRequest,
|
||||
personCache, projectVersion,
|
||||
httpPrefix, fromNickname,
|
||||
fromDomain, postToBox)
|
||||
fromDomain, postToBox,
|
||||
12663)
|
||||
|
||||
if not inboxUrl:
|
||||
if debug:
|
||||
|
|
20
skills.py
|
@ -11,6 +11,7 @@ from webfinger import webfingerHandle
|
|||
from auth import createBasicAuthHeader
|
||||
from posts import getPersonBox
|
||||
from session import postJson
|
||||
from utils import getFullDomain
|
||||
from utils import getNicknameFromActor
|
||||
from utils import getDomainFromActor
|
||||
from utils import loadJson
|
||||
|
@ -40,17 +41,6 @@ def setSkillLevel(baseDir: str, nickname: str, domain: str,
|
|||
return True
|
||||
|
||||
|
||||
def setSkills(baseDir: str, nickname: str, domain: str, skills: {}) -> None:
|
||||
actorFilename = baseDir + '/accounts/' + nickname + '@' + domain + '.json'
|
||||
if not os.path.isfile(actorFilename):
|
||||
return False
|
||||
|
||||
actorJson = loadJson(actorFilename)
|
||||
if actorJson:
|
||||
actorJson['skills'] = skills
|
||||
saveJson(actorJson, actorFilename)
|
||||
|
||||
|
||||
def getSkills(baseDir: str, nickname: str, domain: str) -> []:
|
||||
"""Returns the skills for a given person
|
||||
"""
|
||||
|
@ -108,11 +98,7 @@ def sendSkillViaServer(baseDir: str, session, nickname: str, password: str,
|
|||
print('WARN: No session for sendSkillViaServer')
|
||||
return 6
|
||||
|
||||
domainFull = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
domainFull = domain + ':' + str(port)
|
||||
domainFull = getFullDomain(domain, port)
|
||||
|
||||
actor = httpPrefix + '://' + domainFull + '/users/' + nickname
|
||||
toUrl = actor
|
||||
|
@ -155,7 +141,7 @@ def sendSkillViaServer(baseDir: str, session, nickname: str, password: str,
|
|||
avatarUrl, displayName) = getPersonBox(baseDir, session, wfRequest,
|
||||
personCache, projectVersion,
|
||||
httpPrefix, nickname, domain,
|
||||
postToBox)
|
||||
postToBox, 86725)
|
||||
|
||||
if not inboxUrl:
|
||||
if debug:
|
||||
|
|
10
socnet.py
|
@ -10,6 +10,7 @@ from session import createSession
|
|||
from webfinger import webfingerHandle
|
||||
from posts import getPersonBox
|
||||
from posts import getPostDomains
|
||||
from utils import getFullDomain
|
||||
|
||||
|
||||
def instancesGraph(baseDir: str, handles: str,
|
||||
|
@ -46,11 +47,7 @@ def instancesGraph(baseDir: str, handles: str,
|
|||
nickname = handle.split('@')[0]
|
||||
domain = handle.split('@')[1]
|
||||
|
||||
domainFull = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
domainFull = domain + ':' + str(port)
|
||||
domainFull = getFullDomain(domain, port)
|
||||
handle = httpPrefix + "://" + domainFull + "/@" + nickname
|
||||
wfRequest = \
|
||||
webfingerHandle(session, handle, httpPrefix,
|
||||
|
@ -68,7 +65,8 @@ def instancesGraph(baseDir: str, handles: str,
|
|||
avatarUrl, displayName) = getPersonBox(baseDir, session, wfRequest,
|
||||
personCache,
|
||||
projectVersion, httpPrefix,
|
||||
nickname, domain, 'outbox')
|
||||
nickname, domain, 'outbox',
|
||||
27261)
|
||||
postDomains = \
|
||||
getPostDomains(session, personUrl, 64, maxMentions, maxEmoji,
|
||||
maxAttachments, federationList,
|
||||
|
|
691
tests.py
|
@ -20,6 +20,7 @@ from cache import getPersonFromCache
|
|||
from threads import threadWithTrace
|
||||
from daemon import runDaemon
|
||||
from session import createSession
|
||||
from posts import getMentionedPeople
|
||||
from posts import validContentWarning
|
||||
from posts import deleteAllPosts
|
||||
from posts import createPublicPost
|
||||
|
@ -32,6 +33,7 @@ from follow import clearFollows
|
|||
from follow import clearFollowers
|
||||
from follow import sendFollowRequestViaServer
|
||||
from follow import sendUnfollowRequestViaServer
|
||||
from utils import getFullDomain
|
||||
from utils import validNickname
|
||||
from utils import firstParagraphFromString
|
||||
from utils import removeIdEnding
|
||||
|
@ -47,8 +49,8 @@ from utils import getStatusNumber
|
|||
from utils import getFollowersOfPerson
|
||||
from utils import removeHtml
|
||||
from follow import followerOfPerson
|
||||
from follow import unfollowPerson
|
||||
from follow import unfollowerOfPerson
|
||||
from follow import unfollowAccount
|
||||
from follow import unfollowerOfAccount
|
||||
from follow import sendFollowRequest
|
||||
from person import createPerson
|
||||
from person import setDisplayNickname
|
||||
|
@ -71,10 +73,11 @@ from delete import sendDeleteViaServer
|
|||
from inbox import jsonPostAllowsComments
|
||||
from inbox import validInbox
|
||||
from inbox import validInboxFilenames
|
||||
from inbox import guessHashtagCategory
|
||||
from categories import guessHashtagCategory
|
||||
from content import htmlReplaceEmailQuote
|
||||
from content import htmlReplaceQuoteMarks
|
||||
from content import dangerousMarkup
|
||||
from content import dangerousCSS
|
||||
from content import addWebLinks
|
||||
from content import replaceEmojiFromTags
|
||||
from content import addHtmlTags
|
||||
|
@ -98,7 +101,7 @@ thrBob = None
|
|||
thrEve = None
|
||||
|
||||
|
||||
def testHttpsigBase(withDigest):
|
||||
def _testHttpsigBase(withDigest):
|
||||
print('testHttpsig(' + str(withDigest) + ')')
|
||||
|
||||
baseDir = os.getcwd()
|
||||
|
@ -125,11 +128,7 @@ def testHttpsigBase(withDigest):
|
|||
}
|
||||
messageBodyJsonStr = json.dumps(messageBodyJson)
|
||||
|
||||
headersDomain = domain
|
||||
if port:
|
||||
if port != 80 and port != 443:
|
||||
if ':' not in domain:
|
||||
headersDomain = domain + ':' + str(port)
|
||||
headersDomain = getFullDomain(domain, port)
|
||||
|
||||
dateStr = strftime("%a, %d %b %Y %H:%M:%S %Z", gmtime())
|
||||
boxpath = '/inbox'
|
||||
|
@ -207,8 +206,8 @@ def testHttpsigBase(withDigest):
|
|||
|
||||
|
||||
def testHttpsig():
|
||||
testHttpsigBase(True)
|
||||
testHttpsigBase(False)
|
||||
_testHttpsigBase(True)
|
||||
_testHttpsigBase(False)
|
||||
|
||||
|
||||
def testCache():
|
||||
|
@ -237,11 +236,11 @@ def testThreads():
|
|||
args=('test',),
|
||||
daemon=True)
|
||||
thr.start()
|
||||
assert thr.isAlive() is True
|
||||
assert thr.is_alive() is True
|
||||
time.sleep(1)
|
||||
thr.kill()
|
||||
thr.join()
|
||||
assert thr.isAlive() is False
|
||||
assert thr.is_alive() is False
|
||||
|
||||
|
||||
def createServerAlice(path: str, domain: str, port: int,
|
||||
|
@ -275,19 +274,44 @@ def createServerAlice(path: str, domain: str, port: int,
|
|||
followerOfPerson(path, nickname, domain, 'bob', bobAddress,
|
||||
federationList, False)
|
||||
if hasPosts:
|
||||
testFollowersOnly = False
|
||||
testSaveToFile = True
|
||||
clientToServer = False
|
||||
testCommentsEnabled = True
|
||||
testAttachImageFilename = None
|
||||
testMediaType = None
|
||||
testImageDescription = None
|
||||
createPublicPost(path, nickname, domain, port, httpPrefix,
|
||||
"No wise fish would go anywhere without a porpoise",
|
||||
False, True, clientToServer, True,
|
||||
None, None, useBlurhash)
|
||||
testFollowersOnly,
|
||||
testSaveToFile,
|
||||
clientToServer,
|
||||
testCommentsEnabled,
|
||||
testAttachImageFilename,
|
||||
testMediaType,
|
||||
testImageDescription,
|
||||
useBlurhash)
|
||||
createPublicPost(path, nickname, domain, port, httpPrefix,
|
||||
"Curiouser and curiouser!", False, True,
|
||||
clientToServer, True, None, None, useBlurhash)
|
||||
"Curiouser and curiouser!",
|
||||
testFollowersOnly,
|
||||
testSaveToFile,
|
||||
clientToServer,
|
||||
testCommentsEnabled,
|
||||
testAttachImageFilename,
|
||||
testMediaType,
|
||||
testImageDescription,
|
||||
useBlurhash)
|
||||
createPublicPost(path, nickname, domain, port, httpPrefix,
|
||||
"In the gardens of memory, in the palace " +
|
||||
"of dreams, that is where you and I shall meet",
|
||||
False, True, clientToServer, True,
|
||||
None, None, useBlurhash)
|
||||
testFollowersOnly,
|
||||
testSaveToFile,
|
||||
clientToServer,
|
||||
testCommentsEnabled,
|
||||
testAttachImageFilename,
|
||||
testMediaType,
|
||||
testImageDescription,
|
||||
useBlurhash)
|
||||
global testServerAliceRunning
|
||||
testServerAliceRunning = True
|
||||
maxMentions = 10
|
||||
|
@ -296,9 +320,14 @@ def createServerAlice(path: str, domain: str, port: int,
|
|||
i2pDomain = None
|
||||
allowLocalNetworkAccess = True
|
||||
maxNewswirePosts = 20
|
||||
dormantMonths = 3
|
||||
sendThreadsTimeoutMins = 30
|
||||
maxFollowers = 10
|
||||
print('Server running: Alice')
|
||||
runDaemon(maxNewswirePosts, allowLocalNetworkAccess,
|
||||
2048, False, True, False, False, True, 10, False,
|
||||
runDaemon(sendThreadsTimeoutMins,
|
||||
dormantMonths, maxNewswirePosts,
|
||||
allowLocalNetworkAccess,
|
||||
2048, False, True, False, False, True, maxFollowers,
|
||||
0, 100, 1024, 5, False,
|
||||
0, False, 1, False, False, False,
|
||||
5, True, True, 'en', __version__,
|
||||
|
@ -343,19 +372,43 @@ def createServerBob(path: str, domain: str, port: int,
|
|||
followerOfPerson(path, nickname, domain,
|
||||
'alice', aliceAddress, federationList, False)
|
||||
if hasPosts:
|
||||
testFollowersOnly = False
|
||||
testSaveToFile = True
|
||||
testCommentsEnabled = True
|
||||
testAttachImageFilename = None
|
||||
testImageDescription = None
|
||||
testMediaType = None
|
||||
createPublicPost(path, nickname, domain, port, httpPrefix,
|
||||
"It's your life, live it your way.",
|
||||
False, True, clientToServer, True,
|
||||
None, None, useBlurhash)
|
||||
testFollowersOnly,
|
||||
testSaveToFile,
|
||||
clientToServer,
|
||||
testCommentsEnabled,
|
||||
testAttachImageFilename,
|
||||
testMediaType,
|
||||
testImageDescription,
|
||||
useBlurhash)
|
||||
createPublicPost(path, nickname, domain, port, httpPrefix,
|
||||
"One of the things I've realised is that " +
|
||||
"I am very simple",
|
||||
False, True, clientToServer, True,
|
||||
None, None, useBlurhash)
|
||||
testFollowersOnly,
|
||||
testSaveToFile,
|
||||
clientToServer,
|
||||
testCommentsEnabled,
|
||||
testAttachImageFilename,
|
||||
testMediaType,
|
||||
testImageDescription,
|
||||
useBlurhash)
|
||||
createPublicPost(path, nickname, domain, port, httpPrefix,
|
||||
"Quantum physics is a bit of a passion of mine",
|
||||
False, True, clientToServer, True,
|
||||
None, None, useBlurhash)
|
||||
testFollowersOnly,
|
||||
testSaveToFile,
|
||||
clientToServer,
|
||||
testCommentsEnabled,
|
||||
testAttachImageFilename,
|
||||
testMediaType,
|
||||
testImageDescription,
|
||||
useBlurhash)
|
||||
global testServerBobRunning
|
||||
testServerBobRunning = True
|
||||
maxMentions = 10
|
||||
|
@ -364,9 +417,14 @@ def createServerBob(path: str, domain: str, port: int,
|
|||
i2pDomain = None
|
||||
allowLocalNetworkAccess = True
|
||||
maxNewswirePosts = 20
|
||||
dormantMonths = 3
|
||||
sendThreadsTimeoutMins = 30
|
||||
maxFollowers = 10
|
||||
print('Server running: Bob')
|
||||
runDaemon(maxNewswirePosts, allowLocalNetworkAccess,
|
||||
2048, False, True, False, False, True, 10, False,
|
||||
runDaemon(sendThreadsTimeoutMins,
|
||||
dormantMonths, maxNewswirePosts,
|
||||
allowLocalNetworkAccess,
|
||||
2048, False, True, False, False, True, maxFollowers,
|
||||
0, 100, 1024, 5, False, 0,
|
||||
False, 1, False, False, False,
|
||||
5, True, True, 'en', __version__,
|
||||
|
@ -406,9 +464,14 @@ def createServerEve(path: str, domain: str, port: int, federationList: [],
|
|||
i2pDomain = None
|
||||
allowLocalNetworkAccess = True
|
||||
maxNewswirePosts = 20
|
||||
dormantMonths = 3
|
||||
sendThreadsTimeoutMins = 30
|
||||
maxFollowers = 10
|
||||
print('Server running: Eve')
|
||||
runDaemon(maxNewswirePosts, allowLocalNetworkAccess,
|
||||
2048, False, True, False, False, True, 10, False,
|
||||
runDaemon(sendThreadsTimeoutMins,
|
||||
dormantMonths, maxNewswirePosts,
|
||||
allowLocalNetworkAccess,
|
||||
2048, False, True, False, False, True, maxFollowers,
|
||||
0, 100, 1024, 5, False, 0,
|
||||
False, 1, False, False, False,
|
||||
5, True, True, 'en', __version__,
|
||||
|
@ -451,7 +514,7 @@ def testPostMessageBetweenServers():
|
|||
|
||||
global thrAlice
|
||||
if thrAlice:
|
||||
while thrAlice.isAlive():
|
||||
while thrAlice.is_alive():
|
||||
thrAlice.stop()
|
||||
time.sleep(1)
|
||||
thrAlice.kill()
|
||||
|
@ -465,7 +528,7 @@ def testPostMessageBetweenServers():
|
|||
|
||||
global thrBob
|
||||
if thrBob:
|
||||
while thrBob.isAlive():
|
||||
while thrBob.is_alive():
|
||||
thrBob.stop()
|
||||
time.sleep(1)
|
||||
thrBob.kill()
|
||||
|
@ -479,8 +542,8 @@ def testPostMessageBetweenServers():
|
|||
|
||||
thrAlice.start()
|
||||
thrBob.start()
|
||||
assert thrAlice.isAlive() is True
|
||||
assert thrBob.isAlive() is True
|
||||
assert thrAlice.is_alive() is True
|
||||
assert thrBob.is_alive() is True
|
||||
|
||||
# wait for both servers to be running
|
||||
while not (testServerAliceRunning and testServerBobRunning):
|
||||
|
@ -590,10 +653,12 @@ def testPostMessageBetweenServers():
|
|||
print('\n\n*******************************************************')
|
||||
print("Bob likes Alice's post")
|
||||
|
||||
aliceDomainStr = aliceDomain + ':' + str(alicePort)
|
||||
followerOfPerson(bobDir, 'bob', bobDomain, 'alice',
|
||||
aliceDomain + ':' + str(alicePort), federationList, False)
|
||||
aliceDomainStr, federationList, False)
|
||||
bobDomainStr = bobDomain + ':' + str(bobPort)
|
||||
followPerson(aliceDir, 'alice', aliceDomain, 'bob',
|
||||
bobDomain + ':' + str(bobPort), federationList, False)
|
||||
bobDomainStr, federationList, False)
|
||||
|
||||
sessionBob = createSession(proxyType)
|
||||
bobPostLog = []
|
||||
|
@ -679,11 +744,11 @@ def testPostMessageBetweenServers():
|
|||
# stop the servers
|
||||
thrAlice.kill()
|
||||
thrAlice.join()
|
||||
assert thrAlice.isAlive() is False
|
||||
assert thrAlice.is_alive() is False
|
||||
|
||||
thrBob.kill()
|
||||
thrBob.join()
|
||||
assert thrBob.isAlive() is False
|
||||
assert thrBob.is_alive() is False
|
||||
|
||||
os.chdir(baseDir)
|
||||
shutil.rmtree(aliceDir)
|
||||
|
@ -722,7 +787,7 @@ def testFollowBetweenServers():
|
|||
|
||||
global thrAlice
|
||||
if thrAlice:
|
||||
while thrAlice.isAlive():
|
||||
while thrAlice.is_alive():
|
||||
thrAlice.stop()
|
||||
time.sleep(1)
|
||||
thrAlice.kill()
|
||||
|
@ -736,7 +801,7 @@ def testFollowBetweenServers():
|
|||
|
||||
global thrBob
|
||||
if thrBob:
|
||||
while thrBob.isAlive():
|
||||
while thrBob.is_alive():
|
||||
thrBob.stop()
|
||||
time.sleep(1)
|
||||
thrBob.kill()
|
||||
|
@ -750,8 +815,8 @@ def testFollowBetweenServers():
|
|||
|
||||
thrAlice.start()
|
||||
thrBob.start()
|
||||
assert thrAlice.isAlive() is True
|
||||
assert thrBob.isAlive() is True
|
||||
assert thrAlice.is_alive() is True
|
||||
assert thrBob.is_alive() is True
|
||||
|
||||
# wait for all servers to be running
|
||||
ctr = 0
|
||||
|
@ -789,10 +854,10 @@ def testFollowBetweenServers():
|
|||
clientToServer, federationList,
|
||||
aliceSendThreads, alicePostLog,
|
||||
aliceCachedWebfingers, alicePersonCache,
|
||||
True, __version__, False)
|
||||
True, __version__)
|
||||
print('sendResult: ' + str(sendResult))
|
||||
|
||||
for t in range(10):
|
||||
for t in range(16):
|
||||
if os.path.isfile(bobDir + '/accounts/bob@' +
|
||||
bobDomain + '/followers.txt'):
|
||||
if os.path.isfile(aliceDir + '/accounts/alice@' +
|
||||
|
@ -851,11 +916,11 @@ def testFollowBetweenServers():
|
|||
# stop the servers
|
||||
thrAlice.kill()
|
||||
thrAlice.join()
|
||||
assert thrAlice.isAlive() is False
|
||||
assert thrAlice.is_alive() is False
|
||||
|
||||
thrBob.kill()
|
||||
thrBob.join()
|
||||
assert thrBob.isAlive() is False
|
||||
assert thrBob.is_alive() is False
|
||||
|
||||
# queue item removed
|
||||
time.sleep(4)
|
||||
|
@ -967,7 +1032,7 @@ def testNoOfFollowersOnDomain():
|
|||
noOfFollowersOnDomain(baseDir, nickname + '@' + domain, otherdomain)
|
||||
assert followersOnOtherDomain == 3
|
||||
|
||||
unfollowerOfPerson(baseDir, nickname, domain, 'sausagedog', otherdomain)
|
||||
unfollowerOfAccount(baseDir, nickname, domain, 'sausagedog', otherdomain)
|
||||
followersOnOtherDomain = \
|
||||
noOfFollowersOnDomain(baseDir, nickname + '@' + domain, otherdomain)
|
||||
assert followersOnOtherDomain == 2
|
||||
|
@ -1063,7 +1128,7 @@ def testFollows():
|
|||
assert(False)
|
||||
|
||||
assert(domainFound)
|
||||
unfollowPerson(baseDir, nickname, domain, 'batman', 'mesh.com')
|
||||
unfollowAccount(baseDir, nickname, domain, 'batman', 'mesh.com')
|
||||
|
||||
domainFound = False
|
||||
for followingDomain in f:
|
||||
|
@ -1279,7 +1344,7 @@ def testClientToServer():
|
|||
|
||||
global thrAlice
|
||||
if thrAlice:
|
||||
while thrAlice.isAlive():
|
||||
while thrAlice.is_alive():
|
||||
thrAlice.stop()
|
||||
time.sleep(1)
|
||||
thrAlice.kill()
|
||||
|
@ -1293,7 +1358,7 @@ def testClientToServer():
|
|||
|
||||
global thrBob
|
||||
if thrBob:
|
||||
while thrBob.isAlive():
|
||||
while thrBob.is_alive():
|
||||
thrBob.stop()
|
||||
time.sleep(1)
|
||||
thrBob.kill()
|
||||
|
@ -1307,8 +1372,8 @@ def testClientToServer():
|
|||
|
||||
thrAlice.start()
|
||||
thrBob.start()
|
||||
assert thrAlice.isAlive() is True
|
||||
assert thrBob.isAlive() is True
|
||||
assert thrAlice.is_alive() is True
|
||||
assert thrBob.is_alive() is True
|
||||
|
||||
# wait for both servers to be running
|
||||
ctr = 0
|
||||
|
@ -1603,11 +1668,11 @@ def testClientToServer():
|
|||
# stop the servers
|
||||
thrAlice.kill()
|
||||
thrAlice.join()
|
||||
assert thrAlice.isAlive() is False
|
||||
assert thrAlice.is_alive() is False
|
||||
|
||||
thrBob.kill()
|
||||
thrBob.join()
|
||||
assert thrBob.isAlive() is False
|
||||
assert thrBob.is_alive() is False
|
||||
|
||||
os.chdir(baseDir)
|
||||
# shutil.rmtree(aliceDir)
|
||||
|
@ -1683,7 +1748,7 @@ def testWebLinks():
|
|||
'This post has a web links https://somesite.net\n\nAnd some other text'
|
||||
linkedText = addWebLinks(exampleText)
|
||||
assert \
|
||||
'<a href="https://somesite.net" rel="nofollow noopener"' + \
|
||||
'<a href="https://somesite.net" rel="nofollow noopener noreferrer"' + \
|
||||
' target="_blank"><span class="invisible">https://' + \
|
||||
'</span><span class="ellipsis">somesite.net</span></a' in linkedText
|
||||
|
||||
|
@ -1742,7 +1807,8 @@ def testWebLinks():
|
|||
'they prefer to cling to their customs, beliefs, and traditions ' + \
|
||||
'rather than to accept the teachings of a war of each ' + \
|
||||
'against all"\n\n--Peter Kropotkin'
|
||||
resultText = removeLongWords(addWebLinks(exampleText), 40, [])
|
||||
testFnStr = addWebLinks(exampleText)
|
||||
resultText = removeLongWords(testFnStr, 40, [])
|
||||
assert resultText == exampleText
|
||||
assert 'ellipsis' not in resultText
|
||||
|
||||
|
@ -1756,7 +1822,8 @@ def testWebLinks():
|
|||
|
||||
exampleText = \
|
||||
'<p>Test1 test2 #YetAnotherExcessivelyLongwindedAndBoringHashtag</p>'
|
||||
resultText = removeLongWords(addWebLinks(exampleText), 40, [])
|
||||
testFnStr = addWebLinks(exampleText)
|
||||
resultText = removeLongWords(testFnStr, 40, [])
|
||||
assert(resultText ==
|
||||
'<p>Test1 test2 '
|
||||
'#YetAnotherExcessivelyLongwindedAndBorin\ngHashtag</p>')
|
||||
|
@ -1765,7 +1832,8 @@ def testWebLinks():
|
|||
"<p>Don't remove a p2p link " + \
|
||||
"rad:git:hwd1yrerc3mcgn8ga9rho3dqi4w33nep7kxmqezss4topyfgmexihp" + \
|
||||
"33xcw</p>"
|
||||
resultText = removeLongWords(addWebLinks(exampleText), 40, [])
|
||||
testFnStr = addWebLinks(exampleText)
|
||||
resultText = removeLongWords(testFnStr, 40, [])
|
||||
assert resultText == exampleText
|
||||
|
||||
|
||||
|
@ -1910,10 +1978,11 @@ def testRemoveTextFormatting():
|
|||
def testJsonld():
|
||||
print("testJsonld")
|
||||
jldDocument = {
|
||||
"@context": "https://www.w3.org/ns/activitystreams",
|
||||
"description": "My json document",
|
||||
"numberField": 83582,
|
||||
"object": {
|
||||
"content": "Some content"
|
||||
"content": "valid content"
|
||||
}
|
||||
}
|
||||
# privateKeyPem, publicKeyPem = generateRSAKey()
|
||||
|
@ -1962,6 +2031,28 @@ def testJsonld():
|
|||
assert(len(signedDocument['signature']['signatureValue']) > 50)
|
||||
assert(signedDocument['signature']['type'] == 'RsaSignatureSuite2017')
|
||||
assert(jsonldVerify(signedDocument, publicKeyPem))
|
||||
# alter the signed document
|
||||
signedDocument['object']['content'] = 'forged content'
|
||||
assert(not jsonldVerify(signedDocument, publicKeyPem))
|
||||
|
||||
jldDocument2 = {
|
||||
"@context": "https://www.w3.org/ns/activitystreams",
|
||||
"description": "Another json document",
|
||||
"numberField": 13353,
|
||||
"object": {
|
||||
"content": "More content"
|
||||
}
|
||||
}
|
||||
signedDocument2 = testSignJsonld(jldDocument2, privateKeyPem)
|
||||
assert(signedDocument2)
|
||||
assert(signedDocument2.get('signature'))
|
||||
assert(signedDocument2['signature'].get('signatureValue'))
|
||||
# changed signature on different document
|
||||
if signedDocument['signature']['signatureValue'] == \
|
||||
signedDocument2['signature']['signatureValue']:
|
||||
print('json signature has not changed for different documents')
|
||||
assert(signedDocument['signature']['signatureValue'] !=
|
||||
signedDocument2['signature']['signatureValue'])
|
||||
|
||||
|
||||
def testSiteIsActive():
|
||||
|
@ -1978,6 +2069,17 @@ def testRemoveHtml():
|
|||
assert(removeHtml(testStr) == 'This string has html.')
|
||||
|
||||
|
||||
def testDangerousCSS():
|
||||
print('testDangerousCSS')
|
||||
baseDir = os.getcwd()
|
||||
for subdir, dirs, files in os.walk(baseDir):
|
||||
for f in files:
|
||||
if not f.endswith('.css'):
|
||||
continue
|
||||
assert not dangerousCSS(baseDir + '/' + f, False)
|
||||
break
|
||||
|
||||
|
||||
def testDangerousMarkup():
|
||||
print('testDangerousMarkup')
|
||||
allowLocalNetworkAccess = False
|
||||
|
@ -2418,6 +2520,10 @@ def testFirstParagraphFromString():
|
|||
def testParseFeedDate():
|
||||
print('testParseFeedDate')
|
||||
|
||||
pubDate = "2020-12-14T00:08:06+00:00"
|
||||
publishedDate = parseFeedDate(pubDate)
|
||||
assert publishedDate == "2020-12-14 00:08:06+00:00"
|
||||
|
||||
pubDate = "Tue, 08 Dec 2020 06:24:38 -0600"
|
||||
publishedDate = parseFeedDate(pubDate)
|
||||
assert publishedDate == "2020-12-08 12:24:38+00:00"
|
||||
|
@ -2461,8 +2567,472 @@ def testGuessHashtagCategory() -> None:
|
|||
assert guess == "bar"
|
||||
|
||||
|
||||
def testGetMentionedPeople() -> None:
|
||||
print('testGetMentionedPeople')
|
||||
baseDir = os.getcwd()
|
||||
|
||||
content = "@dragon@cave.site @bat@cave.site This is a test."
|
||||
actors = getMentionedPeople(baseDir, 'https',
|
||||
content,
|
||||
'mydomain', False)
|
||||
assert actors
|
||||
assert len(actors) == 2
|
||||
assert actors[0] == "https://cave.site/users/dragon"
|
||||
assert actors[1] == "https://cave.site/users/bat"
|
||||
|
||||
|
||||
def testReplyToPublicPost() -> None:
|
||||
baseDir = os.getcwd()
|
||||
nickname = 'test7492362'
|
||||
domain = 'other.site'
|
||||
port = 443
|
||||
httpPrefix = 'https'
|
||||
postId = httpPrefix + '://rat.site/users/ninjarodent/statuses/63746173435'
|
||||
reply = \
|
||||
createPublicPost(baseDir, nickname, domain, port, httpPrefix,
|
||||
"@ninjarodent@rat.site This is a test.",
|
||||
False, False, False, True,
|
||||
None, None, False, postId)
|
||||
# print(str(reply))
|
||||
assert reply['object']['content'] == \
|
||||
'<p><span class=\"h-card\">' + \
|
||||
'<a href=\"https://rat.site/@ninjarodent\" ' + \
|
||||
'class=\"u-url mention\">@<span>ninjarodent</span>' + \
|
||||
'</a></span> This is a test.</p>'
|
||||
assert reply['object']['tag'][0]['type'] == 'Mention'
|
||||
assert reply['object']['tag'][0]['name'] == '@ninjarodent@rat.site'
|
||||
assert reply['object']['tag'][0]['href'] == \
|
||||
'https://rat.site/users/ninjarodent'
|
||||
assert len(reply['object']['to']) == 1
|
||||
assert reply['object']['to'][0].endswith('#Public')
|
||||
assert len(reply['object']['cc']) >= 1
|
||||
assert reply['object']['cc'][0].endswith(nickname + '/followers')
|
||||
assert len(reply['object']['tag']) == 1
|
||||
assert len(reply['object']['cc']) == 2
|
||||
assert reply['object']['cc'][1] == \
|
||||
httpPrefix + '://rat.site/users/ninjarodent'
|
||||
|
||||
|
||||
def getFunctionCallArgs(name: str, lines: [], startLineCtr: int) -> []:
|
||||
"""Returns the arguments of a function call given lines
|
||||
of source code and a starting line number
|
||||
"""
|
||||
argsStr = lines[startLineCtr].split(name + '(')[1]
|
||||
if ')' in argsStr:
|
||||
argsStr = argsStr.split(')')[0].replace(' ', '').split(',')
|
||||
return argsStr
|
||||
for lineCtr in range(startLineCtr + 1, len(lines)):
|
||||
if ')' not in lines[lineCtr]:
|
||||
argsStr += lines[lineCtr]
|
||||
continue
|
||||
else:
|
||||
argsStr += lines[lineCtr].split(')')[0]
|
||||
break
|
||||
return argsStr.replace('\n', '').replace(' ', '').split(',')
|
||||
|
||||
|
||||
def getFunctionCalls(name: str, lines: [], startLineCtr: int,
|
||||
functionProperties: {}) -> []:
|
||||
"""Returns the functions called by the given one,
|
||||
Starting with the given source code at the given line
|
||||
"""
|
||||
callsFunctions = []
|
||||
functionContentStr = ''
|
||||
for lineCtr in range(startLineCtr + 1, len(lines)):
|
||||
lineStr = lines[lineCtr].strip()
|
||||
if lineStr.startswith('def '):
|
||||
break
|
||||
if lineStr.startswith('class '):
|
||||
break
|
||||
functionContentStr += lines[lineCtr]
|
||||
for funcName, properties in functionProperties.items():
|
||||
if funcName + '(' in functionContentStr:
|
||||
callsFunctions.append(funcName)
|
||||
return callsFunctions
|
||||
|
||||
|
||||
def functionArgsMatch(callArgs: [], funcArgs: []) -> bool:
    """Do the function arguments match the function call arguments?

    callArgs: argument strings found at a call site
    funcArgs: argument strings from the function definition
    Returns True when the call plausibly satisfies the definition:
    either the counts are equal, or the number of non-optional call
    arguments is at least the number of non-optional parameters.
    """
    if len(callArgs) == len(funcArgs):
        return True

    # count non-optional arguments at the call site
    # ('=' inside a leading quote is part of a string, not a default)
    callArgsCtr = 0
    for a in callArgs:
        if a == 'self':
            continue
        if '=' not in a or a.startswith("'"):
            callArgsCtr += 1

    # count non-optional parameters in the definition
    funcArgsCtr = 0
    for a in funcArgs:
        if a == 'self':
            continue
        if '=' not in a or a.startswith("'"):
            funcArgsCtr += 1

    return callArgsCtr >= funcArgsCtr
|
||||
|
||||
|
||||
def testFunctions():
    """Static analysis of the project's python source files:
    checks that call-site arguments match each function definition,
    that every function is called somewhere and is imported wherever
    it is used, that functions only used within their own module have
    a leading underscore, then writes graphviz dot files describing
    the module and function call graphs
    """
    print('testFunctions')
    # module name -> list of function names defined in it
    function = {}
    # function name -> {args, module, calledInModule, calls}
    functionProperties = {}
    # module name -> {functions, source, lines, color, calls}
    modules = {}

    # scan every python file in the current directory and record
    # each function definition together with its argument list
    for subdir, dirs, files in os.walk('.'):
        for sourceFile in files:
            if not sourceFile.endswith('.py'):
                continue
            modName = sourceFile.replace('.py', '')
            modules[modName] = {
                'functions': []
            }
            sourceStr = ''
            with open(sourceFile, "r") as f:
                sourceStr = f.read()
                modules[modName]['source'] = sourceStr
            with open(sourceFile, "r") as f:
                lines = f.readlines()
                modules[modName]['lines'] = lines
                for line in lines:
                    if not line.strip().startswith('def '):
                        continue
                    methodName = line.split('def ', 1)[1].split('(')[0]
                    # extract the parameter list from the full source text
                    methodArgs = \
                        sourceStr.split('def ' + methodName + '(')[1]
                    methodArgs = methodArgs.split(')')[0]
                    methodArgs = methodArgs.replace(' ', '').split(',')
                    if function.get(modName):
                        function[modName].append(methodName)
                    else:
                        function[modName] = [methodName]
                    if methodName not in modules[modName]['functions']:
                        modules[modName]['functions'].append(methodName)
                    functionProperties[methodName] = {
                        "args": methodArgs,
                        "module": modName,
                        "calledInModule": []
                    }
        # NOTE(review): break placed at the os.walk level so that only
        # the top level directory is scanned - confirm against upstream
        break

    # modules whose call sites are not argument-checked
    excludeFuncArgs = [
        'pyjsonld'
    ]
    # function names too generic to be argument-checked
    excludeFuncs = [
        'link',
        'set',
        'get'
    ]
    # which modules is each function used within?
    for modName, modProperties in modules.items():
        print('Module: ' + modName + ' ✓')
        for name, properties in functionProperties.items():
            lineCtr = 0
            for line in modules[modName]['lines']:
                lineStr = line.strip()
                # skip definition lines - only call sites are of interest
                if lineStr.startswith('def '):
                    lineCtr += 1
                    continue
                if lineStr.startswith('class '):
                    lineCtr += 1
                    continue
                if name + '(' in line:
                    # record that this module calls the function
                    modList = \
                        functionProperties[name]['calledInModule']
                    if modName not in modList:
                        modList.append(modName)
                    if modName in excludeFuncArgs:
                        lineCtr += 1
                        continue
                    if name in excludeFuncs:
                        lineCtr += 1
                        continue
                    # check the call arguments against the definition
                    callArgs = \
                        getFunctionCallArgs(name,
                                            modules[modName]['lines'],
                                            lineCtr)
                    if not functionArgsMatch(callArgs,
                                             functionProperties[name]['args']):
                        print('Call to function ' + name +
                              ' does not match its arguments')
                        print('def args: ' +
                              str(len(functionProperties[name]['args'])) +
                              '\n' + str(functionProperties[name]['args']))
                        print('Call args: ' + str(len(callArgs)) + '\n' +
                              str(callArgs))
                        print('module ' + modName + ' line ' + str(lineCtr))
                        assert False
                lineCtr += 1

    # don't check these functions, because they are procedurally called
    exclusions = [
        'do_GET',
        'do_POST',
        'do_HEAD',
        '__run',
        'globaltrace',
        'localtrace',
        'kill',
        'clone',
        'unregister_rdf_parser',
        'set_document_loader',
        'has_property',
        'has_value',
        'add_value',
        'get_values',
        'remove_property',
        'remove_value',
        'normalize',
        'get_document_loader',
        'runInboxQueueWatchdog',
        'runInboxQueue',
        'runPostSchedule',
        'runPostScheduleWatchdog',
        'str2bool',
        'runNewswireDaemon',
        'runNewswireWatchdog',
        'threadSendPost',
        'sendToFollowers',
        'expireCache',
        'migrateAccount',
        'getMutualsOfPerson',
        'runPostsQueue',
        'runSharesExpire',
        'runPostsWatchdog',
        'runSharesExpireWatchdog',
        'getThisWeeksEvents',
        'getAvailability',
        'testThreadsFunction',
        'createServerAlice',
        'createServerBob',
        'createServerEve',
        'E2EEremoveDevice',
        'setOrganizationScheme'
    ]
    # functions not required to appear as explicit imports
    excludeImports = [
        'link',
        'start'
    ]
    # modules exempt from the local-function underscore convention
    excludeLocal = [
        'pyjsonld',
        'daemon',
        'tests'
    ]
    # modules exempt from all of the following checks
    excludeMods = [
        'pyjsonld'
    ]
    # check that functions are called somewhere
    for name, properties in functionProperties.items():
        # dunder-style names are always permitted
        if name.startswith('__'):
            if name.endswith('__'):
                continue
        if name in exclusions:
            continue
        if properties['module'] in excludeMods:
            continue
        isLocalFunction = False
        if not properties['calledInModule']:
            print('function ' + name +
                  ' in module ' + properties['module'] +
                  ' is not called anywhere')
        assert properties['calledInModule']

        # a function called only from its own module is "local" and
        # should be named with a leading underscore
        if len(properties['calledInModule']) == 1:
            modName = properties['calledInModule'][0]
            if modName not in excludeLocal and \
               modName == properties['module']:
                isLocalFunction = True
                if not name.startswith('_'):
                    print('Local function ' + name +
                          ' in ' + modName + '.py does not begin with _')
                    assert False

        # every module which calls the function must import it
        if name not in excludeImports:
            for modName in properties['calledInModule']:
                if modName == properties['module']:
                    continue
                importStr = 'from ' + properties['module'] + ' import ' + name
                if importStr not in modules[modName]['source']:
                    print(importStr + ' not found in ' + modName + '.py')
                    assert False

        # conversely, a function used from other modules should not
        # carry a leading underscore
        if not isLocalFunction:
            if name.startswith('_'):
                excludePublic = [
                    'pyjsonld',
                    'daemon',
                    'tests'
                ]
                modName = properties['module']
                if modName not in excludePublic:
                    print('Public function ' + name + ' in ' +
                          modName + '.py begins with _')
                    assert False
        print('Function: ' + name + ' ✓')

    print('Constructing function call graph')
    # node colors assigned to modules, cycling if there are more
    # modules than colors
    moduleColors = ('red', 'green', 'yellow', 'orange', 'purple', 'cyan',
                    'darkgoldenrod3', 'darkolivegreen1', 'darkorange1',
                    'darkorchid1', 'darkseagreen', 'darkslategray4',
                    'deeppink1', 'deepskyblue1', 'dimgrey', 'gold1',
                    'goldenrod', 'burlywood2', 'bisque1', 'brown1',
                    'chartreuse2', 'cornsilk', 'darksalmon')
    maxModuleCalls = 1
    maxFunctionCalls = 1
    colorCtr = 0
    for modName, modProperties in modules.items():
        lineCtr = 0
        modules[modName]['color'] = moduleColors[colorCtr]
        colorCtr += 1
        if colorCtr >= len(moduleColors):
            colorCtr = 0
        for line in modules[modName]['lines']:
            if line.strip().startswith('def '):
                name = line.split('def ')[1].split('(')[0]
                callsList = \
                    getFunctionCalls(name, modules[modName]['lines'],
                                     lineCtr, functionProperties)
                functionProperties[name]['calls'] = callsList.copy()
                if len(callsList) > maxFunctionCalls:
                    maxFunctionCalls = len(callsList)
                # keep track of which module calls which other module
                for fn in callsList:
                    modCall = functionProperties[fn]['module']
                    if modCall != modName:
                        if modules[modName].get('calls'):
                            if modCall not in modules[modName]['calls']:
                                modules[modName]['calls'].append(modCall)
                                if len(modules[modName]['calls']) > \
                                   maxModuleCalls:
                                    maxModuleCalls = \
                                        len(modules[modName]['calls'])
                        else:
                            modules[modName]['calls'] = [modCall]
            lineCtr += 1

    # write a module-level call graph in graphviz dot format
    callGraphStr = 'digraph EpicyonModules {\n\n'
    callGraphStr += ' graph [fontsize=10 fontname="Verdana" compound=true];\n'
    callGraphStr += ' node [shape=record fontsize=10 fontname="Verdana"];\n\n'
    # colors of modules nodes
    for modName, modProperties in modules.items():
        if not modProperties.get('calls'):
            callGraphStr += ' "' + modName + \
                '" [fillcolor=yellow style=filled];\n'
            continue
        if len(modProperties['calls']) <= int(maxModuleCalls / 8):
            callGraphStr += ' "' + modName + \
                '" [fillcolor=green style=filled];\n'
        elif len(modProperties['calls']) < int(maxModuleCalls / 4):
            callGraphStr += ' "' + modName + \
                '" [fillcolor=orange style=filled];\n'
        else:
            callGraphStr += ' "' + modName + \
                '" [fillcolor=red style=filled];\n'
    callGraphStr += '\n'
    # connections between modules
    for modName, modProperties in modules.items():
        if not modProperties.get('calls'):
            continue
        for modCall in modProperties['calls']:
            callGraphStr += ' "' + modName + '" -> "' + modCall + '";\n'
    callGraphStr += '\n}\n'
    with open('epicyon_modules.dot', 'w+') as fp:
        fp.write(callGraphStr)
        print('Modules call graph saved to epicyon_modules.dot')
        print('Plot using: ' +
              'sfdp -x -Goverlap=false -Goverlap_scaling=2 ' +
              '-Gsep=+100 -Tx11 epicyon_modules.dot')

    # write a function-level call graph, clustered by module
    callGraphStr = 'digraph Epicyon {\n\n'
    callGraphStr += ' size="8,6"; ratio=fill;\n'
    callGraphStr += ' graph [fontsize=10 fontname="Verdana" compound=true];\n'
    callGraphStr += ' node [shape=record fontsize=10 fontname="Verdana"];\n\n'

    for modName, modProperties in modules.items():
        callGraphStr += ' subgraph cluster_' + modName + ' {\n'
        callGraphStr += ' label = "' + modName + '";\n'
        callGraphStr += ' node [style=filled];\n'
        moduleFunctionsStr = ''
        for name in modProperties['functions']:
            if name.startswith('test'):
                continue
            if name not in excludeFuncs:
                if not functionProperties[name]['calls']:
                    moduleFunctionsStr += \
                        ' "' + name + '" [fillcolor=yellow style=filled];\n'
                    continue
                # color the node by how many functions it calls
                noOfCalls = len(functionProperties[name]['calls'])
                if noOfCalls < int(maxFunctionCalls / 4):
                    moduleFunctionsStr += ' "' + name + \
                        '" [fillcolor=orange style=filled];\n'
                else:
                    moduleFunctionsStr += ' "' + name + \
                        '" [fillcolor=red style=filled];\n'

        if moduleFunctionsStr:
            callGraphStr += moduleFunctionsStr + '\n'
        callGraphStr += ' color=blue;\n'
        callGraphStr += ' }\n\n'

    # edges between functions, colored by the caller's out-degree
    for name, properties in functionProperties.items():
        if not properties['calls']:
            continue
        noOfCalls = len(properties['calls'])
        if noOfCalls <= int(maxFunctionCalls / 8):
            modColor = 'blue'
        elif noOfCalls < int(maxFunctionCalls / 4):
            modColor = 'green'
        else:
            modColor = 'red'
        for calledFunc in properties['calls']:
            if calledFunc.startswith('test'):
                continue
            if calledFunc not in excludeFuncs:
                callGraphStr += ' "' + name + '" -> "' + calledFunc + \
                    '" [color=' + modColor + '];\n'

    callGraphStr += '\n}\n'
    with open('epicyon.dot', 'w+') as fp:
        fp.write(callGraphStr)
        print('Call graph saved to epicyon.dot')
        print('Plot using: ' +
              'sfdp -x -Goverlap=prism -Goverlap_scaling=8 ' +
              '-Gsep=+120 -Tx11 epicyon.dot')
|
||||
|
||||
|
||||
def testLinksWithinPost() -> None:
    """Tests that ftp and https links within post content are
    converted into the expected html anchors
    """
    # announce the test, consistent with the other tests in this file
    print('testLinksWithinPost')
    baseDir = os.getcwd()
    nickname = 'test27636'
    domain = 'rando.site'
    port = 443
    httpPrefix = 'https'
    content = 'This is a test post with links.\n\n' + \
        'ftp://ftp.ncdc.noaa.gov/pub/data/ghcn/v4/\n\nhttps://freedombone.net'
    postJsonObject = \
        createPublicPost(baseDir, nickname, domain, port, httpPrefix,
                         content,
                         False, False, False, True,
                         None, None, False, None)
    # newlines become <br> pairs and each url becomes an anchor with
    # the scheme hidden and the remainder shown as an ellipsis span
    assert postJsonObject['object']['content'] == \
        '<p>This is a test post with links.<br><br>' + \
        '<a href="ftp://ftp.ncdc.noaa.gov/pub/data/ghcn/v4/" ' + \
        'rel="nofollow noopener noreferrer" target="_blank">' + \
        '<span class="invisible">ftp://</span>' + \
        '<span class="ellipsis">' + \
        'ftp.ncdc.noaa.gov/pub/data/ghcn/v4/</span>' + \
        '</a><br><br><a href="https://freedombone.net" ' + \
        'rel="nofollow noopener noreferrer" target="_blank">' + \
        '<span class="invisible">https://</span>' + \
        '<span class="ellipsis">freedombone.net</span></a></p>'
|
||||
|
||||
|
||||
def runAllTests():
|
||||
print('Running tests...')
|
||||
testFunctions()
|
||||
testLinksWithinPost()
|
||||
testReplyToPublicPost()
|
||||
testGetMentionedPeople()
|
||||
testGuessHashtagCategory()
|
||||
testValidNickname()
|
||||
testParseFeedDate()
|
||||
|
@ -2477,6 +3047,7 @@ def runAllTests():
|
|||
testRemoveIdEnding()
|
||||
testJsonPostAllowsComments()
|
||||
runHtmlReplaceQuoteMarks()
|
||||
testDangerousCSS()
|
||||
testDangerousMarkup()
|
||||
testRemoveHtml()
|
||||
testSiteIsActive()
|
||||
|
|
142
theme.py
|
@ -14,7 +14,7 @@ from shutil import copyfile
|
|||
from content import dangerousCSS
|
||||
|
||||
|
||||
def getThemeFiles() -> []:
|
||||
def _getThemeFiles() -> []:
|
||||
return ('epicyon.css', 'login.css', 'follow.css',
|
||||
'suspended.css', 'calendar.css', 'blog.css',
|
||||
'options.css', 'search.css', 'links.css')
|
||||
|
@ -38,7 +38,7 @@ def getThemesList(baseDir: str) -> []:
|
|||
return themes
|
||||
|
||||
|
||||
def setThemeInConfig(baseDir: str, name: str) -> bool:
|
||||
def _setThemeInConfig(baseDir: str, name: str) -> bool:
|
||||
configFilename = baseDir + '/config.json'
|
||||
if not os.path.isfile(configFilename):
|
||||
return False
|
||||
|
@ -49,7 +49,7 @@ def setThemeInConfig(baseDir: str, name: str) -> bool:
|
|||
return saveJson(configJson, configFilename)
|
||||
|
||||
|
||||
def setNewswirePublishAsIcon(baseDir: str, useIcon: bool) -> bool:
|
||||
def _setNewswirePublishAsIcon(baseDir: str, useIcon: bool) -> bool:
|
||||
"""Shows the newswire publish action as an icon or a button
|
||||
"""
|
||||
configFilename = baseDir + '/config.json'
|
||||
|
@ -62,7 +62,7 @@ def setNewswirePublishAsIcon(baseDir: str, useIcon: bool) -> bool:
|
|||
return saveJson(configJson, configFilename)
|
||||
|
||||
|
||||
def setIconsAsButtons(baseDir: str, useButtons: bool) -> bool:
|
||||
def _setIconsAsButtons(baseDir: str, useButtons: bool) -> bool:
|
||||
"""Whether to show icons in the header (inbox, outbox, etc)
|
||||
as buttons
|
||||
"""
|
||||
|
@ -76,7 +76,7 @@ def setIconsAsButtons(baseDir: str, useButtons: bool) -> bool:
|
|||
return saveJson(configJson, configFilename)
|
||||
|
||||
|
||||
def setRssIconAtTop(baseDir: str, atTop: bool) -> bool:
|
||||
def _setRssIconAtTop(baseDir: str, atTop: bool) -> bool:
|
||||
"""Whether to show RSS icon at the top of the timeline
|
||||
"""
|
||||
configFilename = baseDir + '/config.json'
|
||||
|
@ -89,7 +89,7 @@ def setRssIconAtTop(baseDir: str, atTop: bool) -> bool:
|
|||
return saveJson(configJson, configFilename)
|
||||
|
||||
|
||||
def setPublishButtonAtTop(baseDir: str, atTop: bool) -> bool:
|
||||
def _setPublishButtonAtTop(baseDir: str, atTop: bool) -> bool:
|
||||
"""Whether to show the publish button above the title image
|
||||
in the newswire column
|
||||
"""
|
||||
|
@ -103,7 +103,7 @@ def setPublishButtonAtTop(baseDir: str, atTop: bool) -> bool:
|
|||
return saveJson(configJson, configFilename)
|
||||
|
||||
|
||||
def setFullWidthTimelineButtonHeader(baseDir: str, fullWidth: bool) -> bool:
|
||||
def _setFullWidthTimelineButtonHeader(baseDir: str, fullWidth: bool) -> bool:
|
||||
"""Shows the timeline button header containing inbox, outbox,
|
||||
calendar, etc as full width
|
||||
"""
|
||||
|
@ -127,8 +127,8 @@ def getTheme(baseDir: str) -> str:
|
|||
return 'default'
|
||||
|
||||
|
||||
def removeTheme(baseDir: str):
|
||||
themeFiles = getThemeFiles()
|
||||
def _removeTheme(baseDir: str):
|
||||
themeFiles = _getThemeFiles()
|
||||
for filename in themeFiles:
|
||||
if os.path.isfile(baseDir + '/' + filename):
|
||||
os.remove(baseDir + '/' + filename)
|
||||
|
@ -183,14 +183,14 @@ def setCSSparam(css: str, param: str, value: str) -> str:
|
|||
return newcss.strip()
|
||||
|
||||
|
||||
def setThemeFromDict(baseDir: str, name: str,
|
||||
themeParams: {}, bgParams: {},
|
||||
allowLocalNetworkAccess: bool) -> None:
|
||||
def _setThemeFromDict(baseDir: str, name: str,
|
||||
themeParams: {}, bgParams: {},
|
||||
allowLocalNetworkAccess: bool) -> None:
|
||||
"""Uses a dictionary to set a theme
|
||||
"""
|
||||
if name:
|
||||
setThemeInConfig(baseDir, name)
|
||||
themeFiles = getThemeFiles()
|
||||
_setThemeInConfig(baseDir, name)
|
||||
themeFiles = _getThemeFiles()
|
||||
for filename in themeFiles:
|
||||
# check for custom css within the theme directory
|
||||
templateFilename = baseDir + '/theme/' + name + '/epicyon-' + filename
|
||||
|
@ -215,33 +215,33 @@ def setThemeFromDict(baseDir: str, name: str,
|
|||
for paramName, paramValue in themeParams.items():
|
||||
if paramName == 'newswire-publish-icon':
|
||||
if paramValue.lower() == 'true':
|
||||
setNewswirePublishAsIcon(baseDir, True)
|
||||
_setNewswirePublishAsIcon(baseDir, True)
|
||||
else:
|
||||
setNewswirePublishAsIcon(baseDir, False)
|
||||
_setNewswirePublishAsIcon(baseDir, False)
|
||||
continue
|
||||
elif paramName == 'full-width-timeline-buttons':
|
||||
if paramValue.lower() == 'true':
|
||||
setFullWidthTimelineButtonHeader(baseDir, True)
|
||||
_setFullWidthTimelineButtonHeader(baseDir, True)
|
||||
else:
|
||||
setFullWidthTimelineButtonHeader(baseDir, False)
|
||||
_setFullWidthTimelineButtonHeader(baseDir, False)
|
||||
continue
|
||||
elif paramName == 'icons-as-buttons':
|
||||
if paramValue.lower() == 'true':
|
||||
setIconsAsButtons(baseDir, True)
|
||||
_setIconsAsButtons(baseDir, True)
|
||||
else:
|
||||
setIconsAsButtons(baseDir, False)
|
||||
_setIconsAsButtons(baseDir, False)
|
||||
continue
|
||||
elif paramName == 'rss-icon-at-top':
|
||||
if paramValue.lower() == 'true':
|
||||
setRssIconAtTop(baseDir, True)
|
||||
_setRssIconAtTop(baseDir, True)
|
||||
else:
|
||||
setRssIconAtTop(baseDir, False)
|
||||
_setRssIconAtTop(baseDir, False)
|
||||
continue
|
||||
elif paramName == 'publish-button-at-top':
|
||||
if paramValue.lower() == 'true':
|
||||
setPublishButtonAtTop(baseDir, True)
|
||||
_setPublishButtonAtTop(baseDir, True)
|
||||
else:
|
||||
setPublishButtonAtTop(baseDir, False)
|
||||
_setPublishButtonAtTop(baseDir, False)
|
||||
continue
|
||||
css = setCSSparam(css, paramName, paramValue)
|
||||
filename = baseDir + '/' + filename
|
||||
|
@ -249,17 +249,17 @@ def setThemeFromDict(baseDir: str, name: str,
|
|||
cssfile.write(css)
|
||||
|
||||
if bgParams.get('login'):
|
||||
setBackgroundFormat(baseDir, name, 'login', bgParams['login'])
|
||||
_setBackgroundFormat(baseDir, name, 'login', bgParams['login'])
|
||||
if bgParams.get('follow'):
|
||||
setBackgroundFormat(baseDir, name, 'follow', bgParams['follow'])
|
||||
_setBackgroundFormat(baseDir, name, 'follow', bgParams['follow'])
|
||||
if bgParams.get('options'):
|
||||
setBackgroundFormat(baseDir, name, 'options', bgParams['options'])
|
||||
_setBackgroundFormat(baseDir, name, 'options', bgParams['options'])
|
||||
if bgParams.get('search'):
|
||||
setBackgroundFormat(baseDir, name, 'search', bgParams['search'])
|
||||
_setBackgroundFormat(baseDir, name, 'search', bgParams['search'])
|
||||
|
||||
|
||||
def setBackgroundFormat(baseDir: str, name: str,
|
||||
backgroundType: str, extension: str) -> None:
|
||||
def _setBackgroundFormat(baseDir: str, name: str,
|
||||
backgroundType: str, extension: str) -> None:
|
||||
"""Sets the background file extension
|
||||
"""
|
||||
if extension == 'jpg':
|
||||
|
@ -277,7 +277,7 @@ def setBackgroundFormat(baseDir: str, name: str,
|
|||
def enableGrayscale(baseDir: str) -> None:
|
||||
"""Enables grayscale for the current theme
|
||||
"""
|
||||
themeFiles = getThemeFiles()
|
||||
themeFiles = _getThemeFiles()
|
||||
for filename in themeFiles:
|
||||
templateFilename = baseDir + '/' + filename
|
||||
if not os.path.isfile(templateFilename):
|
||||
|
@ -300,7 +300,7 @@ def enableGrayscale(baseDir: str) -> None:
|
|||
def disableGrayscale(baseDir: str) -> None:
|
||||
"""Disables grayscale for the current theme
|
||||
"""
|
||||
themeFiles = getThemeFiles()
|
||||
themeFiles = _getThemeFiles()
|
||||
for filename in themeFiles:
|
||||
templateFilename = baseDir + '/' + filename
|
||||
if not os.path.isfile(templateFilename):
|
||||
|
@ -318,7 +318,7 @@ def disableGrayscale(baseDir: str) -> None:
|
|||
os.remove(grayscaleFilename)
|
||||
|
||||
|
||||
def setCustomFont(baseDir: str):
|
||||
def _setCustomFont(baseDir: str):
|
||||
"""Uses a dictionary to set a theme
|
||||
"""
|
||||
customFontExt = None
|
||||
|
@ -337,7 +337,7 @@ def setCustomFont(baseDir: str):
|
|||
if not customFontExt:
|
||||
return
|
||||
|
||||
themeFiles = getThemeFiles()
|
||||
themeFiles = _getThemeFiles()
|
||||
for filename in themeFiles:
|
||||
templateFilename = baseDir + '/' + filename
|
||||
if not os.path.isfile(templateFilename):
|
||||
|
@ -356,9 +356,9 @@ def setCustomFont(baseDir: str):
|
|||
cssfile.write(css)
|
||||
|
||||
|
||||
def readVariablesFile(baseDir: str, themeName: str,
|
||||
variablesFile: str,
|
||||
allowLocalNetworkAccess: bool) -> None:
|
||||
def _readVariablesFile(baseDir: str, themeName: str,
|
||||
variablesFile: str,
|
||||
allowLocalNetworkAccess: bool) -> None:
|
||||
"""Reads variables from a file in the theme directory
|
||||
"""
|
||||
themeParams = loadJson(variablesFile, 0)
|
||||
|
@ -370,14 +370,14 @@ def readVariablesFile(baseDir: str, themeName: str,
|
|||
"options": "jpg",
|
||||
"search": "jpg"
|
||||
}
|
||||
setThemeFromDict(baseDir, themeName, themeParams, bgParams,
|
||||
allowLocalNetworkAccess)
|
||||
_setThemeFromDict(baseDir, themeName, themeParams, bgParams,
|
||||
allowLocalNetworkAccess)
|
||||
|
||||
|
||||
def setThemeDefault(baseDir: str, allowLocalNetworkAccess: bool):
|
||||
def _setThemeDefault(baseDir: str, allowLocalNetworkAccess: bool):
|
||||
name = 'default'
|
||||
removeTheme(baseDir)
|
||||
setThemeInConfig(baseDir, name)
|
||||
_removeTheme(baseDir)
|
||||
_setThemeInConfig(baseDir, name)
|
||||
bgParams = {
|
||||
"login": "jpg",
|
||||
"follow": "jpg",
|
||||
|
@ -394,44 +394,11 @@ def setThemeDefault(baseDir: str, allowLocalNetworkAccess: bool):
|
|||
"banner-height-mobile": "10vh",
|
||||
"search-banner-height-mobile": "15vh"
|
||||
}
|
||||
setThemeFromDict(baseDir, name, themeParams, bgParams,
|
||||
allowLocalNetworkAccess)
|
||||
_setThemeFromDict(baseDir, name, themeParams, bgParams,
|
||||
allowLocalNetworkAccess)
|
||||
|
||||
|
||||
def setThemeHighVis(baseDir: str, allowLocalNetworkAccess: bool):
|
||||
name = 'highvis'
|
||||
themeParams = {
|
||||
"newswire-publish-icon": True,
|
||||
"full-width-timeline-buttons": False,
|
||||
"icons-as-buttons": False,
|
||||
"rss-icon-at-top": True,
|
||||
"publish-button-at-top": False,
|
||||
"font-size-header": "22px",
|
||||
"font-size-header-mobile": "32px",
|
||||
"font-size": "45px",
|
||||
"font-size2": "45px",
|
||||
"font-size3": "45px",
|
||||
"font-size4": "35px",
|
||||
"font-size5": "29px",
|
||||
"gallery-font-size": "35px",
|
||||
"gallery-font-size-mobile": "55px",
|
||||
"hashtag-vertical-spacing3": "100px",
|
||||
"hashtag-vertical-spacing4": "150px",
|
||||
"time-vertical-align": "-10px",
|
||||
"*font-family": "'LinBiolinum_Rah'",
|
||||
"*src": "url('./fonts/LinBiolinum_Rah.woff2') format('woff2')"
|
||||
}
|
||||
bgParams = {
|
||||
"login": "jpg",
|
||||
"follow": "jpg",
|
||||
"options": "jpg",
|
||||
"search": "jpg"
|
||||
}
|
||||
setThemeFromDict(baseDir, name, themeParams, bgParams,
|
||||
allowLocalNetworkAccess)
|
||||
|
||||
|
||||
def setThemeFonts(baseDir: str, themeName: str) -> None:
|
||||
def _setThemeFonts(baseDir: str, themeName: str) -> None:
|
||||
"""Adds custom theme fonts
|
||||
"""
|
||||
themeNameLower = themeName.lower()
|
||||
|
@ -455,7 +422,7 @@ def setThemeFonts(baseDir: str, themeName: str) -> None:
|
|||
break
|
||||
|
||||
|
||||
def setThemeImages(baseDir: str, name: str) -> None:
|
||||
def _setThemeImages(baseDir: str, name: str) -> None:
|
||||
"""Changes the profile background image
|
||||
and banner to the defaults
|
||||
"""
|
||||
|
@ -557,6 +524,7 @@ def setThemeImages(baseDir: str, name: str) -> None:
|
|||
os.remove(accountDir + '/right_col_image.png')
|
||||
except BaseException:
|
||||
pass
|
||||
break
|
||||
|
||||
|
||||
def setNewsAvatar(baseDir: str, name: str,
|
||||
|
@ -589,7 +557,7 @@ def setTheme(baseDir: str, name: str, domain: str,
|
|||
result = False
|
||||
|
||||
prevThemeName = getTheme(baseDir)
|
||||
removeTheme(baseDir)
|
||||
_removeTheme(baseDir)
|
||||
|
||||
themes = getThemesList(baseDir)
|
||||
for themeName in themes:
|
||||
|
@ -605,21 +573,21 @@ def setTheme(baseDir: str, name: str, domain: str,
|
|||
if prevThemeName.lower() != themeNameLower:
|
||||
# change the banner and profile image
|
||||
# to the default for the theme
|
||||
setThemeImages(baseDir, name)
|
||||
setThemeFonts(baseDir, name)
|
||||
_setThemeImages(baseDir, name)
|
||||
_setThemeFonts(baseDir, name)
|
||||
result = True
|
||||
|
||||
if not result:
|
||||
# default
|
||||
setThemeDefault(baseDir)
|
||||
_setThemeDefault(baseDir, allowLocalNetworkAccess)
|
||||
result = True
|
||||
|
||||
variablesFile = baseDir + '/theme/' + name + '/theme.json'
|
||||
if os.path.isfile(variablesFile):
|
||||
readVariablesFile(baseDir, name, variablesFile,
|
||||
allowLocalNetworkAccess)
|
||||
_readVariablesFile(baseDir, name, variablesFile,
|
||||
allowLocalNetworkAccess)
|
||||
|
||||
setCustomFont(baseDir)
|
||||
_setCustomFont(baseDir)
|
||||
|
||||
# set the news avatar
|
||||
newsAvatarThemeFilename = \
|
||||
|
@ -636,5 +604,5 @@ def setTheme(baseDir: str, name: str, domain: str,
|
|||
else:
|
||||
disableGrayscale(baseDir)
|
||||
|
||||
setThemeInConfig(baseDir, name)
|
||||
_setThemeInConfig(baseDir, name)
|
||||
return result
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
{
|
||||
"today-circle": "#03a494",
|
||||
"options-main-link-color-hover": "white",
|
||||
"main-link-color-hover": "blue",
|
||||
"font-size-newswire-mobile": "32px",
|
||||
"newswire-date-color": "#00a594",
|
||||
|
@ -58,8 +59,10 @@
|
|||
"border-width": "1px",
|
||||
"border-width-header": "1px",
|
||||
"main-link-color": "darkblue",
|
||||
"options-main-link-color": "lightgrey",
|
||||
"title-color": "#2a2c37",
|
||||
"main-visited-color": "#232c37",
|
||||
"options-main-visited-color": "#ccc",
|
||||
"text-entry-foreground": "#111",
|
||||
"text-entry-background": "white",
|
||||
"font-color-header": "black",
|
||||
|
|
|
@ -24,8 +24,11 @@
|
|||
"border-color": "#035103",
|
||||
"main-link-color": "#2fff2f",
|
||||
"main-link-color-hover": "#afff2f",
|
||||
"options-main-link-color": "#2fff2f",
|
||||
"options-main-link-color-hover": "#afff2f",
|
||||
"title-color": "#2fff2f",
|
||||
"main-visited-color": "#3c8234",
|
||||
"options-main-visited-color": "#3c8234",
|
||||
"button-selected": "#063200",
|
||||
"button-background-hover": "#a62200",
|
||||
"button-text-hover": "#00ff00",
|
||||
|
|
Before Width: | Height: | Size: 3.6 KiB After Width: | Height: | Size: 1.4 KiB |
Before Width: | Height: | Size: 1.5 KiB After Width: | Height: | Size: 1.4 KiB |
Before Width: | Height: | Size: 1.1 KiB After Width: | Height: | Size: 1.1 KiB |
Before Width: | Height: | Size: 1.4 KiB After Width: | Height: | Size: 1.4 KiB |
Before Width: | Height: | Size: 1.4 KiB After Width: | Height: | Size: 1.4 KiB |
Before Width: | Height: | Size: 1.4 KiB After Width: | Height: | Size: 1.4 KiB |
After Width: | Height: | Size: 1.2 KiB |
Before Width: | Height: | Size: 969 B After Width: | Height: | Size: 1.4 KiB |
Before Width: | Height: | Size: 5.8 KiB After Width: | Height: | Size: 12 KiB |
Before Width: | Height: | Size: 24 KiB After Width: | Height: | Size: 12 KiB |
|
@ -1,4 +1,6 @@
|
|||
{
|
||||
"post-separator-margin-top": "10px",
|
||||
"post-separator-margin-bottom": "10px",
|
||||
"time-color": "grey",
|
||||
"event-color": "white",
|
||||
"login-bg-color": "#567726",
|
||||
|
@ -27,8 +29,11 @@
|
|||
"link-bg-color": "#383335",
|
||||
"main-link-color": "white",
|
||||
"main-link-color-hover": "#ddd",
|
||||
"options-main-link-color": "white",
|
||||
"options-main-link-color-hover": "#ddd",
|
||||
"title-color": "white",
|
||||
"main-visited-color": "#e1c4bc",
|
||||
"options-main-visited-color": "#e1c4bc",
|
||||
"main-fg-color": "white",
|
||||
"options-fg-color": "white",
|
||||
"column-left-fg-color": "white",
|
||||
|
|
|
@ -0,0 +1,300 @@
|
|||
/* Use high specificity where possible to override default value(s) */
|
||||
|
||||
/* Variable Overrides */
|
||||
|
||||
:root {
|
||||
--main-fg-color: #000;
|
||||
--main-bg-color: #FFF;
|
||||
--main-bg-color-reply: #FFF;
|
||||
--dropdown-fg-color: #000;
|
||||
}
|
||||
|
||||
|
||||
/* Property Overrides */
|
||||
|
||||
body {
|
||||
background-color: #FFF;
|
||||
}
|
||||
|
||||
h1, h2, h3, h4, h5 {
|
||||
width: 100%;
|
||||
margin-bottom: 0.5em; /* NOTE: em values will be relative to each font-size ?*/
|
||||
color: #000;
|
||||
}
|
||||
|
||||
p {
|
||||
width: 100%;
|
||||
margin-bottom: 1em;
|
||||
}
|
||||
|
||||
a {
|
||||
/*font-size: var(--h4-size);*/
|
||||
color: #10408F;
|
||||
}
|
||||
a:hover {
|
||||
color: #10408F;
|
||||
}
|
||||
|
||||
ul, ol {
|
||||
padding-left: 2em;
|
||||
}
|
||||
ul {
|
||||
list-style-type: disc;
|
||||
}
|
||||
ol {
|
||||
list-style-type: decimal;
|
||||
}
|
||||
|
||||
hr {
|
||||
width: 100%;
|
||||
margin-top: 2em;
|
||||
margin-bottom: 2em;
|
||||
}
|
||||
|
||||
.timeline, .page {
|
||||
width: 99%;
|
||||
padding: 0;
|
||||
margin: 1em auto;
|
||||
}
|
||||
|
||||
div.container {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
padding: 0;
|
||||
margin: 1em;
|
||||
background-color: transparent;
|
||||
border: none;
|
||||
overflow: visible;
|
||||
}
|
||||
div.container img {
|
||||
display: block;
|
||||
float: none;
|
||||
margin: 0.5em;
|
||||
}
|
||||
|
||||
.containerHeader {
|
||||
width: 175px;
|
||||
margin: 0;
|
||||
padding: 0.5em 1em;
|
||||
background-color: transparent;
|
||||
border-radius: 0;
|
||||
border: 1px solid black;
|
||||
}
|
||||
|
||||
.containerHeader .navbuttons .button-bar {
|
||||
display: block;
|
||||
}
|
||||
|
||||
.containerHeader .navbuttons .button-bar a {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
background-color: transparent;
|
||||
color: #000;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
/* NOTE: Now handled in base.css
|
||||
.main {
|
||||
min-height: 512px;
|
||||
}*/
|
||||
|
||||
.links, .newswire {
|
||||
border: 1px solid black;
|
||||
}
|
||||
|
||||
.newswireItem {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
padding-bottom: 1em;
|
||||
margin-bottom: 0.5em;
|
||||
border-bottom: 1px solid #BBB;
|
||||
font-size: var(--font-size-base);
|
||||
}
|
||||
.newswireItem a {
|
||||
display: block;
|
||||
}
|
||||
.newswireItem img {
|
||||
width: 20px;
|
||||
}
|
||||
.newswireDate {
|
||||
display: block;
|
||||
float: none;
|
||||
margin-top: 0.3em;
|
||||
color: #BBB;
|
||||
font-size: var(--font-size-base); /* TODO: This kind of fix should be avoided with better CSS */
|
||||
}
|
||||
|
||||
.links h1, .newswire h1, .col-left-mobile h1, .col-right-mobile h1 {
|
||||
padding-bottom: 0.2em;
|
||||
margin-bottom: 0.5em;
|
||||
border-bottom: 1px solid #BBB;
|
||||
}
|
||||
.links center, .newswire center {
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
.columnIcons a img, .leftColIcons a img {
|
||||
float: none;
|
||||
width: 32px;
|
||||
}
|
||||
|
||||
.links a, .col-left-mobile a {
|
||||
display: block;
|
||||
/*margin-bottom: 0.5em;*/
|
||||
}
|
||||
.links-list {
|
||||
padding-bottom: 1em;
|
||||
margin-bottom: 1em;
|
||||
border-bottom: 1px solid #BBB;
|
||||
}
|
||||
.links-list .linksHeader {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
/*padding-top: 1em;
|
||||
margin-bottom: 0.5em*/
|
||||
/*border-top: 1px solid #BBB;*/
|
||||
margin-bottom: 0.5em;
|
||||
background-color: transparent;
|
||||
color: #000;
|
||||
font-size: var(--h2-size);
|
||||
text-transform: none;
|
||||
}
|
||||
|
||||
.login-text {
|
||||
padding-bottom: 1em;
|
||||
margin-bottom: 1em;
|
||||
border-bottom: 1px solid #BBB;
|
||||
}
|
||||
|
||||
form .container, form .imgcontainer {
|
||||
margin: 0;
|
||||
}
|
||||
form .imgcontainer {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
.imgcontainer .loginimage {
|
||||
margin: 0 auto;
|
||||
margin-bottom: 1em;
|
||||
}
|
||||
|
||||
.timeline-posts {
|
||||
background-color: transparent;
|
||||
}
|
||||
/* Use of .main in the following would be ideally avoided, but the HTML generation currently calls for it */
|
||||
.timeline-posts .container, .main .container {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
padding-bottom: 1.5em;
|
||||
margin-bottom: 1.5em;
|
||||
background-color: transparent;
|
||||
border-radius: 0;
|
||||
border: none;
|
||||
border-bottom: 1px solid #BBB;
|
||||
}
|
||||
.timeline-posts .message, .main .message {
|
||||
min-height: 96px;
|
||||
margin-bottom: 1em;
|
||||
color: #000;
|
||||
text-align: justify;
|
||||
text-justify: inter-word;
|
||||
}
|
||||
.timeline-posts .message b, .main .message b {
|
||||
display: block;
|
||||
margin-bottom: 0.3em;
|
||||
font-size: var(--h3-size);
|
||||
}
|
||||
.timeline-posts .message img, .main .message img {
|
||||
width: 128px;
|
||||
height: 96px;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
margin-right: 1em;
|
||||
border-radius: 0;
|
||||
}
|
||||
.timeline-posts .message br, .main .message br {
|
||||
display: block;
|
||||
}
|
||||
.timeline-posts .message p:last-of-type, .main .message p:last-of-type {
|
||||
clear: both;
|
||||
padding-top: 0.5em;
|
||||
}
|
||||
/*.timeline-posts .message p:last-of-type br, .main .message p:last-of-type br {
|
||||
display: none;
|
||||
}*/
|
||||
.links br, .newswire br, .timeline-posts br, .col-left-mobile br, .col-right-mobile br, .main br {
|
||||
display: none;
|
||||
}
|
||||
.post-title {
|
||||
display: none; /* TODO: Confirm this */
|
||||
}
|
||||
.timeline-avatar {
|
||||
display: none; /* TODO: Confirm this */
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
.timeline-avatar a {
|
||||
display: block;
|
||||
}
|
||||
.timeline-avatar img {
|
||||
width: 64px;
|
||||
height: 64px;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
margin-right: 1em;
|
||||
transform: none;
|
||||
object-fit: contain;
|
||||
}
|
||||
.containericons {
|
||||
display: flex;
|
||||
justify-content: end;
|
||||
align-items: end;
|
||||
margin: 0;
|
||||
margin-left: auto;
|
||||
}
|
||||
.containericons img {
|
||||
padding: 0;
|
||||
width: 32px;
|
||||
}
|
||||
.containericons .imageAnchor {
|
||||
order: 1;
|
||||
}
|
||||
.containericons .time-right, .container .time-right {
|
||||
order: 0;
|
||||
margin: 0;
|
||||
margin-left: auto;
|
||||
font-size: var(--font-size-base); /* TODO: This kind of fix should be avoided with better CSS */
|
||||
float: none;
|
||||
}
|
||||
|
||||
.leftColIcons {
|
||||
float: none;
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
.leftColIcons, .columnIcons {
|
||||
padding-bottom: 0.5em;
|
||||
margin-bottom: 1em;
|
||||
border-bottom: 1px solid #BBB;
|
||||
}
|
||||
|
||||
.pageicon {
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
.frontPageMobileButtons {
|
||||
display: none;
|
||||
}
|
||||
|
||||
input[type="submit"] {
|
||||
margin: 0;
|
||||
margin-right: 0.5em;
|
||||
width: auto;
|
||||
}
|
||||
input[type="submit"]:last-of-type {
|
||||
margin-right: 0;
|
||||
}
|
||||
|
||||
form .vertical-center .container, form .vertical-center .container br {
|
||||
display: block;
|
||||
}
|
After Width: | Height: | Size: 868 KiB |
After Width: | Height: | Size: 948 KiB |
After Width: | Height: | Size: 636 KiB |
After Width: | Height: | Size: 753 KiB |
|
@ -40,7 +40,10 @@
|
|||
"link-bg-color": "black",
|
||||
"main-link-color": "#ff9900",
|
||||
"main-link-color-hover": "#d09338",
|
||||
"options-main-link-color": "#ff9900",
|
||||
"options-main-link-color-hover": "#d09338",
|
||||
"main-visited-color": "#ffb900",
|
||||
"options-main-visited-color": "#ffb900",
|
||||
"main-fg-color": "white",
|
||||
"login-fg-color": "white",
|
||||
"options-fg-color": "white",
|
||||
|
|
Before Width: | Height: | Size: 315 KiB After Width: | Height: | Size: 5.9 KiB |
Before Width: | Height: | Size: 9.2 KiB After Width: | Height: | Size: 5.0 KiB |
Before Width: | Height: | Size: 315 KiB After Width: | Height: | Size: 5.9 KiB |
|
@ -105,8 +105,11 @@
|
|||
"border-color": "#c0cdd9",
|
||||
"main-link-color": "#25408f",
|
||||
"main-link-color-hover": "#10408f",
|
||||
"options-main-link-color": "#25408f",
|
||||
"options-main-link-color-hover": "#10408f",
|
||||
"title-color": "#2a2c37",
|
||||
"main-visited-color": "#25408f",
|
||||
"options-main-visited-color": "#25408f",
|
||||
"text-entry-foreground": "#111",
|
||||
"text-entry-background": "white",
|
||||
"font-color-header": "black",
|
||||
|
|
|
@ -30,8 +30,11 @@
|
|||
"border-width-header": "5px",
|
||||
"main-link-color": "#9fb42b",
|
||||
"main-link-color-hover": "#cfb42b",
|
||||
"options-main-link-color": "#9fb42b",
|
||||
"options-main-link-color-hover": "#cfb42b",
|
||||
"title-color": "#9fb42b",
|
||||
"main-visited-color": "#9fb42b",
|
||||
"options-main-visited-color": "#9fb42b",
|
||||
"button-selected": "black",
|
||||
"button-highlighted": "green",
|
||||
"button-background-hover": "#a3390d",
|
||||
|
|
|
@ -39,8 +39,11 @@
|
|||
"border-color": "#c0cdd9",
|
||||
"main-link-color": "#2a2c37",
|
||||
"main-link-color-hover": "#aa2c37",
|
||||
"options-main-link-color": "#2a2c37",
|
||||
"options-main-link-color-hover": "#aa2c37",
|
||||
"title-color": "#2a2c37",
|
||||
"main-visited-color": "#232c37",
|
||||
"options-main-visited-color": "#232c37",
|
||||
"text-entry-foreground": "#111",
|
||||
"text-entry-background": "white",
|
||||
"font-color-header": "black",
|
||||
|
|
|
@ -5,6 +5,7 @@
|
|||
"rss-icon-at-top": "True",
|
||||
"publish-button-at-top": "False",
|
||||
"main-visited-color": "#0481f5",
|
||||
"options-main-visited-color": "#0481f5",
|
||||
"post-separator-margin-top": "9%",
|
||||
"post-separator-margin-bottom": "9%",
|
||||
"post-separator-width": "80%",
|
||||
|
@ -30,6 +31,8 @@
|
|||
"link-bg-color": "#0f0d10",
|
||||
"main-link-color": "#6481f5",
|
||||
"main-link-color-hover": "#d09338",
|
||||
"options-main-link-color": "#6481f5",
|
||||
"options-main-link-color-hover": "#d09338",
|
||||
"main-fg-color": "#0481f5",
|
||||
"login-fg-color": "#0481f5",
|
||||
"options-fg-color": "#0481f5",
|
||||
|
|
|
@ -30,8 +30,11 @@
|
|||
"border-color": "#3f2145",
|
||||
"main-link-color": "#ff42a0",
|
||||
"main-link-color-hover": "white",
|
||||
"options-main-link-color": "#ff42a0",
|
||||
"options-main-link-color-hover": "white",
|
||||
"title-color": "white",
|
||||
"main-visited-color": "#f93bb0",
|
||||
"options-main-visited-color": "#f93bb0",
|
||||
"button-selected": "#c042a0",
|
||||
"button-background-hover": "#af42a0",
|
||||
"button-text-hover": "#f98bb0",
|
||||
|
|
After Width: | Height: | Size: 1.2 KiB |
|
@ -1,4 +1,6 @@
|
|||
{
|
||||
"post-separator-margin-top": "10px",
|
||||
"post-separator-margin-bottom": "10px",
|
||||
"calendar-header-font-style": "normal",
|
||||
"italic-font-style": "normal",
|
||||
"calendar-header-font": "'Orbitron'",
|
||||
|
@ -21,8 +23,7 @@
|
|||
"rss-icon-at-top": "True",
|
||||
"publish-button-at-top": "False",
|
||||
"main-visited-color": "#46eed5",
|
||||
"post-separator-margin-top": "9%",
|
||||
"post-separator-margin-bottom": "9%",
|
||||
"options-main-visited-color": "#46eed5",
|
||||
"post-separator-width": "80%",
|
||||
"post-separator-height": "10%",
|
||||
"column-left-header-background": "#6800e7",
|
||||
|
@ -51,6 +52,8 @@
|
|||
"link-bg-color": "#0f0d10",
|
||||
"main-link-color": "#05b9ec",
|
||||
"main-link-color-hover": "#46eed5",
|
||||
"options-main-link-color": "#05b9ec",
|
||||
"options-main-link-color-hover": "#46eed5",
|
||||
"main-fg-color": "white",
|
||||
"login-fg-color": "white",
|
||||
"options-fg-color": "white",
|
||||
|
|
|
@ -0,0 +1,2 @@
|
|||
Banner theme is Emma Goldman speaking in Paterson NJ to members of the IWW. 16th Dec 1913.
|
||||
Font, post separator and globe image is from Mother Earth magazine.
|
|
@ -47,8 +47,11 @@
|
|||
"border-color": "#c0cdd9",
|
||||
"main-link-color": "#2a2c37",
|
||||
"main-link-color-hover": "#aa2c37",
|
||||
"options-main-link-color": "#2a2c37",
|
||||
"options-main-link-color-hover": "#aa2c37",
|
||||
"title-color": "#2a2c37",
|
||||
"main-visited-color": "#232c37",
|
||||
"options-main-visited-color": "#232c37",
|
||||
"text-entry-foreground": "#111",
|
||||
"text-entry-background": "white",
|
||||
"font-color-header": "black",
|
||||
|
|
|
@ -28,8 +28,11 @@
|
|||
"link-bg-color": "#0f0d10",
|
||||
"main-link-color": "#ffc4bc",
|
||||
"main-link-color-hover": "white",
|
||||
"options-main-link-color": "#ffc4bc",
|
||||
"options-main-link-color-hover": "white",
|
||||
"title-color": "#ffc4bc",
|
||||
"main-visited-color": "#e1c4bc",
|
||||
"options-main-visited-color": "#e1c4bc",
|
||||
"main-fg-color": "#ffc4bc",
|
||||
"login-fg-color": "#ffc4bc",
|
||||
"options-fg-color": "#ffc4bc",
|
||||
|
|
Before Width: | Height: | Size: 1.3 KiB After Width: | Height: | Size: 1.3 KiB |
Before Width: | Height: | Size: 1.4 KiB After Width: | Height: | Size: 1.4 KiB |
Before Width: | Height: | Size: 1.4 KiB After Width: | Height: | Size: 1.4 KiB |
Before Width: | Height: | Size: 1.3 KiB After Width: | Height: | Size: 1.3 KiB |
Before Width: | Height: | Size: 1.3 KiB After Width: | Height: | Size: 1.3 KiB |
Before Width: | Height: | Size: 1.4 KiB After Width: | Height: | Size: 1.4 KiB |
Before Width: | Height: | Size: 1.4 KiB After Width: | Height: | Size: 1.4 KiB |
Before Width: | Height: | Size: 1.4 KiB After Width: | Height: | Size: 1.4 KiB |
Before Width: | Height: | Size: 1.4 KiB After Width: | Height: | Size: 1.4 KiB |
Before Width: | Height: | Size: 1.4 KiB After Width: | Height: | Size: 1.4 KiB |
Before Width: | Height: | Size: 1.4 KiB After Width: | Height: | Size: 1.4 KiB |
Before Width: | Height: | Size: 1.1 KiB After Width: | Height: | Size: 198 B |
Before Width: | Height: | Size: 3.2 KiB After Width: | Height: | Size: 1.4 KiB |
Before Width: | Height: | Size: 4.6 KiB After Width: | Height: | Size: 1.4 KiB |