2020-04-04 11:00:19 +00:00
|
|
|
__filename__ = "session.py"
|
|
|
|
__author__ = "Bob Mottram"
|
|
|
|
__license__ = "AGPL3+"
|
2023-01-21 23:03:30 +00:00
|
|
|
__version__ = "1.4.0"
|
2020-04-04 11:00:19 +00:00
|
|
|
__maintainer__ = "Bob Mottram"
|
2021-09-10 16:14:50 +00:00
|
|
|
__email__ = "bob@libreserver.org"
|
2020-04-04 11:00:19 +00:00
|
|
|
__status__ = "Production"
|
2021-09-17 15:12:50 +00:00
|
|
|
__module_group__ = "Session"
|
2019-06-28 18:55:29 +00:00
|
|
|
|
2019-07-16 14:23:06 +00:00
|
|
|
import os
|
2019-06-28 18:55:29 +00:00
|
|
|
import requests
|
2022-11-09 12:10:29 +00:00
|
|
|
from utils import text_in_file
|
|
|
|
from utils import acct_dir
|
2021-12-27 20:47:05 +00:00
|
|
|
from utils import url_permitted
|
2021-12-27 15:58:46 +00:00
|
|
|
from utils import is_image_file
|
2021-12-29 21:55:09 +00:00
|
|
|
from httpsig import create_signed_header
|
2019-06-28 18:55:29 +00:00
|
|
|
import json
|
2020-06-23 13:28:41 +00:00
|
|
|
from socket import error as SocketError
|
|
|
|
import errno
|
2021-06-18 11:53:25 +00:00
|
|
|
from http.client import HTTPConnection
|
2019-06-28 18:55:29 +00:00
|
|
|
|
2019-06-30 18:23:18 +00:00
|
|
|
|
2021-12-28 16:56:57 +00:00
|
|
|
def create_session(proxy_type: str):
    """Create a new requests session, optionally routed via a local proxy.

    proxy_type selects the overlay network: 'tor', 'i2p', 'gnunet',
    or 'ipfs'/'ipns'. Any other value produces a direct session.
    Returns the session, or None if it could not be created.
    """
    new_session = None
    try:
        new_session = requests.session()
    except requests.exceptions.RequestException as ex:
        print('EX: requests error during create_session ' + str(ex))
        return None
    except SocketError as ex:
        # a reset connection gets its own message for easier diagnosis
        if ex.errno == errno.ECONNRESET:
            print('EX: connection was reset during create_session ' +
                  str(ex))
        else:
            print('EX: socket error during create_session ' + str(ex))
        return None
    except ValueError as ex:
        print('EX: error during create_session ' + str(ex))
        return None
    if not new_session:
        return None

    # local SOCKS proxy port for each supported overlay network
    socks_ports = {
        'tor': 9050,
        'i2p': 4447,
        'gnunet': 7777
    }
    if proxy_type in socks_ports:
        proxy_url = 'socks5h://localhost:' + str(socks_ports[proxy_type])
        new_session.proxies = {
            'http': proxy_url,
            'https': proxy_url
        }
    elif proxy_type in ('ipfs', 'ipns'):
        # ipfs uses its own scheme key rather than http/https
        new_session.proxies = {
            'ipfs': 'socks5h://localhost:4001'
        }
    # print('New session created with proxy ' + str(proxy_type))
    return new_session
|
|
|
|
|
2020-04-04 11:00:19 +00:00
|
|
|
|
2021-12-31 10:13:21 +00:00
|
|
|
def url_exists(session, url: str, timeout_sec: int = 3,
               http_prefix: str = 'https', domain: str = 'testdomain') -> bool:
    """Return True if a GET on the given url succeeds (status 200 or 304).

    NOTE: when no session is supplied this optimistically returns True.
    """
    if not isinstance(url, str):
        print('url: ' + str(url))
        print('ERROR: url_exists failed, url should be a string')
        return False
    # identify this instance in the user agent
    user_agent = 'Epicyon/' + __version__
    if domain:
        user_agent += '; +' + http_prefix + '://' + domain + '/'
    req_headers = {'User-Agent': user_agent}
    req_params = {}
    if not session:
        print('WARN: url_exists failed, no session specified')
        return True
    try:
        result = session.get(url, headers=req_headers,
                             params=req_params,
                             timeout=timeout_sec,
                             allow_redirects=False)
        if result:
            if result.status_code in (200, 304):
                return True
            print('url_exists for ' + url + ' returned ' +
                  str(result.status_code))
    except BaseException:
        # deliberately broad: any failure at all means "not resolvable"
        print('EX: url_exists GET failed ' + str(url))
    return False
|
|
|
|
|
|
|
|
|
2021-12-31 10:13:21 +00:00
|
|
|
def _get_json_request(session, url: str, domain_full: str, session_headers: {},
                      session_params: {}, timeout_sec: int,
                      signing_priv_key_pem: str, quiet: bool, debug: bool,
                      return_json: bool) -> {}:
    """http GET for json

    Performs the GET without following redirects. On non-200 responses a
    warning is printed (with any Authorization header redacted) but the
    response body is still returned. If return_json is True the parsed
    json is returned, otherwise the raw response content.
    Returns None if the request raised an exception.
    NOTE: signing_priv_key_pem is accepted but not used here; signing is
    handled by _get_json_signed before delegating to this function.
    """
    try:
        result = session.get(url, headers=session_headers,
                             params=session_params, timeout=timeout_sec,
                             allow_redirects=False)
        if result.status_code != 200:
            # specific warnings for the most commonly seen failure codes
            if result.status_code == 401:
                print("WARN: get_json " + url + ' rejected by secure mode')
            elif result.status_code == 403:
                print('WARN: get_json Forbidden url: ' + url)
            elif result.status_code == 404:
                print('WARN: get_json Not Found url: ' + url)
            elif result.status_code == 410:
                print('WARN: get_json no longer available url: ' + url)
            elif result.status_code == 303:
                # redirects are not followed (allow_redirects=False above)
                print('WARN: get_json redirect not permitted: ' + url)
            else:
                # redact any bearer token before logging the headers
                session_headers2 = session_headers.copy()
                if session_headers2.get('Authorization'):
                    session_headers2['Authorization'] = 'REDACTED'
                print('WARN: get_json url: ' + url +
                      ' failed with error code ' +
                      str(result.status_code) +
                      ' headers: ' + str(session_headers2))
        if return_json:
            return result.json()
        return result.content
    except requests.exceptions.RequestException as ex:
        # redact any bearer token before logging the headers
        session_headers2 = session_headers.copy()
        if session_headers2.get('Authorization'):
            session_headers2['Authorization'] = 'REDACTED'
        if debug and not quiet:
            print('EX: get_json failed, url: ' + str(url) + ', ' +
                  'headers: ' + str(session_headers2) + ', ' +
                  'params: ' + str(session_params) + ', ' + str(ex))
    except ValueError as ex:
        # redact any bearer token before logging the headers
        session_headers2 = session_headers.copy()
        if session_headers2.get('Authorization'):
            session_headers2['Authorization'] = 'REDACTED'
        if debug and not quiet:
            print('EX: get_json failed, url: ' + str(url) + ', ' +
                  'headers: ' + str(session_headers2) + ', ' +
                  'params: ' + str(session_params) + ', ' + str(ex))
    except SocketError as ex:
        if not quiet:
            if ex.errno == errno.ECONNRESET:
                print('EX: get_json failed, ' +
                      'connection was reset during get_json ' + str(ex))
    return None
|
2019-06-28 19:36:39 +00:00
|
|
|
|
2020-04-04 11:00:19 +00:00
|
|
|
|
2021-12-31 10:13:21 +00:00
|
|
|
def _get_json_signed(session, url: str, domain_full: str, session_headers: {},
                     session_params: {}, timeout_sec: int,
                     signing_priv_key_pem: str, quiet: bool,
                     debug: bool) -> {}:
    """Authorized fetch - a signed version of GET

    Builds http-signature headers (Host/Date/Accept/Signature) for the
    request using signing_priv_key_pem, merges them into session_headers
    (mutating the caller's dict), then delegates the actual GET to
    _get_json_request. domain_full identifies the sending (signing)
    domain; the destination domain/port are derived from the url.
    Returns parsed json when the Accept type contains 'json', otherwise
    raw content; None on failure.
    """
    if not domain_full:
        if debug:
            print('No sending domain for signed GET')
        return None
    if '://' not in url:
        print('Invalid url: ' + url)
        return None
    http_prefix = url.split('://')[0]
    # destination domain (and optional port) taken from the url
    to_domain_full = url.split('://')[1]
    if '/' in to_domain_full:
        to_domain_full = to_domain_full.split('/')[0]

    # split the sending domain into host and port,
    # defaulting the port from the url scheme
    if ':' in domain_full:
        domain = domain_full.split(':')[0]
        port = domain_full.split(':')[1]
    else:
        domain = domain_full
        if http_prefix == 'https':
            port = 443
        else:
            port = 80

    # split the destination domain into host and port,
    # defaulting the port from the url scheme
    if ':' in to_domain_full:
        to_domain = to_domain_full.split(':')[0]
        to_port = to_domain_full.split(':')[1]
    else:
        to_domain = to_domain_full
        if http_prefix == 'https':
            to_port = 443
        else:
            to_port = 80

    if debug:
        print('Signed GET domain: ' + domain + ' ' + str(port))
        print('Signed GET to_domain: ' + to_domain + ' ' + str(to_port))
        print('Signed GET url: ' + url)
        print('Signed GET http_prefix: ' + http_prefix)
    # GET requests sign an empty body and no digest
    message_str = ''
    with_digest = False
    if to_domain_full + '/' in url:
        path = '/' + url.split(to_domain_full + '/')[1]
    else:
        path = '/actor'
    content_type = 'application/activity+json'
    if session_headers.get('Accept'):
        content_type = session_headers['Accept']
    signature_header_json = \
        create_signed_header(None, signing_priv_key_pem, 'actor', domain, port,
                             to_domain, to_port, path, http_prefix,
                             with_digest, message_str, content_type)
    if debug:
        print('Signed GET signature_header_json ' + str(signature_header_json))
    # update the session headers from the signature headers
    session_headers['Host'] = signature_header_json['host']
    session_headers['Date'] = signature_header_json['date']
    session_headers['Accept'] = signature_header_json['accept']
    session_headers['Signature'] = signature_header_json['signature']
    session_headers['Content-Length'] = '0'
    if debug:
        print('Signed GET session_headers ' + str(session_headers))

    # only parse the response as json when json was requested
    return_json = True
    if 'json' not in content_type:
        return_json = False
    return _get_json_request(session, url, domain_full, session_headers,
                             session_params, timeout_sec, None, quiet,
                             debug, return_json)
|
2021-08-31 09:10:49 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def get_json(signing_priv_key_pem: str,
             session, url: str, headers: {}, params: {}, debug: bool,
             version: str = __version__, http_prefix: str = 'https',
             domain: str = 'testdomain',
             timeout_sec: int = 20, quiet: bool = False) -> {}:
    """Download some json from the given url.

    When signing_priv_key_pem is supplied a signed GET (authorized
    fetch) is performed, otherwise a plain GET.
    Returns the parsed json, or None on failure.
    """
    if not isinstance(url, str):
        if debug and not quiet:
            print('url: ' + str(url))
            print('ERROR: get_json failed, url should be a string')
        return None
    session_headers = headers if headers else {}
    session_params = params if params else {}
    # identify this instance via the user agent
    agent_str = 'Epicyon/' + version
    if domain:
        agent_str += '; +' + http_prefix + '://' + domain + '/'
    session_headers['User-Agent'] = agent_str
    if not session:
        if not quiet:
            print('WARN: get_json failed, no session specified for get_json')
        return None

    if debug:
        # show low level http traffic
        HTTPConnection.debuglevel = 1

    if signing_priv_key_pem:
        return _get_json_signed(session, url, domain,
                                session_headers, session_params,
                                timeout_sec, signing_priv_key_pem,
                                quiet, debug)
    return _get_json_request(session, url, domain, session_headers,
                             session_params, timeout_sec,
                             None, quiet, debug, True)
|
2021-12-23 20:59:36 +00:00
|
|
|
|
|
|
|
|
2022-02-16 11:10:44 +00:00
|
|
|
def get_vcard(xml_format: bool,
              session, url: str, params: {}, debug: bool,
              version: str, http_prefix: str, domain: str,
              timeout_sec: int = 20, quiet: bool = False) -> {}:
    """Download a vcard

    Requests text/vcard, or application/vcard+xml when xml_format is
    True. Returns the response body decoded as utf-8 (even for non-200
    responses, after printing a warning), or None if the request raised
    an exception.
    """
    if not isinstance(url, str):
        if debug and not quiet:
            print('url: ' + str(url))
            print('ERROR: get_vcard failed, url should be a string')
        return None
    headers = {
        'Accept': 'text/vcard'
    }
    if xml_format:
        headers['Accept'] = 'application/vcard+xml'
    session_params = {}
    session_headers = {}
    if headers:
        session_headers = headers
    if params:
        session_params = params
    # identify this instance via the user agent
    session_headers['User-Agent'] = 'Epicyon/' + version
    if domain:
        session_headers['User-Agent'] += \
            '; +' + http_prefix + '://' + domain + '/'
    if not session:
        if not quiet:
            print('WARN: get_vcard failed, no session specified for get_vcard')
        return None

    if debug:
        # show low level http traffic
        HTTPConnection.debuglevel = 1

    try:
        result = session.get(url, headers=session_headers,
                             params=session_params, timeout=timeout_sec,
                             allow_redirects=False)
        if result.status_code != 200:
            # specific warnings for the most commonly seen failure codes
            if result.status_code == 401:
                print("WARN: get_vcard " + url + ' rejected by secure mode')
            elif result.status_code == 403:
                print('WARN: get_vcard Forbidden url: ' + url)
            elif result.status_code == 404:
                print('WARN: get_vcard Not Found url: ' + url)
            elif result.status_code == 410:
                print('WARN: get_vcard no longer available url: ' + url)
            else:
                # redact any bearer token before logging the headers
                session_headers2 = session_headers.copy()
                if session_headers2.get('Authorization'):
                    session_headers2['Authorization'] = 'REDACTED'
                print('WARN: get_vcard url: ' + url +
                      ' failed with error code ' +
                      str(result.status_code) +
                      ' headers: ' + str(session_headers2))
        return result.content.decode('utf-8')
    except requests.exceptions.RequestException as ex:
        # redact any bearer token before logging the headers
        session_headers2 = session_headers.copy()
        if session_headers2.get('Authorization'):
            session_headers2['Authorization'] = 'REDACTED'
        if debug and not quiet:
            print('EX: get_vcard failed, url: ' + str(url) + ', ' +
                  'headers: ' + str(session_headers2) + ', ' +
                  'params: ' + str(session_params) + ', ' + str(ex))
    except ValueError as ex:
        # redact any bearer token before logging the headers
        session_headers2 = session_headers.copy()
        if session_headers2.get('Authorization'):
            session_headers2['Authorization'] = 'REDACTED'
        if debug and not quiet:
            print('EX: get_vcard failed, url: ' + str(url) + ', ' +
                  'headers: ' + str(session_headers2) + ', ' +
                  'params: ' + str(session_params) + ', ' + str(ex))
    except SocketError as ex:
        if not quiet:
            if ex.errno == errno.ECONNRESET:
                print('EX: get_vcard failed, ' +
                      'connection was reset during get_vcard ' + str(ex))
    return None
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def download_html(signing_priv_key_pem: str,
                  session, url: str, headers: {}, params: {}, debug: bool,
                  version: str, http_prefix: str, domain: str,
                  timeout_sec: int = 20, quiet: bool = False) -> {}:
    """Download a html document from the given url.

    Returns the raw response content (bytes), or None on failure.
    """
    if not isinstance(url, str):
        if debug and not quiet:
            print('url: ' + str(url))
            print('ERROR: download_html failed, url should be a string')
        return None
    session_headers = headers if headers else {}
    session_params = params if params else {}
    # request html and identify this instance via the user agent
    session_headers['Accept'] = 'text/html'
    agent_str = 'Epicyon/' + version
    if domain:
        agent_str += '; +' + http_prefix + '://' + domain + '/'
    session_headers['User-Agent'] = agent_str
    if not session:
        if not quiet:
            print('WARN: download_html failed, ' +
                  'no session specified for download_html')
        return None

    if debug:
        # show low level http traffic
        HTTPConnection.debuglevel = 1

    if signing_priv_key_pem:
        # use a signed GET (authorized fetch)
        return _get_json_signed(session, url, domain,
                                session_headers, session_params,
                                timeout_sec, signing_priv_key_pem,
                                quiet, debug)
    return _get_json_request(session, url, domain, session_headers,
                             session_params, timeout_sec,
                             None, quiet, debug, False)
|
|
|
|
|
|
|
|
|
2022-11-09 10:40:11 +00:00
|
|
|
def verify_html(session, url: str, debug: bool,
                version: str, http_prefix: str, nickname: str, domain: str,
                timeout_sec: int = 20, quiet: bool = False) -> bool:
    """Verify that the handle for nickname@domain exists within the
    given url

    Downloads the page and looks for a rel="me" link pointing back at
    this account's actor url (either /@nickname or /users/nickname
    form, in double- or single-quoted attribute order variants).
    NOTE(review): the version parameter is unused here; __version__ is
    passed to download_html instead — confirm whether that is intended.
    """
    if not url_exists(session, url, 3, http_prefix, domain):
        return False

    as_header = {
        'Accept': 'text/html'
    }
    verification_site_html = \
        download_html(None, session, url,
                      as_header, None, debug, __version__,
                      http_prefix, domain, timeout_sec, quiet)
    if not verification_site_html:
        if debug:
            print('Verification site could not be contacted ' +
                  url)
        return False
    # download_html returns bytes, so decode to a string
    verification_site_html = verification_site_html.decode()

    # does the site contain rel="me" links?
    if ' rel="me" ' not in verification_site_html:
        return False

    # ensure that there are not too many rel="me" links
    sections = verification_site_html.split(' rel="me" ')
    me_links_count = len(sections) - 1
    if me_links_count > 5:
        return False

    # candidate actor urls for this account, without scheme
    actor_links = [
        domain + '/@' + nickname,
        domain + '/users/' + nickname
    ]
    for actor in actor_links:
        # onion/i2p domains are always plain http
        if domain.endswith('.onion') or domain.endswith('.i2p'):
            actor = 'http://' + actor
        else:
            actor = http_prefix + '://' + actor

        # double quotes
        link_str = ' rel="me" href="' + actor + '"'
        if link_str in verification_site_html:
            return True
        link_str = ' href="' + actor + '" rel="me"'
        if link_str in verification_site_html:
            return True

        # single quotes
        link_str = " rel=\"me\" href='" + actor + "'"
        if link_str in verification_site_html:
            return True
        link_str = " href='" + actor + "' rel=\"me\""
        if link_str in verification_site_html:
            return True
    return False
|
|
|
|
|
|
|
|
|
2022-11-09 12:10:29 +00:00
|
|
|
def site_is_verified(session, base_dir: str, http_prefix: str,
                     nickname: str, domain: str,
                     url: str, update: bool, debug: bool) -> bool:
    """Is the given website verified?

    Checks the account's verified_sites.txt cache first. When update is
    True and the url is not cached, performs verification via
    verify_html and records a success in the cache file.
    """
    sites_filename = \
        acct_dir(base_dir, nickname, domain) + '/verified_sites.txt'
    sites_file_exists = os.path.isfile(sites_filename)
    # already known to be verified?
    if sites_file_exists and \
       text_in_file(url + '\n', sites_filename, True):
        return True
    if not update:
        return False

    verified = verify_html(session, url, debug,
                           __version__, http_prefix, nickname, domain)
    if verified:
        # append to an existing cache file, otherwise create it
        open_mode = 'a+' if sites_file_exists else 'w+'
        try:
            with open(sites_filename, open_mode,
                      encoding='utf-8') as fp_verified:
                fp_verified.write(url + '\n')
        except OSError:
            print('EX: Verified sites could not be updated ' +
                  sites_filename)
    return verified
|
|
|
|
|
|
|
|
|
2022-05-12 12:39:07 +00:00
|
|
|
def download_ssml(signing_priv_key_pem: str,
                  session, url: str, headers: {}, params: {}, debug: bool,
                  version: str, http_prefix: str, domain: str,
                  timeout_sec: int = 20, quiet: bool = False) -> {}:
    """Download a ssml (speech synthesis markup) document.

    Returns the raw response content (bytes), or None on failure.
    """
    if not isinstance(url, str):
        if debug and not quiet:
            print('url: ' + str(url))
            print('ERROR: download_ssml failed, url should be a string')
        return None
    session_headers = headers if headers else {}
    session_params = params if params else {}
    # request ssml and identify this instance via the user agent
    session_headers['Accept'] = 'application/ssml+xml'
    agent_str = 'Epicyon/' + version
    if domain:
        agent_str += '; +' + http_prefix + '://' + domain + '/'
    session_headers['User-Agent'] = agent_str
    if not session:
        if not quiet:
            print('WARN: download_ssml failed, no session specified')
        return None

    if debug:
        # show low level http traffic
        HTTPConnection.debuglevel = 1

    if signing_priv_key_pem:
        # use a signed GET (authorized fetch)
        return _get_json_signed(session, url, domain,
                                session_headers, session_params,
                                timeout_sec, signing_priv_key_pem,
                                quiet, debug)
    return _get_json_request(session, url, domain, session_headers,
                             session_params, timeout_sec,
                             None, quiet, debug, False)
|
2021-08-31 09:10:49 +00:00
|
|
|
|
|
|
|
|
2022-02-28 11:55:36 +00:00
|
|
|
def _set_user_agent(session, http_prefix: str, domain_full: str) -> None:
    """Set the User-Agent header on the given session,
    identifying this Epicyon instance.
    """
    agent = ('Epicyon/' + __version__ + '; +' +
             http_prefix + '://' + domain_full + '/')
    session.headers.update({'User-Agent': agent})
|
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def post_json(http_prefix: str, domain_full: str,
              session, post_json_object: {}, federation_list: [],
              inbox_url: str, headers: {}, timeout_sec: int = 60,
              quiet: bool = False) -> str:
    """Post a json message to the inbox of another person

    Returns the response text on success, the empty string on timeout,
    and None on any other failure (domain not permitted, request
    exception, connection reset, or empty response).
    """
    # check that we are posting to a permitted domain
    if not url_permitted(inbox_url, federation_list):
        if not quiet:
            print('post_json: ' + inbox_url + ' not permitted')
        return None

    _set_user_agent(session, http_prefix, domain_full)

    try:
        post_result = \
            session.post(url=inbox_url,
                         data=json.dumps(post_json_object),
                         headers=headers, timeout=timeout_sec)
    except requests.Timeout as ex:
        if not quiet:
            print('EX: post_json timeout ' + inbox_url + ' ' +
                  json.dumps(post_json_object) + ' ' + str(headers))
            print(ex)
        # timeout returns '' rather than None so callers can distinguish it
        return ''
    except requests.exceptions.RequestException as ex:
        if not quiet:
            print('EX: post_json requests failed ' + inbox_url + ' ' +
                  json.dumps(post_json_object) + ' ' + str(headers) +
                  ' ' + str(ex))
        return None
    except SocketError as ex:
        if not quiet and ex.errno == errno.ECONNRESET:
            print('EX: connection was reset during post_json')
        return None
    except ValueError as ex:
        if not quiet:
            print('EX: post_json failed ' + inbox_url + ' ' +
                  json.dumps(post_json_object) + ' ' + str(headers) +
                  ' ' + str(ex))
        return None
    if post_result:
        return post_result.text
    return None
|
2019-07-16 14:23:06 +00:00
|
|
|
|
2020-04-04 11:00:19 +00:00
|
|
|
|
2022-06-14 10:24:29 +00:00
|
|
|
def post_json_string(session, post_json_str: str,
                     federation_list: [],
                     inbox_url: str,
                     headers: {},
                     debug: bool,
                     http_prefix: str, domain_full: str,
                     timeout_sec: int = 30,
                     quiet: bool = False) -> (bool, bool, int):
    """Post a json message string to the inbox of another person

    Returns (posted_ok, unauthorized, status_code).
    The second boolean returned is true if the send is unauthorized.
    A request exception yields (None, None, 0).
    NOTE: Here we post a string rather than the original json so that
    conversions between string and json format don't invalidate
    the message body digest of http signatures
    """
    # check that we are posting to a permitted domain
    if not url_permitted(inbox_url, federation_list):
        if not quiet:
            print('post_json_string: ' + inbox_url + ' not permitted')
        return False, True, 0

    _set_user_agent(session, http_prefix, domain_full)

    try:
        post_result = \
            session.post(url=inbox_url, data=post_json_str,
                         headers=headers, timeout=timeout_sec)
    except requests.exceptions.RequestException as ex:
        if not quiet:
            print('EX: error during post_json_string requests ' + str(ex))
        return None, None, 0
    except SocketError as ex:
        if not quiet and ex.errno == errno.ECONNRESET:
            print('EX: connection was reset during post_json_string')
        if not quiet:
            print('EX: post_json_string failed ' + inbox_url + ' ' +
                  post_json_str + ' ' + str(headers))
        return None, None, 0
    except ValueError as ex:
        if not quiet:
            print('EX: error during post_json_string ' + str(ex))
        return None, None, 0
    # anything outside 200..202 is a failure
    if post_result.status_code < 200 or post_result.status_code > 202:
        # 400-405 (except 404) are treated as authorization failures
        if post_result.status_code >= 400 and \
           post_result.status_code <= 405 and \
           post_result.status_code != 404:
            if not quiet:
                print('WARN: Post to ' + inbox_url +
                      ' is unauthorized. Code ' +
                      str(post_result.status_code))
            return False, True, post_result.status_code

        if not quiet:
            print('WARN: Failed to post to ' + inbox_url +
                  ' with headers ' + str(headers) +
                  ' status code ' + str(post_result.status_code))
        return False, False, post_result.status_code
    return True, False, 0
|
2020-04-04 11:00:19 +00:00
|
|
|
|
2019-08-17 10:15:01 +00:00
|
|
|
|
2021-12-31 10:13:21 +00:00
|
|
|
def post_image(session, attach_image_filename: str, federation_list: [],
               inbox_url: str, headers: {},
               http_prefix: str, domain_full: str) -> str:
    """Post an image to the inbox of another person or outbox via c2s

    Returns the response text on success, otherwise None.
    """
    # check that we are posting to a permitted domain
    if not url_permitted(inbox_url, federation_list):
        # NOTE: this previously logged 'post_json:', which pointed
        # reviewers at the wrong function
        print('post_image: ' + inbox_url + ' not permitted')
        return None

    if not is_image_file(attach_image_filename):
        print('Image must be png, jpg, jxl, webp, avif, heic, gif or svg')
        return None
    if not os.path.isfile(attach_image_filename):
        print('Image not found: ' + attach_image_filename)
        return None
    # map filename extensions to their mime types, in the same
    # table style used by download_image
    extension_content_types = {
        '.png': 'image/png',
        '.gif': 'image/gif',
        '.webp': 'image/webp',
        '.avif': 'image/avif',
        '.heic': 'image/heic',
        '.jxl': 'image/jxl',
        '.svg': 'image/svg+xml'
    }
    # default to jpeg for any other permitted extension (.jpg/.jpeg)
    content_type = 'image/jpeg'
    for extension, ext_content_type in extension_content_types.items():
        if attach_image_filename.endswith(extension):
            content_type = ext_content_type
            break
    headers['Content-type'] = content_type

    with open(attach_image_filename, 'rb') as av_file:
        media_binary = av_file.read()

        _set_user_agent(session, http_prefix, domain_full)

        try:
            post_result = session.post(url=inbox_url, data=media_binary,
                                       headers=headers)
        except requests.exceptions.RequestException as ex:
            print('EX: error during post_image requests ' + str(ex))
            return None
        except SocketError as ex:
            if ex.errno == errno.ECONNRESET:
                print('EX: connection was reset during post_image')
            print('ERROR: post_image failed ' + inbox_url + ' ' +
                  str(headers) + ' ' + str(ex))
            return None
        except ValueError as ex:
            print('EX: error during post_image ' + str(ex))
            return None
        # a Response is falsy for status codes >= 400, so failed
        # posts fall through to return None
        if post_result:
            return post_result.text
    return None
|
2021-11-01 17:12:17 +00:00
|
|
|
|
|
|
|
|
2022-01-05 15:21:44 +00:00
|
|
|
def _looks_like_url(url: str) -> bool:
|
|
|
|
"""Does the given string look like a url
|
|
|
|
"""
|
|
|
|
if not url:
|
|
|
|
return False
|
|
|
|
if '.' not in url:
|
|
|
|
return False
|
|
|
|
if '://' not in url:
|
|
|
|
return False
|
|
|
|
return True
|
|
|
|
|
|
|
|
|
2022-06-14 10:24:29 +00:00
|
|
|
def download_image(session, url: str, image_filename: str, debug: bool,
                   force: bool = False) -> bool:
    """Downloads an image with an expected mime type

    The Accept header is chosen from the url's file extension.
    Returns True when the image was downloaded and written to
    image_filename, otherwise False. If the file already exists
    it is only re-downloaded when force is True.
    """
    if not _looks_like_url(url):
        if debug:
            print('WARN: download_image, ' +
                  url + ' does not look like a url')
        # fix: previously returned None, contradicting the declared
        # bool return type used on every other path
        return False

    # try different image types
    image_formats = {
        'png': 'png',
        'jpg': 'jpeg',
        'jpeg': 'jpeg',
        'jxl': 'jxl',
        'gif': 'gif',
        'svg': 'svg+xml',
        'webp': 'webp',
        'avif': 'avif',
        'heic': 'heic',
        'ico': 'x-icon'
    }
    session_headers = None
    for im_format, mime_type in image_formats.items():
        # match either a trailing extension or one followed by a query
        if url.endswith('.' + im_format) or \
           '.' + im_format + '?' in url:
            session_headers = {
                'Accept': 'image/' + mime_type
            }
            break

    if not session_headers:
        if debug:
            print('download_image: no session headers')
        return False

    if not os.path.isfile(image_filename) or force:
        try:
            if debug:
                print('Downloading image url: ' + url)
            result = session.get(url,
                                 headers=session_headers,
                                 params=None,
                                 allow_redirects=False)
            if result.status_code < 200 or \
               result.status_code > 202:
                if debug:
                    print('Image download failed with status ' +
                          str(result.status_code))
                # remove partial download
                if os.path.isfile(image_filename):
                    try:
                        os.remove(image_filename)
                    except OSError:
                        print('EX: download_image unable to delete ' +
                              image_filename)
            else:
                with open(image_filename, 'wb') as im_file:
                    im_file.write(result.content)
                    if debug:
                        print('Image downloaded from ' + url)
                    return True
        except BaseException as ex:
            # deliberately broad: any failure during the fetch or the
            # file write results in False rather than propagating
            print('EX: Failed to download image: ' +
                  str(url) + ' ' + str(ex))
    return False
|
2021-12-16 20:57:30 +00:00
|
|
|
|
|
|
|
|
2021-12-29 21:55:09 +00:00
|
|
|
def download_image_any_mime_type(session, url: str,
                                 timeout_sec: int, debug: bool):
    """http GET for an image with any mime type

    Returns a tuple of (content bytes, mime type string).
    Either element may be None: (None, None) on any failure, and
    (content, None) when the response content type is not a
    recognized image type.
    """
    # check that this looks like a url
    if not _looks_like_url(url):
        if debug:
            print('WARN: download_image_any_mime_type, ' +
                  url + ' does not look like a url')
        return None, None

    mime_type = None
    content_type = None
    result = None
    image_mime_types = \
        'image/x-icon, image/png, image/webp, image/jpeg, image/gif, ' + \
        'image/avif, image/heic, image/jxl, image/svg+xml'
    session_headers = {
        'Accept': image_mime_types
    }
    try:
        result = session.get(url, headers=session_headers,
                             timeout=timeout_sec,
                             allow_redirects=False)
    except requests.exceptions.RequestException as ex:
        print('EX: download_image_any_mime_type failed1: ' +
              str(url) + ', ' + str(ex))
        return None, None
    except ValueError as ex:
        print('EX: download_image_any_mime_type failed2: ' +
              str(url) + ', ' + str(ex))
        return None, None
    except SocketError as ex:
        if ex.errno == errno.ECONNRESET:
            print('EX: download_image_any_mime_type failed, ' +
                  'connection was reset ' + str(ex))
        return None, None

    if not result:
        return None, None

    if result.status_code != 200:
        print('WARN: download_image_any_mime_type: ' + url +
              ' failed with error code ' + str(result.status_code))
        return None, None

    # check common casings of the content type header
    if result.headers.get('content-type'):
        content_type = result.headers['content-type']
    elif result.headers.get('Content-type'):
        content_type = result.headers['Content-type']
    elif result.headers.get('Content-Type'):
        content_type = result.headers['Content-Type']

    if not content_type:
        return None, None

    # fix: this was previously a dict of file extensions to mime
    # subtypes, but only the subtypes were ever used and 'jpeg'
    # appeared twice; a flat tuple is sufficient
    image_mime_subtypes = (
        'x-icon', 'png', 'jpeg', 'jxl', 'gif',
        'svg+xml', 'webp', 'avif', 'heic'
    )
    for m_subtype in image_mime_subtypes:
        if 'image/' + m_subtype in content_type:
            mime_type = 'image/' + m_subtype
            break
    return result.content, mime_type
|
2022-02-24 10:18:54 +00:00
|
|
|
|
|
|
|
|
|
|
|
def get_method(method_name: str, xml_str: str,
               session, url: str, params: {}, headers: {}, debug: bool,
               version: str, http_prefix: str, domain: str,
               timeout_sec: int = 20, quiet: bool = False) -> {}:
    """Part of the vcard interface

    Sends a REPORT/PUT/PROPFIND request with the given xml payload
    and returns the response body decoded as utf-8, or None when the
    method is unrecognized, the url is not a string, no session is
    given, or the request raises.
    """
    def _redacted(hdrs: {}) -> {}:
        # avoid leaking credentials into log output
        redacted = hdrs.copy()
        if redacted.get('Authorization'):
            redacted['Authorization'] = 'REDACTED'
        return redacted

    if method_name not in ("REPORT", "PUT", "PROPFIND"):
        print("Unrecognized method: " + method_name)
        return None
    if not isinstance(url, str):
        if debug and not quiet:
            print('url: ' + str(url))
            print('ERROR: get_method failed, url should be a string')
        return None
    # fix: work on a copy so that the caller's headers dict is not
    # mutated with Accept/User-Agent entries
    if not headers:
        session_headers = {}
    else:
        session_headers = headers.copy()
    session_headers['Accept'] = 'application/xml'
    session_params = {}
    if params:
        session_params = params
    session_headers['User-Agent'] = 'Epicyon/' + version
    if domain:
        session_headers['User-Agent'] += \
            '; +' + http_prefix + '://' + domain + '/'
    if not session:
        if not quiet:
            print('WARN: get_method failed, ' +
                  'no session specified for get_vcard')
        return None

    if debug:
        # show low level http traffic
        HTTPConnection.debuglevel = 1

    try:
        result = session.request(method_name, url, headers=session_headers,
                                 data=xml_str,
                                 params=session_params, timeout=timeout_sec)
        # 207 is a webdav multi-status response
        if result.status_code not in (200, 207):
            if result.status_code == 401:
                print("WARN: get_method " + url + ' rejected by secure mode')
            elif result.status_code == 403:
                print('WARN: get_method Forbidden url: ' + url)
            elif result.status_code == 404:
                print('WARN: get_method Not Found url: ' + url)
            elif result.status_code == 410:
                print('WARN: get_method no longer available url: ' + url)
            else:
                print('WARN: get_method url: ' + url +
                      ' failed with error code ' +
                      str(result.status_code) +
                      ' headers: ' + str(_redacted(session_headers)))
        return result.content.decode('utf-8')
    except requests.exceptions.RequestException as ex:
        if debug and not quiet:
            print('EX: get_method failed, url: ' + str(url) + ', ' +
                  'headers: ' + str(_redacted(session_headers)) + ', ' +
                  'params: ' + str(session_params) + ', ' + str(ex))
    except ValueError as ex:
        if debug and not quiet:
            print('EX: get_method failed, url: ' + str(url) + ', ' +
                  'headers: ' + str(_redacted(session_headers)) + ', ' +
                  'params: ' + str(session_params) + ', ' + str(ex))
    except SocketError as ex:
        if not quiet:
            if ex.errno == errno.ECONNRESET:
                print('EX: get_method failed, ' +
                      'connection was reset during get_vcard ' + str(ex))
    return None
|
2022-03-11 13:27:54 +00:00
|
|
|
|
|
|
|
|
|
|
|
def get_session_for_domains(server, calling_domain: str, referer_domain: str):
    """Returns the appropriate session for the given domains
    """
    referer = referer_domain if referer_domain is not None else ''

    def _on_network(suffix: str) -> bool:
        # true when either domain is on the network with this suffix
        return (suffix + ':' in calling_domain or
                calling_domain.endswith(suffix) or
                suffix + ':' in referer or
                referer.endswith(suffix))

    if _on_network('.onion'):
        if not server.domain.endswith('.onion'):
            if server.onion_domain and server.session_onion:
                return server.session_onion, 'tor'
    if _on_network('.i2p'):
        if not server.domain.endswith('.i2p'):
            if server.i2p_domain and server.session_i2p:
                return server.session_i2p, 'i2p'
    # clearnet default
    return server.session, server.proxy_type
|
|
|
|
|
|
|
|
|
|
|
|
def get_session_for_domain(server, referer_domain: str):
    """Returns the appropriate session for the given domain
    """
    # thin wrapper: the referer acts as the calling domain as well
    domain = referer_domain
    return get_session_for_domains(server, domain, domain)
|
|
|
|
|
|
|
|
|
|
|
|
def set_session_for_sender(server, proxy_type: str, new_session) -> None:
    """Sets the appropriate session for the given sender
    """
    use_onion = (proxy_type == 'tor' and
                 not server.domain.endswith('.onion') and
                 server.onion_domain and server.session_onion)
    if use_onion:
        server.session_onion = new_session
        return
    use_i2p = (proxy_type == 'i2p' and
               not server.domain.endswith('.i2p') and
               server.i2p_domain and server.session_i2p)
    if use_i2p:
        server.session_i2p = new_session
        return
    # clearnet default
    server.session = new_session
|