2022-07-20 22:47:08 +00:00
|
|
|
__filename__ = "importFollowing.py"
|
|
|
|
__author__ = "Bob Mottram"
|
|
|
|
__license__ = "AGPL3+"
|
|
|
|
__version__ = "1.3.0"
|
|
|
|
__maintainer__ = "Bob Mottram"
|
|
|
|
__email__ = "bob@libreserver.org"
|
|
|
|
__status__ = "Production"
|
|
|
|
__module_group__ = "Core"
|
|
|
|
|
|
|
|
import os
|
|
|
|
import time
|
|
|
|
import random
|
2022-11-07 18:38:20 +00:00
|
|
|
from utils import acct_dir
|
2022-07-20 22:47:08 +00:00
|
|
|
from utils import is_account_dir
|
2022-07-21 09:18:01 +00:00
|
|
|
from utils import get_nickname_from_actor
|
|
|
|
from utils import get_domain_from_actor
|
2022-07-20 22:47:08 +00:00
|
|
|
from follow import is_following_actor
|
|
|
|
from follow import send_follow_request
|
|
|
|
from session import create_session
|
|
|
|
from session import set_session_for_sender
|
2022-07-28 09:59:18 +00:00
|
|
|
from threads import begin_thread
|
2022-07-20 22:47:08 +00:00
|
|
|
|
|
|
|
|
|
|
|
def _establish_import_session(httpd,
                              calling_function: str,
                              curr_session,
                              proxy_type: str):
    """Return a usable session for sending import follow requests.

    If curr_session is already live it is returned unchanged; otherwise a
    new session is created for the given proxy type and registered on the
    server. Returns None if session creation fails.
    """
    # fast path: an existing session is reused as-is
    if curr_session:
        return curr_session
    print('DEBUG: creating new import session during ' + calling_function)
    new_session = create_session(proxy_type)
    if not new_session:
        print('ERROR: failed to create import session during ' +
              calling_function)
        return None
    # remember the new session on the server object so that subsequent
    # sends reuse it
    set_session_for_sender(httpd, proxy_type, new_session)
    return new_session
|
|
|
|
|
|
|
|
|
|
|
|
def _update_import_following(base_dir: str,
                             handle: str, httpd,
                             import_filename: str) -> bool:
    """Send out follow requests from the import csv file

    Processes at most ONE handle per call: the first actionable line
    (after shuffling) gets a follow request, is removed from the csv,
    and True is returned. Lines already being followed are removed
    without sending anything. Returns False when the file could not be
    read or no line produced a follow request (caller then deletes the
    csv file).
    """
    following_str = ''
    try:
        with open(import_filename, 'r', encoding='utf-8') as fp_import:
            following_str = fp_import.read()
    except OSError:
        print('Ex: failed to load import file ' + import_filename)
        return False
    if following_str:
        # session used for clearnet sends, cached across loop iterations
        main_session = None
        lines = following_str.split('\n')
        # shuffle so repeated calls don't always retry the same handle first
        random.shuffle(lines)
        nickname = handle.split('@')[0]
        domain = handle.split('@')[1]
        for line in lines:
            # keep the untouched line so it can be removed from the csv later
            orig_line = line
            notes = None
            line = line.strip()
            if ',' in line:
                # csv row (Mastodon-style export); first field is the
                # handle/actor, fifth field (if present) is free-text notes
                fields = line.split(',')
                line = fields[0].strip()
                if len(fields) >= 5:
                    notes = fields[4]
            if line.startswith('#'):
                # comment
                continue
            following_nickname = get_nickname_from_actor(line)
            if not following_nickname:
                continue
            following_domain, following_port = get_domain_from_actor(line)
            if not following_domain:
                continue
            if following_nickname == nickname and \
               following_domain == domain:
                # don't follow yourself
                continue
            following_handle = following_nickname + '@' + following_domain
            if notes:
                # import the notes alongside the follow; <br> was used as
                # the newline escape in the csv
                notes = notes.replace('<br>', '\n')
                person_notes_filename = \
                    acct_dir(base_dir, nickname, domain) + \
                    '/notes/' + following_handle + '.txt'
                # NOTE(review): assumes the /notes directory already
                # exists for this account — confirm it is created at
                # account setup, otherwise this write always fails
                try:
                    with open(person_notes_filename, 'w+',
                              encoding='utf-8') as fp_notes:
                        fp_notes.write(notes)
                except OSError:
                    print('EX: Unable to import notes for ' +
                          following_handle)
            if is_following_actor(base_dir,
                                  nickname, domain, following_handle):
                # remove the followed handle from the import list
                following_str = following_str.replace(orig_line + '\n', '')
                try:
                    with open(import_filename, 'w+',
                              encoding='utf-8') as fp_import:
                        fp_import.write(following_str)
                except OSError:
                    print('EX: unable to remove import 1 ' + line +
                          ' from ' + import_filename)
                continue

            # send follow request
            curr_domain = domain
            curr_port = httpd.port
            curr_http_prefix = httpd.http_prefix
            following_actor = following_handle
            if ':' in following_domain:
                # NOTE(review): this takes the port from the handle string,
                # so following_port becomes a str here (get_domain_from_actor
                # may return an int) — confirm send_follow_request accepts
                # a string port
                following_port = following_handle.split(':')[1]
                following_domain = following_domain.split(':')[0]

            # get the appropriate session: clearnet by default, switching
            # to the onion/i2p session when following across networks
            curr_session = main_session
            curr_proxy_type = httpd.proxy_type
            use_onion_session = False
            use_i2p_session = False
            if '.onion' not in domain and \
               httpd.onion_domain and '.onion' in following_domain:
                curr_session = httpd.session_onion
                curr_domain = httpd.onion_domain
                curr_port = 80
                following_port = 80
                curr_http_prefix = 'http'
                curr_proxy_type = 'tor'
                use_onion_session = True
            if '.i2p' not in domain and \
               httpd.i2p_domain and '.i2p' in following_domain:
                curr_session = httpd.session_i2p
                curr_domain = httpd.i2p_domain
                curr_port = 80
                following_port = 80
                curr_http_prefix = 'http'
                curr_proxy_type = 'i2p'
                use_i2p_session = True

            # recreate the session if it was lost
            curr_session = \
                _establish_import_session(httpd, "import follow",
                                          curr_session, curr_proxy_type)
            if curr_session:
                # store any freshly created session in the matching slot
                if use_onion_session:
                    httpd.session_onion = curr_session
                elif use_i2p_session:
                    httpd.session_i2p = curr_session
                else:
                    main_session = curr_session

            send_follow_request(curr_session,
                                base_dir, nickname,
                                domain, curr_domain, curr_port,
                                curr_http_prefix,
                                following_nickname,
                                following_domain,
                                following_actor,
                                following_port, curr_http_prefix,
                                False, httpd.federation_list,
                                httpd.send_threads,
                                httpd.postLog,
                                httpd.cached_webfingers,
                                httpd.person_cache, httpd.debug,
                                httpd.project_version,
                                httpd.signing_priv_key_pem,
                                httpd.domain,
                                httpd.onion_domain,
                                httpd.i2p_domain)

            # remove the followed handle from the import list
            following_str = following_str.replace(orig_line + '\n', '')
            try:
                with open(import_filename, 'w+',
                          encoding='utf-8') as fp_import:
                    fp_import.write(following_str)
            except OSError:
                print('EX: unable to remove import 2 ' + line +
                      ' from ' + import_filename)
            # one follow request sent; the caller will invoke this again
            return True
    return False
|
|
|
|
|
|
|
|
|
|
|
|
def run_import_following(base_dir: str, httpd):
    """Sends out follow requests for imported following csv files

    Daemon loop: every 20 seconds pick account directories in random
    order and, for the first one holding an import_following.csv, send
    a single follow request from it. A csv which yields no follow
    request is deleted.
    """
    accounts_path = base_dir + '/accounts'
    while True:
        time.sleep(20)

        # top-level account directories, in random sequence
        subdirs = next(os.walk(accounts_path), (None, [], None))[1]
        accounts_list = [acct for acct in subdirs
                         if '@' in acct and is_account_dir(acct)]
        if not accounts_list:
            continue

        # check if each account has an import csv
        random.shuffle(accounts_list)
        for account in accounts_list:
            import_filename = \
                accounts_path + '/' + account + '/import_following.csv'

            if not os.path.isfile(import_filename):
                continue
            if _update_import_following(base_dir, account, httpd,
                                        import_filename):
                # a follow request was sent; wait before the next one
                break
            # nothing left to import from this csv, so remove it
            try:
                os.remove(import_filename)
            except OSError:
                print('EX: unable to remove import file ' +
                      import_filename)
|
|
|
|
|
|
|
|
|
|
|
|
def run_import_following_watchdog(project_version: str, httpd) -> None:
    """Imports following lists from csv for every account on the instance

    Starts the import-following thread and then polls it every 50
    seconds, cloning and restarting it whenever it has died.
    """
    print('THREAD: Starting import following watchdog ' + project_version)
    # keep a pristine clone so the thread can be respawned after a crash
    original_import_thread = \
        httpd.thrImportFollowing.clone(run_import_following)
    begin_thread(httpd.thrImportFollowing,
                 'run_import_following_watchdog')
    while True:
        time.sleep(50)
        if not httpd.thrImportFollowing.is_alive():
            httpd.thrImportFollowing.kill()
            print('THREAD: restarting import following watchdog')
            httpd.thrImportFollowing = \
                original_import_thread.clone(run_import_following)
            begin_thread(httpd.thrImportFollowing,
                         'run_import_following_watchdog 2')
            print('Restarting import following...')
|