Revert converting tabs to whitespaces

Valentin 2022-12-24 21:31:06 +01:00
parent 9842494fc6
commit 841d3e0ad9
No known key found for this signature in database
GPG key ID: AA2F52BADD7EF624


@@ -11,144 +11,144 @@ from urllib.parse import urlparse
from . import __version__
from .misc import (
	MIMETYPES,
	DotDict,
	Message
)


HEADERS = {
	'Accept': f'{MIMETYPES["activity"]}, {MIMETYPES["json"]};q=0.9',
	'User-Agent': f'ActivityRelay/{__version__}'
}


class Cache(LRUCache):
	def set_maxsize(self, value):
		self.__maxsize = int(value)


class HttpClient:
	def __init__(self, database, limit=100, timeout=10, cache_size=1024):
		self.database = database
		self.cache = Cache(cache_size)
		self.cfg = {'limit': limit, 'timeout': timeout}
		self._conn = None
		self._session = None

	@property
	def limit(self):
		return self.cfg['limit']

	@property
	def timeout(self):
		return self.cfg['timeout']

	async def open(self):
		if self._session:
			return

		self._conn = TCPConnector(
			limit=self.limit,
			ttl_dns_cache=300,
		)

		self._session = ClientSession(
			connector=self._conn,
			headers=HEADERS,
			connector_owner=True,
			timeout=ClientTimeout(total=self.timeout)
		)

	async def close(self):
		if not self._session:
			return

		await self._session.close()
		await self._conn.close()

		self._conn = None
		self._session = None

	async def get(self, url, sign_headers=False, loads=None, force=False):
		await self.open()

		try:
			url, _ = url.split('#', 1)
		except:
			pass

		if not force and url in self.cache:
			return self.cache[url]

		headers = {}

		if sign_headers:
			headers.update(self.database.signer.sign_headers(
				'GET', url, algorithm='original'))

		try:
			logging.verbose(f'Fetching resource: {url}')

			async with self._session.get(url, headers=headers) as resp:
				# Not expecting a response with 202s, so just return
				if resp.status == 202:
					return

				elif resp.status != 200:
					logging.verbose(
						f'Received error when requesting {url}: {resp.status}')
					logging.verbose(await resp.read())  # change this to debug
					return

				if loads:
					message = await resp.json(loads=loads)

				elif resp.content_type == MIMETYPES['activity']:
					message = await resp.json(loads=Message.new_from_json)

				elif resp.content_type == MIMETYPES['json']:
					message = await resp.json(loads=DotDict.new_from_json)

				else:
					# todo: raise TypeError or something
					logging.verbose(
						f'Invalid Content-Type for "{url}": {resp.content_type}')
					return logging.debug(f'Response: {resp.read()}')

			logging.debug(f'{url} >> resp {message.to_json(4)}')

			self.cache[url] = message
			return message

		except JSONDecodeError:
			logging.verbose(f'Failed to parse JSON')

		except (ClientConnectorError, ServerTimeoutError):
			logging.verbose(f'Failed to connect to {urlparse(url).netloc}')

		except Exception as e:
			traceback.print_exc()
			raise e

	async def post(self, url, message):
		instance = self.database.get_inbox(url)

		# Using the old algo by default is probably a better idea right now
		if instance and instance.get('software') in {'mastodon'}:
			algorithm = 'hs2019'

		else:
			algorithm = 'original'

		headers = {'Content-Type': 'application/activity+json'}
		headers.update(self.database.signer.sign_headers(
			'POST', url, message, algorithm=algorithm))

		try:
			logging.verbose(f'Sending "{message.type}" to {url}')
			logging.verbose(
				f'url: {url}\nheaders: {headers}\ndata: {message.to_json()}')

			# The following does not work and throws exception on 'relay inbox follow':
			# Traceback (most recent call last):
@@ -166,62 +166,62 @@ class HttpClient:
			#   raise RuntimeError('Event loop is closed')
			# RuntimeError: Event loop is closed
			# ↓↓↓↓
			# async with self._session.post(url, headers=headers, data=message.to_json()) as resp:
			# 	## Not expecting a response, so just return
			# 	if resp.status in {200, 202}:
			# 		return logging.info(f'Successfully sent "{message.type}" to {url}')
			# 	logging.info(f'Received error when pushing to {url}: {resp.status}')
			# 	return logging.info(await resp.read())  # change this to debug

			# Creating a session here works for some reason and does not throw an error
			async with ClientSession(
					connector=TCPConnector(
						limit=self.limit, ttl_dns_cache=300),
					headers=HEADERS,
					connector_owner=True,
					timeout=ClientTimeout(total=self.timeout)) as session:
				async with session.post(url, headers=headers, data=message.to_json()) as resp:
					# Not expecting a response, so just return
					if resp.status in {200, 202}:
						return logging.info(f'Successfully sent "{message.type}" to {url}')

					logging.info(
						f'Received error when pushing to {url}: {resp.status}')
					# change this to debug
					return logging.info(await resp.read())

		except (ClientConnectorError, ServerTimeoutError):
			logging.verbose(f'Failed to connect to {url}')

		# prevent workers from being brought down
		except Exception as e:
			traceback.print_exc()

	## Additional methods ##

	async def fetch_nodeinfo(self, domain):
		nodeinfo_url = None
		wk_nodeinfo = await self.get(
			f'https://{domain}/.well-known/nodeinfo',
			loads=WellKnownNodeinfo.new_from_json
		)

		if not wk_nodeinfo:
			logging.verbose(
				f'Failed to fetch well-known nodeinfo url for domain: {domain}')
			return False

		for version in ['20', '21']:
			try:
				nodeinfo_url = wk_nodeinfo.get_url(version)

			except KeyError:
				pass

		if not nodeinfo_url:
			logging.verbose(
				f'Failed to fetch nodeinfo url for domain: {domain}')
			return False

		return await self.get(nodeinfo_url, loads=Nodeinfo.new_from_json) or False
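
For orientation (not part of the commit): a minimal usage sketch of the HttpClient above. It assumes `db` is the relay's database object, i.e. something providing `get_inbox()` and a `signer` with `sign_headers()`; the URLs and the `main` wrapper are placeholders.

import asyncio

async def main(db):
	client = HttpClient(db, limit=100, timeout=10, cache_size=1024)

	# First fetch goes over the network; the parsed Message lands in the LRU cache.
	actor = await client.get('https://example.com/actor', sign_headers=True)

	# A repeat fetch of the same URL is served from the cache unless force=True.
	actor = await client.get('https://example.com/actor')

	# Nodeinfo discovery: the well-known document first, then the 2.0/2.1 URL.
	info = await client.fetch_nodeinfo('example.com')

	await client.close()

# asyncio.run(main(db))  # with a real database object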
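
On the commented-out block in post() above: "RuntimeError: Event loop is closed" is the usual symptom of an aiohttp ClientSession outliving the event loop it was created in. A minimal sketch of that failure mode (my own illustration, not from the commit), assuming each operation runs under its own asyncio.run():

import asyncio
from aiohttp import ClientSession

session = None

async def make_session():
	global session
	# The session's connector binds to the loop running right now.
	session = ClientSession()

async def use_session():
	# Runs in a fresh loop, but the connector still references the old,
	# already-closed one -> RuntimeError: Event loop is closed
	async with session.get('https://example.com') as resp:
		print(resp.status)
	await session.close()

asyncio.run(make_session())  # first loop: create the session, then the loop closes
asyncio.run(use_session())   # second loop: raises RuntimeError

Creating the session inside the currently running loop, as post() does, avoids the stale reference; keeping a single loop alive for the client's whole lifetime would let the shared self._session work as intended.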