Add software check on follow/unfollow

Valentin 2022-12-24 22:23:32 +01:00
parent 3d60ae2bbc
commit 6a0c1fe726
No known key found for this signature in database
GPG key ID: AA2F52BADD7EF624
2 changed files with 65 additions and 76 deletions
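
In short: HttpClient.post() gains an optional software hint so the CLI can pick the right HTTP-signature algorithm even when the target inbox is not yet stored in the database (as on a first follow). A minimal sketch of the selection logic, distilled from the diff below; the helper name is made up here, and the 'original' fallback is an assumption based on the algorithm used for GET requests:

def choose_algorithm(instance, software):
    # Mastodon expects hs2019 signatures; everything else is assumed to keep
    # the relay's 'original' algorithm as the default.
    if (instance and instance.get('software') in {'mastodon'}) or (software and software in {'mastodon'}):
        return 'hs2019'
    return 'original'

# A stored instance record and the new software hint both trigger hs2019;
# the hint matters on first contact, before the inbox is in the database.
assert choose_algorithm({'software': 'mastodon'}, None) == 'hs2019'
assert choose_algorithm(None, 'mastodon') == 'hs2019'
assert choose_algorithm(None, None) == 'original'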

relay/http_client.py

@@ -87,55 +87,60 @@ class HttpClient:
         headers.update(self.database.signer.sign_headers(
             'GET', url, algorithm='original'))
 
-        try:
-            logging.verbose(f'Fetching resource: {url}')
-
-            async with self._session.get(url, headers=headers) as resp:
-                # Not expecting a response with 202s, so just return
-                if resp.status == 202:
-                    return
-
-                elif resp.status != 200:
-                    logging.verbose(
-                        f'Received error when requesting {url}: {resp.status}')
-                    logging.verbose(await resp.read()) # change this to debug
-                    return
-
-                if loads:
-                    message = await resp.json(loads=loads)
-
-                elif resp.content_type == MIMETYPES['activity']:
-                    message = await resp.json(loads=Message.new_from_json)
-
-                elif resp.content_type == MIMETYPES['json']:
-                    message = await resp.json(loads=DotDict.new_from_json)
-
-                else:
-                    # todo: raise TypeError or something
-                    logging.verbose(
-                        f'Invalid Content-Type for "{url}": {resp.content_type}')
-                    return logging.debug(f'Response: {resp.read()}')
-
-                logging.debug(f'{url} >> resp {message.to_json(4)}')
-
-                self.cache[url] = message
-                return message
-
-        except JSONDecodeError:
-            logging.verbose(f'Failed to parse JSON')
-
-        except (ClientConnectorError, ServerTimeoutError):
-            logging.verbose(f'Failed to connect to {urlparse(url).netloc}')
-
-        except Exception as e:
-            traceback.print_exc()
-            raise e
+        async with ClientSession(
+                connector=TCPConnector(
+                    limit=self.limit, ttl_dns_cache=300),
+                headers=HEADERS,
+                connector_owner=True,
+                timeout=ClientTimeout(total=self.timeout)) as session:
+            try:
+                logging.verbose(f'Fetching resource: {url}')
+                async with session.get(url, headers=headers) as resp:
+                    # Not expecting a response with 202s, so just return
+                    if resp.status == 202:
+                        return
+
+                    elif resp.status != 200:
+                        logging.verbose(
+                            f'Received error when requesting {url}: {resp.status}')
+                        logging.verbose(await resp.read()) # change this to debug
+                        return
+
+                    if loads:
+                        message = await resp.json(loads=loads)
+
+                    elif resp.content_type == MIMETYPES['activity']:
+                        message = await resp.json(loads=Message.new_from_json)
+
+                    elif resp.content_type == MIMETYPES['json']:
+                        message = await resp.json(loads=DotDict.new_from_json)
+
+                    else:
+                        # todo: raise TypeError or something
+                        logging.verbose(
+                            f'Invalid Content-Type for "{url}": {resp.content_type}')
+                        return logging.debug(f'Response: {resp.read()}')
+
+                    logging.debug(f'{url} >> resp {message.to_json(4)}')
+
+                    self.cache[url] = message
+                    return message
+
+            except JSONDecodeError:
+                logging.verbose(f'Failed to parse JSON')
+
+            except (ClientConnectorError, ServerTimeoutError):
+                logging.verbose(f'Failed to connect to {urlparse(url).netloc}')
+
+            except Exception as e:
+                traceback.print_exc()
+                raise e
 
-    async def post(self, url, message):
+    async def post(self, url, message, software=None):
         instance = self.database.get_inbox(url)
 
         # Using the old algo by default is probably a better idea right now
-        if instance and instance.get('software') in {'mastodon'}:
+        if (instance and instance.get('software') in {'mastodon'}) or (software and software in {'mastodon'}):
             algorithm = 'hs2019'
 
         else:
@@ -145,42 +150,18 @@ class HttpClient:
         headers.update(self.database.signer.sign_headers(
             'POST', url, message, algorithm=algorithm))
 
-        try:
-            logging.verbose(f'Sending "{message.type}" to {url}')
-            logging.verbose(
-                f'url: {url}\nheaders: {headers}\ndata: {message.to_json()}')
-
-            # The following does not work and throws exception on 'relay inbox follow':
-            # Traceback (most recent call last):
-            #   File "/home/vriess/Dokumente/Repos/relay-1/relay/http_client.py", line 153, in post
-            #     async with self._session.post(url, headers=headers, data=message.to_json()) as resp:
-            #   File "/home/vriess/Dokumente/Repos/relay-1/.venv/lib/python3.10/site-packages/aiohttp/client.py", line 1141, in __aenter__
-            #     self._resp = await self._coro
-            #   File "/home/vriess/Dokumente/Repos/relay-1/.venv/lib/python3.10/site-packages/aiohttp/client.py", line 448, in _request
-            #     handle = tm.start()
-            #   File "/home/vriess/Dokumente/Repos/relay-1/.venv/lib/python3.10/site-packages/aiohttp/helpers.py", line 651, in start
-            #     return self._loop.call_at(when, self.__call__)
-            #   File "/usr/lib/python3.10/asyncio/base_events.py", line 732, in call_at
-            #     self._check_closed()
-            #   File "/usr/lib/python3.10/asyncio/base_events.py", line 515, in _check_closed
-            #     raise RuntimeError('Event loop is closed')
-            # RuntimeError: Event loop is closed
-            # ↓↓↓↓
-            # async with self._session.post(url, headers=headers, data=message.to_json()) as resp:
-            #     ## Not expecting a response, so just return
-            #     if resp.status in {200, 202}:
-            #         return logging.info(f'Successfully sent "{message.type}" to {url}')
-
-            #     logging.info(f'Received error when pushing to {url}: {resp.status}')
-            #     return logging.info(await resp.read()) # change this to debug
-
-            # Creating a session here works for some reason and does not throw an error
-            async with ClientSession(
-                connector=TCPConnector(
-                    limit=self.limit, ttl_dns_cache=300),
-                headers=HEADERS,
-                connector_owner=True,
-                timeout=ClientTimeout(total=self.timeout)) as session:
+        async with ClientSession(
+                connector=TCPConnector(
+                    limit=self.limit, ttl_dns_cache=300),
+                headers=HEADERS,
+                connector_owner=True,
+                timeout=ClientTimeout(total=self.timeout)) as session:
+            try:
+                logging.verbose(f'Sending "{message.type}" to {url}')
+                logging.verbose(
+                    f'url: {url}\nheaders: {headers}\ndata: {message.to_json()}')
+
                 async with session.post(url, headers=headers, data=message.to_json()) as resp:
                     # Not expecting a response, so just return
                     if resp.status in {200, 202}:
@@ -191,12 +172,12 @@ class HttpClient:
                     # change this to debug
                     return logging.info(await resp.read())
 
-        except (ClientConnectorError, ServerTimeoutError):
-            logging.verbose(f'Failed to connect to {url}')
-
-        # prevent workers from being brought down
-        except Exception as e:
-            traceback.print_exc()
+            except (ClientConnectorError, ServerTimeoutError):
+                logging.verbose(f'Failed to connect to {url}')
+
+            # prevent workers from being brought down
+            except Exception as e:
+                traceback.print_exc()
 
 
     ## Additional methods ##
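
The comment block removed above documents the underlying problem: each CLI action runs inside its own asyncio.run() call, and asyncio.run() closes its event loop on return, so an aiohttp ClientSession created on an earlier loop fails with RuntimeError: Event loop is closed the next time it is used. Creating the session inside the coroutine, as get() and post() now both do, avoids that. A standalone sketch of the pattern (the URL and function name are only illustrative, not relay code):

import asyncio

from aiohttp import ClientSession


async def fetch_status(url):
    # The session is created on whatever loop is currently running and is
    # closed again before that loop shuts down, so nothing outlives the loop.
    async with ClientSession() as session:
        async with session.get(url) as resp:
            return resp.status


# Each asyncio.run() call builds and then closes its own event loop, which is
# what the relay CLI does per command; a per-call session survives this,
# while a session stored on the client object would not.
print(asyncio.run(fetch_status('https://example.com')))
print(asyncio.run(fetch_status('https://example.com')))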

Second changed file: the CLI inbox follow/unfollow commands

@@ -160,7 +160,11 @@ def cli_inbox_follow(actor):
         actor = actor
     )
 
-    asyncio.run(app.client.post(inbox, message))
+    # Fetch software to decide on algorithm
+    nodeinfo = asyncio.run(app.client.fetch_nodeinfo(domain))
+    software = nodeinfo.sw_name if nodeinfo else None
+
+    asyncio.run(app.client.post(inbox, message, software))
     click.echo(f'Sent follow message to actor: {actor}')
@@ -198,7 +202,11 @@ def cli_inbox_unfollow(actor):
         }
     )
 
-    asyncio.run(app.client.post(inbox, message))
+    # Fetch software to decide on algorithm
+    nodeinfo = asyncio.run(app.client.fetch_nodeinfo(domain))
+    software = nodeinfo.sw_name if nodeinfo else None
+
+    asyncio.run(app.client.post(inbox, message, software))
     click.echo(f'Sent unfollow message to: {actor}')
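
Both commands now resolve the target's software over NodeInfo before posting, and fall back to None (and therefore the default algorithm) when the lookup fails. fetch_nodeinfo() is the relay's own helper; purely for illustration, here is a rough sketch of what such a lookup does at the protocol level (resolve /.well-known/nodeinfo, follow the advertised link, read software.name), not the relay's actual implementation:

import asyncio

from aiohttp import ClientSession


async def resolve_software(domain):
    # NodeInfo discovery: the well-known endpoint lists links to the real
    # nodeinfo document, whose software.name is e.g. 'mastodon' or 'pleroma'.
    async with ClientSession() as session:
        async with session.get(f'https://{domain}/.well-known/nodeinfo') as resp:
            if resp.status != 200:
                return None
            links = (await resp.json(content_type=None)).get('links', [])

        if not links:
            return None

        async with session.get(links[-1]['href']) as resp:
            if resp.status != 200:
                return None
            document = await resp.json(content_type=None)

    return document.get('software', {}).get('name')


if __name__ == '__main__':
    print(asyncio.run(resolve_software('mastodon.social')))

A result of None simply skips the Mastodon special case, matching the 'nodeinfo.sw_name if nodeinfo else None' guard in the diff above.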