Compare commits


4 commits

Author SHA1 Message Date
Izalia Mae 6df13c71f3 Merge branch 'dev' into 'main' (version 0.3.3; see merge request pleroma/relay!59) 2024-09-18 00:20:05 +00:00
Izalia Mae c482677c32 raise error on non 200 or 202 status in HttpClient.post 2024-09-15 07:34:09 -04:00
Izalia Mae c5633808f7 use correct type when deleting old database cache items 2024-09-15 07:29:12 -04:00
Izalia Mae e6845136eb remove logging on non 202 and 200 statuses 2024-09-15 05:35:17 -04:00
3 changed files with 12 additions and 16 deletions


@@ -243,11 +243,10 @@ class SqlCache(Cache):
 		if self._db is None:
 			raise RuntimeError("Database has not been setup")
 
-		limit = Date.new_utc() - timedelta(days = days)
-		params = {"limit": limit.timestamp()}
+		date = Date.new_utc() - timedelta(days = days)
 
 		with self._db.session(True) as conn:
-			with conn.execute("DELETE FROM cache WHERE updated < :limit", params):
+			with conn.execute("DELETE FROM cache WHERE updated < :limit", {"limit": date}):
 				pass
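
The fix above swaps the bound value from limit.timestamp() (a Unix-epoch float) to the Date object itself, so the parameter is compared against the updated column in the representation it is actually stored in. A standalone sqlite3 sketch of the same pitfall, assuming the column holds ISO-8601 text; the project's Date/blib types are not used here, this only illustrates why the bound type matters:

# Illustration only: a cache table whose "updated" column stores ISO-8601 text.
import sqlite3
from datetime import datetime, timedelta, timezone

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE cache (key TEXT, updated TEXT)")
conn.execute(
	"INSERT INTO cache VALUES (?, ?)",
	("stale-item", (datetime.now(timezone.utc) - timedelta(days = 30)).isoformat())
)

cutoff = datetime.now(timezone.utc) - timedelta(days = 14)

# Wrong type: an epoch float never compares as older than an ISO string, so nothing is deleted.
print(conn.execute("DELETE FROM cache WHERE updated < :limit", {"limit": cutoff.timestamp()}).rowcount)  # 0

# Matching type: the stale row is removed as intended.
print(conn.execute("DELETE FROM cache WHERE updated < :limit", {"limit": cutoff.isoformat()}).rowcount)  # 1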


@@ -134,9 +134,6 @@ class HttpClient:
 		data = await resp.text()
 
 		if resp.status not in (200, 202):
-			logging.verbose('Received error when requesting %s: %i', url, resp.status)
-			logging.debug(data)
-
 			try:
 				error = json.loads(data)["error"]
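
With the verbose/debug lines gone, the non-200/202 path relies on pulling the "error" field out of the JSON body, as the surviving try block above does. A rough sketch of that extraction pattern as a standalone helper; the helper name and the fallback behaviour are assumptions, not code from the project:

import json

def extract_error(data: str, status: int) -> str:
	# Hypothetical helper: prefer the JSON "error" field, fall back to the
	# raw body (or the bare status) when the response is not JSON.
	try:
		return json.loads(data)["error"]

	except (json.JSONDecodeError, KeyError, TypeError):
		return data or f"HTTP {status}"

print(extract_error('{"error": "not found"}', 404))  # not found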
@@ -223,16 +220,12 @@ class HttpClient:
 		logging.verbose('Sending "%s" to %s', mtype, url)
 
 		async with self._session.post(url, headers = headers, data = body) as resp:
-			# Not expecting a response, so just return
-			if resp.status in {200, 202}:
-				logging.verbose('Successfully sent "%s" to %s', mtype, url)
-				return
-
-			logging.error('Received error when pushing to %s: %i', url, resp.status)
-			logging.debug(await resp.read())
-			logging.debug("message: %s", body.decode("utf-8"))
-			logging.debug("headers: %s", json.dumps(headers, indent = 4))
-			return
+			if resp.status not in (200, 202):
+				raise HttpError(
+					resp.status,
+					await resp.text(),
+					headers = {k: v for k, v in resp.headers.items()}
+				)
 
 
 	async def fetch_nodeinfo(self, domain: str, force: bool = False) -> Nodeinfo:
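
After this change, post() raises on any non-200/202 response instead of logging and silently returning, so failures surface to the caller as an exception carrying the status, body, and response headers. The exception class comes from blib; a rough stand-in with the shape the diff relies on, with field names inferred from its use above and in the worker below rather than from blib's source:

class HttpError(Exception):
	# Hypothetical stand-in for blib.HttpError as it is used in this diff:
	# constructed with (status, message, headers = ...) and exposing
	# .status and .message for the caller's log line.
	def __init__(self, status: int, message: str, headers: dict[str, str] | None = None) -> None:
		super().__init__(f"HTTP Error {status}: {message}")
		self.status = status
		self.message = message
		self.headers = headers or {}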


@@ -5,6 +5,7 @@ import traceback
 
 from aiohttp.client_exceptions import ClientConnectionError, ClientSSLError
 from asyncio.exceptions import TimeoutError as AsyncTimeoutError
+from blib import HttpError
 from dataclasses import dataclass
 from multiprocessing import Event, Process, Queue, Value
 from multiprocessing.queues import Queue as QueueType
@@ -94,6 +95,9 @@ class PushWorker(Process):
 			try:
 				await self.client.post(item.inbox, item.message, item.instance)
 
+			except HttpError as e:
+				logging.error('HTTP Error when pushing to %s: %i %s', item.inbox, e.status, e.message)
+
 			except AsyncTimeoutError:
 				logging.error('Timeout when pushing to %s', item.domain)
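
Put together, the worker becomes the place where delivery failures are reported: post() raises, the worker logs and moves on. A condensed sketch of that control flow with hypothetical stand-ins for the client and the queue item; only the exception handling mirrors the diff:

import logging
from asyncio.exceptions import TimeoutError as AsyncTimeoutError
from blib import HttpError  # or the stand-in class sketched above

async def deliver(client, inbox: str, message: bytes) -> bool:
	# Hypothetical wrapper around one delivery attempt; client.post is
	# assumed to raise HttpError on any non-200/202 response, as above.
	try:
		await client.post(inbox, message)
		return True

	except HttpError as e:
		logging.error('HTTP Error when pushing to %s: %i %s', inbox, e.status, e.message)

	except AsyncTimeoutError:
		logging.error('Timeout when pushing to %s', inbox)

	return False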