use format strings for logging

This commit is contained in:
Izalia Mae 2024-01-10 10:49:43 -05:00
parent dcbde6d532
commit 3005e9b370
7 changed files with 67 additions and 50 deletions

View file

@ -1,5 +1,4 @@
import asyncio
import logging
import os
import queue
import signal
@ -9,6 +8,7 @@ import traceback
from aiohttp import web
from datetime import datetime, timedelta
from . import logger as logging
from .config import RelayConfig
from .database import RelayDatabase
from .http_client import HttpClient
@ -103,12 +103,18 @@ class Application(web.Application):
def run(self):
if not check_open_port(self.config.listen, self.config.port):
return logging.error(f'A server is already running on port {self.config.port}')
return logging.error('A server is already running on port %i', self.config.port)
for view in VIEWS:
self.router.add_view(*view)
logging.info(f'Starting webserver at {self.config.host} ({self.config.listen}:{self.config.port})')
logging.info(
'Starting webserver at %s (%s:%i)',
self.config.host,
self.config.listen,
self.config.port
)
asyncio.run(self.handle_run())
@ -174,7 +180,7 @@ class PushWorker(threading.Thread):
try:
inbox, message = self.queue.get(block=True, timeout=0.25)
self.queue.task_done()
logging.verbose(f'New push from Thread-{threading.get_ident()}')
logging.verbose('New push from Thread-%i', threading.get_ident())
await self.client.post(inbox, message)
except queue.Empty:

View file

@ -1,11 +1,12 @@
import aputils
import asyncio
import json
import logging
import traceback
from urllib.parse import urlparse
from . import logger as logging
class RelayDatabase(dict):
def __init__(self, config):
@ -75,7 +76,7 @@ class RelayDatabase(dict):
raise e from None
if not self['private-key']:
logging.info("No actor keys present, generating 4096-bit RSA keypair.")
logging.info('No actor keys present, generating 4096-bit RSA keypair.')
self.signer = aputils.Signer.new(self.config.keyid, size=4096)
self['private-key'] = self.signer.export()
@ -125,7 +126,7 @@ class RelayDatabase(dict):
'software': software
}
logging.verbose(f'Added inbox to database: {inbox}')
logging.verbose('Added inbox to database: %s', inbox)
return self['relay-list'][domain]
@ -140,13 +141,13 @@ class RelayDatabase(dict):
if not data['followid'] or not followid or data['followid'] == followid:
del self['relay-list'][data['domain']]
logging.verbose(f'Removed inbox from database: {data["inbox"]}')
logging.verbose('Removed inbox from database: %s', data['inbox'])
return True
if fail:
raise ValueError('Follow IDs do not match')
logging.debug(f'Follow ID does not match: db = {data["followid"]}, object = {followid}')
logging.debug('Follow ID does not match: db = %s, object = %s', data['followid'], followid)
return False

View file

@ -1,4 +1,3 @@
import logging
import traceback
from aiohttp import ClientSession, ClientTimeout, TCPConnector
@ -11,6 +10,7 @@ from json.decoder import JSONDecodeError
from urllib.parse import urlparse
from . import __version__
from . import logger as logging
from .misc import (
MIMETYPES,
DotDict,
@ -100,7 +100,7 @@ class HttpClient:
headers.update(self.database.signer.sign_headers('GET', url, algorithm='original'))
try:
logging.verbose(f'Fetching resource: {url}')
logging.debug('Fetching resource: %s', url)
async with self._session.get(url, headers=headers) as resp:
## Not expecting a response with 202s, so just return
@ -108,8 +108,8 @@ class HttpClient:
return
elif resp.status != 200:
logging.verbose(f'Received error when requesting {url}: {resp.status}')
logging.verbose(await resp.read()) # change this to debug
logging.verbose('Received error when requesting %s: %i', url, resp.status)
logging.debug(await resp.read())
return
if loads:
@ -123,22 +123,22 @@ class HttpClient:
else:
# todo: raise TypeError or something
logging.verbose(f'Invalid Content-Type for "{url}": {resp.content_type}')
return logging.debug(f'Response: {resp.read()}')
logging.verbose('Invalid Content-Type for "%s": %s', url, resp.content_type)
return logging.debug('Response: %s', await resp.read())
logging.debug(f'{url} >> resp {message.to_json(4)}')
logging.debug('%s >> resp %s', url, message.to_json(4))
self.cache[url] = message
return message
except JSONDecodeError:
logging.verbose(f'Failed to parse JSON')
logging.verbose('Failed to parse JSON')
except ClientSSLError:
logging.verbose(f'SSL error when connecting to {urlparse(url).netloc}')
logging.verbose('SSL error when connecting to %s', urlparse(url).netloc)
except (AsyncTimeoutError, ClientConnectionError):
logging.verbose(f'Failed to connect to {urlparse(url).netloc}')
logging.verbose('Failed to connect to %s', urlparse(url).netloc)
except Exception as e:
traceback.print_exc()
@ -160,21 +160,21 @@ class HttpClient:
headers.update(self.database.signer.sign_headers('POST', url, message, algorithm=algorithm))
try:
logging.verbose(f'Sending "{message.type}" to {url}')
logging.verbose('Sending "%s" to %s', message.type, url)
async with self._session.post(url, headers=headers, data=message.to_json()) as resp:
## Not expecting a response, so just return
if resp.status in {200, 202}:
return logging.verbose(f'Successfully sent "{message.type}" to {url}')
return logging.verbose('Successfully sent "%s" to %s', message.type, url)
logging.verbose(f'Received error when pushing to {url}: {resp.status}')
logging.verbose('Received error when pushing to %s: %i', url, resp.status)
return logging.verbose(await resp.read()) # change this to debug
except ClientSSLError:
logging.warning(f'SSL error when pushing to {urlparse(url).netloc}')
logging.warning('SSL error when pushing to %s', urlparse(url).netloc)
except (AsyncTimeoutError, ClientConnectionError):
logging.warning(f'Failed to connect to {urlparse(url).netloc} for message push')
logging.warning('Failed to connect to %s for message push', urlparse(url).netloc)
## prevent workers from being brought down
except Exception as e:
@ -190,7 +190,7 @@ class HttpClient:
)
if not wk_nodeinfo:
logging.verbose(f'Failed to fetch well-known nodeinfo url for domain: {domain}')
logging.verbose('Failed to fetch well-known nodeinfo url for %s', domain)
return False
for version in ['20', '21']:
@ -201,7 +201,7 @@ class HttpClient:
pass
if not nodeinfo_url:
logging.verbose(f'Failed to fetch nodeinfo url for domain: {domain}')
logging.verbose('Failed to fetch nodeinfo url for %s', domain)
return False
return await self.get(nodeinfo_url, loads=Nodeinfo.parse) or False

View file

@ -1,7 +1,6 @@
import Crypto
import asyncio
import click
import logging
import platform
from urllib.parse import urlparse

View file

@ -1,7 +1,6 @@
from __future__ import annotations
import json
import logging
import socket
import traceback
import typing
@ -19,6 +18,8 @@ from json.decoder import JSONDecodeError
from urllib.parse import urlparse
from uuid import uuid4
from . import logger as logging
if typing.TYPE_CHECKING:
from typing import Coroutine, Generator
@ -348,6 +349,7 @@ class View(AbstractView):
return self.app.database
# todo: move to views.ActorView
async def get_post_data(self) -> Response | None:
try:
self.signature = Signature.new_from_signature(self.request.headers['signature'])
@ -394,23 +396,22 @@ class View(AbstractView):
self.validate_signature(await self.request.read())
except SignatureFailureError as e:
logging.verbose(f'signature validation failed for "{self.actor.id}": {e}')
logging.verbose('signature validation failed for "%s": %s', self.actor.id, e)
return Response.new_error(401, str(e), 'json')
self.instance = self.database.get_inbox(self.actor.inbox)
# aputils.Signer.validate_signature is broken at the moment, so reimplement it
def validate_signature(self, body: bytes) -> None:
headers = {key.lower(): value for key, value in self.request.headers.items()}
headers["(request-target)"] = " ".join([self.request.method.lower(), self.request.path])
# if (digest := Digest.new_from_digest(headers.get("digest"))):
# if not body:
# raise SignatureFailureError("Missing body for digest verification")
#
# if not digest.validate(body):
# raise SignatureFailureError("Body digest does not match")
if (digest := Digest.new_from_digest(headers.get("digest"))):
if not body:
raise SignatureFailureError("Missing body for digest verification")
if not digest.validate(body):
raise SignatureFailureError("Body digest does not match")
if self.signature.algorithm_type == "hs2019":
if "(created)" not in self.signature.headers:

View file

@ -1,12 +1,12 @@
from __future__ import annotations
import asyncio
import logging
import typing
from cachetools import LRUCache
from uuid import uuid4
from . import logger as logging
from .misc import Message
if typing.TYPE_CHECKING:
@ -28,7 +28,7 @@ def person_check(actor, software):
async def handle_relay(view: View) -> None:
if view.message.objectid in cache:
logging.verbose(f'already relayed {view.message.objectid}')
logging.verbose('already relayed %s', view.message.objectid)
return
message = Message.new_announce(
@ -37,7 +37,7 @@ async def handle_relay(view: View) -> None:
)
cache[view.message.objectid] = message.id
logging.debug(f'>> relay: {message}')
logging.debug('>> relay: %s', message)
inboxes = view.database.distill_inboxes(message)
@ -47,7 +47,7 @@ async def handle_relay(view: View) -> None:
async def handle_forward(view: View) -> None:
if view.message.id in cache:
logging.verbose(f'already forwarded {view.message.id}')
logging.verbose('already forwarded %s', view.message.id)
return
message = Message.new_announce(
@ -56,7 +56,7 @@ async def handle_forward(view: View) -> None:
)
cache[view.message.id] = message.id
logging.debug(f'>> forward: {message}')
logging.debug('>> forward: %s', message)
inboxes = view.database.distill_inboxes(message.message)
@ -80,7 +80,11 @@ async def handle_follow(view: View) -> None:
)
)
return logging.verbose(f'Rejected follow from actor for using specific software: actor={view.actor.id}, software={software}')
return logging.verbose(
'Rejected follow from actor for using specific software: actor=%s, software=%s',
view.actor.id,
software
)
## reject if the actor is not an instance actor
if person_check(view.actor, software):
@ -94,7 +98,7 @@ async def handle_follow(view: View) -> None:
)
)
logging.verbose(f'Non-application actor tried to follow: {view.actor.id}')
logging.verbose('Non-application actor tried to follow: %s', view.actor.id)
return
view.database.add_inbox(view.actor.shared_inbox, view.message.id, software)
@ -161,7 +165,9 @@ processors = {
async def run_processor(view: View):
if view.message.type not in processors:
logging.verbose(
f'Message type "{view.message.type}" from actor cannot be handled: {view.actor.id}'
'Message type "%s" from actor cannot be handled: %s',
view.message.type,
view.actor.id
)
return
@ -173,5 +179,5 @@ async def run_processor(view: View):
view.instance['software'] = nodeinfo.sw_name
view.database.save()
logging.verbose(f'New "{view.message.type}" from actor: {view.actor.id}')
logging.verbose('New "%s" from actor: %s', view.message.type, view.actor.id)
return await processors[view.message.type](view)

View file

@ -2,7 +2,6 @@ from __future__ import annotations
import aputils
import asyncio
import logging
import subprocess
import traceback
import typing
@ -11,6 +10,7 @@ from aputils.objects import Nodeinfo, Webfinger, WellKnownNodeinfo
from pathlib import Path
from . import __version__, misc
from . import logger as logging
from .misc import Message, Response, View
from .processors import run_processor
@ -93,20 +93,24 @@ class ActorView(View):
## reject if the actor isn't whitelisted while the whitelist is enabled
if self.config.whitelist_enabled and not self.config.is_whitelisted(self.actor.domain):
logging.verbose(f'Rejected actor for not being in the whitelist: {self.actor.id}')
logging.verbose('Rejected actor for not being in the whitelist: %s', self.actor.id)
return Response.new_error(403, 'access denied', 'json')
## reject if actor is banned
if self.config.is_banned(self.actor.domain):
logging.verbose(f'Ignored request from banned actor: {self.actor.id}')
logging.verbose('Ignored request from banned actor: %s', self.actor.id)
return Response.new_error(403, 'access denied', 'json')
## reject if activity type isn't 'Follow' and the actor isn't following
if self.message.type != 'Follow' and not self.database.get_inbox(self.actor.domain):
logging.verbose(f'Rejected actor for trying to post while not following: {self.actor.id}')
logging.verbose(
'Rejected actor for trying to post while not following: %s',
self.actor.id
)
return Response.new_error(401, 'access denied', 'json')
logging.debug(f">> payload {self.message.to_json(4)}")
logging.debug('>> payload %s', self.message.to_json(4))
asyncio.ensure_future(run_processor(self))
return Response.new(status = 202)