mirror of https://git.pleroma.social/pleroma/relay.git, synced 2024-11-21 22:17:59 +00:00

use format strings for logging
parent dcbde6d532, commit 3005e9b370
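The commit swaps f-string interpolation in logging calls for %-style arguments, so a message is only formatted when the record is actually emitted, and replaces the stdlib "import logging" with the relay's own "from . import logger as logging" wrapper (which also supplies the verbose level used below). A minimal illustration of the formatting difference, using a hypothetical url value rather than anything from the repo:

    import logging

    url = 'https://example.com/inbox'  # hypothetical value for illustration

    # f-string: the message is built eagerly, even when DEBUG is disabled
    logging.debug(f'Fetching resource: {url}')

    # %-style arguments: formatting is deferred and skipped entirely if the level is disabled
    logging.debug('Fetching resource: %s', url)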
@@ -1,5 +1,4 @@
 import asyncio
-import logging
 import os
 import queue
 import signal
@@ -9,6 +8,7 @@ import traceback
 from aiohttp import web
 from datetime import datetime, timedelta
 
+from . import logger as logging
 from .config import RelayConfig
 from .database import RelayDatabase
 from .http_client import HttpClient
@@ -103,12 +103,18 @@ class Application(web.Application):
 
 	def run(self):
 		if not check_open_port(self.config.listen, self.config.port):
-			return logging.error(f'A server is already running on port {self.config.port}')
+			return logging.error('A server is already running on port %i', self.config.port)
 
 		for view in VIEWS:
 			self.router.add_view(*view)
 
-		logging.info(f'Starting webserver at {self.config.host} ({self.config.listen}:{self.config.port})')
+		logging.info(
+			'Starting webserver at %s (%s:%i)',
+			self.config.host,
+			self.config.listen,
+			self.config.port
+		)
 
 		asyncio.run(self.handle_run())
 
 
@@ -174,7 +180,7 @@ class PushWorker(threading.Thread):
 			try:
 				inbox, message = self.queue.get(block=True, timeout=0.25)
 				self.queue.task_done()
-				logging.verbose(f'New push from Thread-{threading.get_ident()}')
+				logging.verbose('New push from Thread-%i', threading.get_ident())
 				await self.client.post(inbox, message)
 
 			except queue.Empty:
@@ -1,11 +1,12 @@
 import aputils
 import asyncio
 import json
-import logging
 import traceback
 
 from urllib.parse import urlparse
 
+from . import logger as logging
+
 
 class RelayDatabase(dict):
 	def __init__(self, config):
@@ -75,7 +76,7 @@ class RelayDatabase(dict):
 			raise e from None
 
 		if not self['private-key']:
-			logging.info("No actor keys present, generating 4096-bit RSA keypair.")
+			logging.info('No actor keys present, generating 4096-bit RSA keypair.')
 			self.signer = aputils.Signer.new(self.config.keyid, size=4096)
 			self['private-key'] = self.signer.export()
 
@@ -125,7 +126,7 @@ class RelayDatabase(dict):
 			'software': software
 		}
 
-		logging.verbose(f'Added inbox to database: {inbox}')
+		logging.verbose('Added inbox to database: %s', inbox)
 		return self['relay-list'][domain]
 
 
@@ -140,13 +141,13 @@ class RelayDatabase(dict):
 
 		if not data['followid'] or not followid or data['followid'] == followid:
 			del self['relay-list'][data['domain']]
-			logging.verbose(f'Removed inbox from database: {data["inbox"]}')
+			logging.verbose('Removed inbox from database: %s', data['inbox'])
 			return True
 
 		if fail:
 			raise ValueError('Follow IDs do not match')
 
-		logging.debug(f'Follow ID does not match: db = {data["followid"]}, object = {followid}')
+		logging.debug('Follow ID does not match: db = %s, object = %s', data['followid'], followid)
 		return False
 
 
@@ -1,4 +1,3 @@
-import logging
 import traceback
 
 from aiohttp import ClientSession, ClientTimeout, TCPConnector
@@ -11,6 +10,7 @@ from json.decoder import JSONDecodeError
 from urllib.parse import urlparse
 
 from . import __version__
+from . import logger as logging
 from .misc import (
 	MIMETYPES,
 	DotDict,
@@ -100,7 +100,7 @@ class HttpClient:
 			headers.update(self.database.signer.sign_headers('GET', url, algorithm='original'))
 
 		try:
-			logging.verbose(f'Fetching resource: {url}')
+			logging.debug('Fetching resource: %s', url)
 
 			async with self._session.get(url, headers=headers) as resp:
 				## Not expecting a response with 202s, so just return
@@ -108,8 +108,8 @@ class HttpClient:
 					return
 
 				elif resp.status != 200:
-					logging.verbose(f'Received error when requesting {url}: {resp.status}')
-					logging.verbose(await resp.read()) # change this to debug
+					logging.verbose('Received error when requesting %s: %i', url, resp.status)
+					logging.debug(await resp.read())
 					return
 
 				if loads:
@@ -123,22 +123,22 @@ class HttpClient:
 
 				else:
 					# todo: raise TypeError or something
-					logging.verbose(f'Invalid Content-Type for "{url}": {resp.content_type}')
-					return logging.debug(f'Response: {resp.read()}')
+					logging.verbose('Invalid Content-Type for "%s": %s', url, resp.content_type)
+					return logging.debug('Response: %s', await resp.read())
 
-				logging.debug(f'{url} >> resp {message.to_json(4)}')
+				logging.debug('%s >> resp %s', url, message.to_json(4))
 
 				self.cache[url] = message
 				return message
 
 		except JSONDecodeError:
-			logging.verbose(f'Failed to parse JSON')
+			logging.verbose('Failed to parse JSON')
 
 		except ClientSSLError:
-			logging.verbose(f'SSL error when connecting to {urlparse(url).netloc}')
+			logging.verbose('SSL error when connecting to %s', urlparse(url).netloc)
 
 		except (AsyncTimeoutError, ClientConnectionError):
-			logging.verbose(f'Failed to connect to {urlparse(url).netloc}')
+			logging.verbose('Failed to connect to %s', urlparse(url).netloc)
 
 		except Exception as e:
 			traceback.print_exc()
@@ -160,21 +160,21 @@ class HttpClient:
 			headers.update(self.database.signer.sign_headers('POST', url, message, algorithm=algorithm))
 
 		try:
-			logging.verbose(f'Sending "{message.type}" to {url}')
+			logging.verbose('Sending "%s" to %s', message.type, url)
 
 			async with self._session.post(url, headers=headers, data=message.to_json()) as resp:
 				## Not expecting a response, so just return
 				if resp.status in {200, 202}:
-					return logging.verbose(f'Successfully sent "{message.type}" to {url}')
+					return logging.verbose('Successfully sent "%s" to %s', message.type, url)
 
-				logging.verbose(f'Received error when pushing to {url}: {resp.status}')
+				logging.verbose('Received error when pushing to %s: %i', url, resp.status)
 				return logging.verbose(await resp.read()) # change this to debug
 
 		except ClientSSLError:
-			logging.warning(f'SSL error when pushing to {urlparse(url).netloc}')
+			logging.warning('SSL error when pushing to %s', urlparse(url).netloc)
 
 		except (AsyncTimeoutError, ClientConnectionError):
-			logging.warning(f'Failed to connect to {urlparse(url).netloc} for message push')
+			logging.warning('Failed to connect to %s for message push', urlparse(url).netloc)
 
 		## prevent workers from being brought down
 		except Exception as e:
@@ -190,7 +190,7 @@ class HttpClient:
 		)
 
 		if not wk_nodeinfo:
-			logging.verbose(f'Failed to fetch well-known nodeinfo url for domain: {domain}')
+			logging.verbose('Failed to fetch well-known nodeinfo url for %s', domain)
 			return False
 
 		for version in ['20', '21']:
@@ -201,7 +201,7 @@ class HttpClient:
 				pass
 
 		if not nodeinfo_url:
-			logging.verbose(f'Failed to fetch nodeinfo url for domain: {domain}')
+			logging.verbose('Failed to fetch nodeinfo url for %s', domain)
 			return False
 
 		return await self.get(nodeinfo_url, loads=Nodeinfo.parse) or False
@@ -1,7 +1,6 @@
 import Crypto
 import asyncio
 import click
-import logging
 import platform
 
 from urllib.parse import urlparse
@@ -1,7 +1,6 @@
 from __future__ import annotations
 
 import json
-import logging
 import socket
 import traceback
 import typing
@@ -19,6 +18,8 @@ from json.decoder import JSONDecodeError
 from urllib.parse import urlparse
 from uuid import uuid4
 
+from . import logger as logging
+
 
 if typing.TYPE_CHECKING:
 	from typing import Coroutine, Generator
@@ -348,6 +349,7 @@ class View(AbstractView):
 		return self.app.database
 
 
+	# todo: move to views.ActorView
 	async def get_post_data(self) -> Response | None:
 		try:
 			self.signature = Signature.new_from_signature(self.request.headers['signature'])
@@ -394,23 +396,22 @@ class View(AbstractView):
 			self.validate_signature(await self.request.read())
 
 		except SignatureFailureError as e:
-			logging.verbose(f'signature validation failed for "{self.actor.id}": {e}')
+			logging.verbose('signature validation failed for "%s": %s', self.actor.id, e)
 			return Response.new_error(401, str(e), 'json')
 
 		self.instance = self.database.get_inbox(self.actor.inbox)
 
 
-	# aputils.Signer.validate_signature is broken atm, so reimplement it
 	def validate_signature(self, body: bytes) -> None:
 		headers = {key.lower(): value for key, value in self.request.headers.items()}
 		headers["(request-target)"] = " ".join([self.request.method.lower(), self.request.path])
 
-		# if (digest := Digest.new_from_digest(headers.get("digest"))):
-		# 	if not body:
-		# 		raise SignatureFailureError("Missing body for digest verification")
-		#
-		# 	if not digest.validate(body):
-		# 		raise SignatureFailureError("Body digest does not match")
+		if (digest := Digest.new_from_digest(headers.get("digest"))):
+			if not body:
+				raise SignatureFailureError("Missing body for digest verification")
+
+			if not digest.validate(body):
+				raise SignatureFailureError("Body digest does not match")
 
 		if self.signature.algorithm_type == "hs2019":
 			if "(created)" not in self.signature.headers:
@@ -1,12 +1,12 @@
 from __future__ import annotations
 
 import asyncio
-import logging
 import typing
 
 from cachetools import LRUCache
 from uuid import uuid4
 
+from . import logger as logging
 from .misc import Message
 
 if typing.TYPE_CHECKING:
@@ -28,7 +28,7 @@ def person_check(actor, software):
 
 async def handle_relay(view: View) -> None:
 	if view.message.objectid in cache:
-		logging.verbose(f'already relayed {view.message.objectid}')
+		logging.verbose('already relayed %s', view.message.objectid)
 		return
 
 	message = Message.new_announce(
@@ -37,7 +37,7 @@ async def handle_relay(view: View) -> None:
 	)
 
 	cache[view.message.objectid] = message.id
-	logging.debug(f'>> relay: {message}')
+	logging.debug('>> relay: %s', message)
 
 	inboxes = view.database.distill_inboxes(message)
 
@@ -47,7 +47,7 @@ async def handle_relay(view: View) -> None:
 
 async def handle_forward(view: View) -> None:
 	if view.message.id in cache:
-		logging.verbose(f'already forwarded {view.message.id}')
+		logging.verbose('already forwarded %s', view.message.id)
 		return
 
 	message = Message.new_announce(
@@ -56,7 +56,7 @@ async def handle_forward(view: View) -> None:
 	)
 
 	cache[view.message.id] = message.id
-	logging.debug(f'>> forward: {message}')
+	logging.debug('>> forward: %s', message)
 
 	inboxes = view.database.distill_inboxes(message.message)
 
@@ -80,7 +80,11 @@ async def handle_follow(view: View) -> None:
 			)
 		)
 
-		return logging.verbose(f'Rejected follow from actor for using specific software: actor={view.actor.id}, software={software}')
+		return logging.verbose(
+			'Rejected follow from actor for using specific software: actor=%s, software=%s',
+			view.actor.id,
+			software
+		)
 
 	## reject if the actor is not an instance actor
 	if person_check(view.actor, software):
@@ -94,7 +98,7 @@ async def handle_follow(view: View) -> None:
 			)
 		)
 
-		logging.verbose(f'Non-application actor tried to follow: {view.actor.id}')
+		logging.verbose('Non-application actor tried to follow: %s', view.actor.id)
 		return
 
 	view.database.add_inbox(view.actor.shared_inbox, view.message.id, software)
@@ -161,7 +165,9 @@ processors = {
 async def run_processor(view: View):
 	if view.message.type not in processors:
 		logging.verbose(
-			f'Message type "{view.message.type}" from actor cannot be handled: {view.actor.id}'
+			'Message type "%s" from actor cannot be handled: %s',
+			view.message.type,
+			view.actor.id
 		)
 
 		return
@@ -173,5 +179,5 @@ async def run_processor(view: View):
 			view.instance['software'] = nodeinfo.sw_name
 			view.database.save()
 
-	logging.verbose(f'New "{view.message.type}" from actor: {view.actor.id}')
+	logging.verbose('New "%s" from actor: %s', view.message.type, view.actor.id)
 	return await processors[view.message.type](view)
@@ -2,7 +2,6 @@ from __future__ import annotations
 
 import aputils
 import asyncio
-import logging
 import subprocess
 import traceback
 import typing
@@ -11,6 +10,7 @@ from aputils.objects import Nodeinfo, Webfinger, WellKnownNodeinfo
 from pathlib import Path
 
 from . import __version__, misc
+from . import logger as logging
 from .misc import Message, Response, View
 from .processors import run_processor
 
@@ -93,20 +93,24 @@ class ActorView(View):
 
 		## reject if the actor isn't whitelisted while the whiltelist is enabled
 		if self.config.whitelist_enabled and not self.config.is_whitelisted(self.actor.domain):
-			logging.verbose(f'Rejected actor for not being in the whitelist: {self.actor.id}')
+			logging.verbose('Rejected actor for not being in the whitelist: %s', self.actor.id)
 			return Response.new_error(403, 'access denied', 'json')
 
 		## reject if actor is banned
 		if self.config.is_banned(self.actor.domain):
-			logging.verbose(f'Ignored request from banned actor: {self.actor.id}')
+			logging.verbose('Ignored request from banned actor: %s', self.actor.id)
 			return Response.new_error(403, 'access denied', 'json')
 
 		## reject if activity type isn't 'Follow' and the actor isn't following
 		if self.message.type != 'Follow' and not self.database.get_inbox(self.actor.domain):
-			logging.verbose(f'Rejected actor for trying to post while not following: {self.actor.id}')
+			logging.verbose(
+				'Rejected actor for trying to post while not following: %s',
+				self.actor.id
+			)
 
 			return Response.new_error(401, 'access denied', 'json')
 
-		logging.debug(f">> payload {self.message.to_json(4)}")
+		logging.debug('>> payload %s', self.message.to_json(4))
 
 		asyncio.ensure_future(run_processor(self))
 		return Response.new(status = 202)
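Every file above replaces the stdlib logging import with "from . import logger as logging", and the diff calls logging.verbose(), which the standard library does not provide. The relay's logger module itself is not shown in this commit; purely as an illustration of how such a wrapper can sit on top of the stdlib, here is a hypothetical sketch (the VERBOSE level value and the verbose() helper are assumptions, not the project's actual code):

    import logging

    # assumed custom level between DEBUG (10) and INFO (20)
    VERBOSE = 15
    logging.addLevelName(VERBOSE, 'VERBOSE')

    def verbose(message, *args, **kwargs):
        # hand the %-style message and arguments to the logging machinery,
        # matching the deferred-formatting calls used throughout the diff
        logging.log(VERBOSE, message, *args, **kwargs)

    # re-export the stdlib helpers so callers can treat this module as a drop-in
    debug = logging.debug
    info = logging.info
    warning = logging.warning
    error = logging.error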