From d4955828d43be7be13701e4cac810b6fe878c628 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Fri, 18 Nov 2022 13:45:26 -0500 Subject: [PATCH 01/50] return Nodeinfo object from fetch_nodeinfo --- relay/manage.py | 22 +++++++++------------- relay/misc.py | 13 +++++++++---- relay/processors.py | 16 ++++++++-------- relay/views.py | 8 ++++---- 4 files changed, 30 insertions(+), 29 deletions(-) diff --git a/relay/manage.py b/relay/manage.py index 2031cb8..3b55717 100644 --- a/relay/manage.py +++ b/relay/manage.py @@ -235,18 +235,16 @@ def cli_software_ban(name, fetch_nodeinfo): return click.echo('Banned all relay software') if fetch_nodeinfo: - software = asyncio.run(misc.fetch_nodeinfo(name)) + nodeinfo = asyncio.run(misc.fetch_nodeinfo(name)) if not software: click.echo(f'Failed to fetch software name from domain: {name}') - name = software - - if config.ban_software(name): + if config.ban_software(nodeinfo.swname): app.config.save() - return click.echo(f'Banned software: {name}') + return click.echo(f'Banned software: {nodeinfo.swname}') - click.echo(f'Software already banned: {name}') + click.echo(f'Software already banned: {nodeinfo.swname}') @cli_software.command('unban') @@ -265,18 +263,16 @@ def cli_software_unban(name, fetch_nodeinfo): return click.echo('Unbanned all relay software') if fetch_nodeinfo: - software = asyncio.run(misc.fetch_nodeinfo(name)) + nodeinfo = asyncio.run(misc.fetch_nodeinfo(name)) - if not software: + if not nodeinfo: click.echo(f'Failed to fetch software name from domain: {name}') - name = software - - if app.config.unban_software(name): + if app.config.unban_software(nodeinfo.swname): app.config.save() - return click.echo(f'Unbanned software: {name}') + return click.echo(f'Unbanned software: {nodeinfo.swname}') - click.echo(f'Software wasn\'t banned: {name}') + click.echo(f'Software wasn\'t banned: {nodeinfo.swname}') diff --git a/relay/misc.py b/relay/misc.py index e5f362e..980caf3 100644 --- a/relay/misc.py +++ b/relay/misc.py @@ -141,12 +141,11 @@ async def fetch_nodeinfo(domain): nodeinfo = await request(nodeinfo_url, sign_headers=False, activity=False) - try: - return nodeinfo['software']['name'] - - except KeyError: + if not nodeinfo: return False + return Nodeinfo(nodeinfo) + async def request(uri, data=None, force=False, sign_headers=True, activity=True): ## If a get request and not force, try to use the cache first @@ -459,6 +458,12 @@ class Message(DotDict): return self.object +class Nodeinfo(DotDict): + @property + def swname(self): + return self.software.name + + class Response(AiohttpResponse): @classmethod def new(cls, body='', status=200, headers=None, ctype='text'): diff --git a/relay/processors.py b/relay/processors.py index 69f8b59..a6ced5a 100644 --- a/relay/processors.py +++ b/relay/processors.py @@ -6,7 +6,7 @@ from uuid import uuid4 from . 
import misc -async def handle_relay(request, actor, data, software): +async def handle_relay(request, actor, data, nodeinfo): if data.objectid in request.app.cache.objects: logging.verbose(f'already relayed {data.objectid}') return @@ -27,7 +27,7 @@ async def handle_relay(request, actor, data, software): request.app.cache.objects[data.objectid] = message.id -async def handle_forward(request, actor, data, software): +async def handle_forward(request, actor, data, nodeinfo): if data.id in request.app.cache.objects: logging.verbose(f'already forwarded {data.id}') return @@ -47,7 +47,7 @@ async def handle_forward(request, actor, data, software): request.app.cache.objects[data.id] = message.id -async def handle_follow(request, actor, data, software): +async def handle_follow(request, actor, data, nodeinfo): if not request.app.database.add_inbox(actor.shared_inbox, data.id): request.app.database.set_followid(actor.id, data.id) @@ -65,7 +65,7 @@ async def handle_follow(request, actor, data, software): # Are Akkoma and Pleroma the only two that expect a follow back? # Ignoring only Mastodon for now - if software != 'mastodon': + if nodeinfo.swname != 'mastodon': await misc.request( actor.shared_inbox, misc.Message.new_follow( @@ -75,10 +75,10 @@ async def handle_follow(request, actor, data, software): ) -async def handle_undo(request, actor, data, software): +async def handle_undo(request, actor, data, nodeinfo): ## If the object is not a Follow, forward it if data['object']['type'] != 'Follow': - return await handle_forward(request, actor, data, software) + return await handle_forward(request, actor, data, nodeinfo) if not request.app.database.del_inbox(actor.domain, data.id): return @@ -104,9 +104,9 @@ processors = { } -async def run_processor(request, actor, data, software): +async def run_processor(request, actor, data, nodeinfo): if data.type not in processors: return logging.verbose(f'New "{data.type}" from actor: {actor.id}') - return await processors[data.type](request, actor, data, software) + return await processors[data.type](request, actor, data, nodeinfo) diff --git a/relay/views.py b/relay/views.py index 8d6be5e..44a1686 100644 --- a/relay/views.py +++ b/relay/views.py @@ -100,7 +100,7 @@ async def inbox(request): return Response.new_error(400, 'failed to parse message', 'json') actor = await misc.request(signature.keyid) - software = await misc.fetch_nodeinfo(actor.domain) + nodeinfo = await misc.fetch_nodeinfo(actor.domain) ## reject if actor is empty if not actor: @@ -118,8 +118,8 @@ async def inbox(request): return Response.new_error(403, 'access denied', 'json') ## reject if software used by actor is banned - if config.is_banned_software(software): - logging.verbose(f'Rejected actor for using specific software: {software}') + if config.is_banned_software(nodeinfo.swname): + logging.verbose(f'Rejected actor for using specific software: {nodeinfo.swname}') return Response.new_error(403, 'access denied', 'json') ## reject if the signature is invalid @@ -134,7 +134,7 @@ async def inbox(request): logging.debug(f">> payload {data}") - await run_processor(request, actor, data, software) + await run_processor(request, actor, data, nodeinfo) return Response.new(status=202) From 9369b598fa3ac8dad0d4725eda213d83298c42a1 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Fri, 18 Nov 2022 14:10:39 -0500 Subject: [PATCH 02/50] add software name for inboxes --- relay/application.py | 11 ++++++----- relay/database.py | 14 +++++++++++++- 2 files changed, 19 insertions(+), 6 deletions(-) diff --git 
a/relay/application.py b/relay/application.py index cc2815b..6c17d16 100644 --- a/relay/application.py +++ b/relay/application.py @@ -9,7 +9,7 @@ from datetime import datetime, timedelta from .config import RelayConfig from .database import RelayDatabase -from .misc import DotDict, check_open_port, set_app +from .misc import DotDict, check_open_port, fetch_nodeinfo, set_app from .views import routes @@ -25,15 +25,16 @@ class Application(web.Application): if not self['config'].load(): self['config'].save() - self['database'] = RelayDatabase(self['config']) - self['database'].load() - self['cache'] = DotDict({key: Cache(maxsize=self['config'][key]) for key in self['config'].cachekeys}) self['semaphore'] = asyncio.Semaphore(self['config'].push_limit) - self.set_signal_handler() set_app(self) + self['database'] = RelayDatabase(self['config']) + self['database'].load() + + self.set_signal_handler() + @property def cache(self): diff --git a/relay/database.py b/relay/database.py index 90b1473..b79c0aa 100644 --- a/relay/database.py +++ b/relay/database.py @@ -1,3 +1,4 @@ +import asyncio import json import logging import traceback @@ -5,6 +6,8 @@ import traceback from Crypto.PublicKey import RSA from urllib.parse import urlparse +from .misc import fetch_nodeinfo + class RelayDatabase(dict): def __init__(self, config): @@ -75,9 +78,18 @@ class RelayDatabase(dict): else: self['relay-list'] = data.get('relay-list', {}) - for domain in self['relay-list'].keys(): + for domain, instance in self['relay-list'].items(): if self.config.is_banned(domain) or (self.config.whitelist_enabled and not self.config.is_whitelisted(domain)): self.del_inbox(domain) + continue + + if not instance.get('software'): + nodeinfo = asyncio.run(fetch_nodeinfo(domain)) + + if not nodeinfo: + continue + + instance['software'] = nodeinfo.swname new_db = False From 4ea6a040fb849a00e12065452942373223c56ba0 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Fri, 18 Nov 2022 14:36:30 -0500 Subject: [PATCH 03/50] optimize RelayDatabase.get_inbox --- relay/database.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/relay/database.py b/relay/database.py index b79c0aa..975174d 100644 --- a/relay/database.py +++ b/relay/database.py @@ -120,13 +120,13 @@ class RelayDatabase(dict): if domain.startswith('http'): domain = urlparse(domain).hostname - if domain not in self['relay-list']: - if fail: - raise KeyError(domain) + inbox = self['relay-list'].get(domain) - return + if inbox: + return inbox - return self['relay-list'][domain] + if fail: + raise KeyError(domain) def add_inbox(self, inbox, followid=None, fail=False): From 306b526808bdab6e37b9ce4a69b300540cabd2bd Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Fri, 18 Nov 2022 16:38:39 -0500 Subject: [PATCH 04/50] add properties to aiohttp.web.Request --- relay/application.py | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/relay/application.py b/relay/application.py index 6c17d16..f0ea4fe 100644 --- a/relay/application.py +++ b/relay/application.py @@ -126,3 +126,32 @@ class Application(web.Application): class Cache(LRUCache): def set_maxsize(self, value): self.__maxsize = int(value) + + +## Can't sub-class web.Request, so let's just add some properties +def request_actor(self): + try: return self['actor'] + except KeyError: pass + + +def request_message(self): + try: return self['message'] + except KeyError: pass + + +def request_signature(self): + if 'signature' not in self._state: + try: self['signature'] = 
DotDict.new_from_signature(self.headers['signature']) + except KeyError: return + + return self['signature'] + + +setattr(web.Request, 'actor', property(request_actor)) +setattr(web.Request, 'message', property(request_message)) +setattr(web.Request, 'signature', property(request_signature)) + +setattr(web.Request, 'cache', property(lambda self: self.app.cache)) +setattr(web.Request, 'config', property(lambda self: self.app.config)) +setattr(web.Request, 'database', property(lambda self: self.app.database)) +setattr(web.Request, 'semaphore', property(lambda self: self.app.semaphore)) From 4a8a8da74087300a56d4aeb089b4d5dc347cb380 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Fri, 18 Nov 2022 16:39:53 -0500 Subject: [PATCH 05/50] add software kwarg to RelayDatabase.add_inbox --- relay/database.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/relay/database.py b/relay/database.py index 975174d..3a9f5c9 100644 --- a/relay/database.py +++ b/relay/database.py @@ -129,20 +129,25 @@ class RelayDatabase(dict): raise KeyError(domain) - def add_inbox(self, inbox, followid=None, fail=False): + def add_inbox(self, inbox, followid=None, software=None): assert inbox.startswith('https'), 'Inbox must be a url' domain = urlparse(inbox).hostname + instance = self.get_inbox(domain) - if self.get_inbox(domain): - if fail: - raise KeyError(domain) + if instance: + if followid: + instance['followid'] = followid - return False + if software: + instance['software'] = software + + return instance self['relay-list'][domain] = { 'domain': domain, 'inbox': inbox, - 'followid': followid + 'followid': followid, + 'software': software } logging.verbose(f'Added inbox to database: {inbox}') From ba9f2718aaeb03ef7b1c461a359f148255849ffe Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Fri, 18 Nov 2022 16:41:14 -0500 Subject: [PATCH 06/50] use new request properties and only fetch nodeinfo on follow --- relay/processors.py | 89 ++++++++++++++++++++++++--------------------- relay/views.py | 78 +++++++++++++++++++-------------------- 2 files changed, 84 insertions(+), 83 deletions(-) diff --git a/relay/processors.py b/relay/processors.py index a6ced5a..4ccb0d1 100644 --- a/relay/processors.py +++ b/relay/processors.py @@ -6,92 +6,97 @@ from uuid import uuid4 from . 
import misc -async def handle_relay(request, actor, data, nodeinfo): - if data.objectid in request.app.cache.objects: - logging.verbose(f'already relayed {data.objectid}') +async def handle_relay(request): + if request.message.objectid in request.cache.objects: + logging.verbose(f'already relayed {request.message.objectid}') return - logging.verbose(f'Relaying post from {data.actorid}') + logging.verbose(f'Relaying post from {request.message.actorid}') message = misc.Message.new_announce( - host = request.app.config.host, - object = data.objectid + host = request.config.host, + object = request.message.objectid ) logging.debug(f'>> relay: {message}') - inboxes = misc.distill_inboxes(actor, data.objectid) + inboxes = misc.distill_inboxes(request.actor, request.message.objectid) futures = [misc.request(inbox, data=message) for inbox in inboxes] asyncio.ensure_future(asyncio.gather(*futures)) - request.app.cache.objects[data.objectid] = message.id + request.cache.objects[request.message.objectid] = message.id -async def handle_forward(request, actor, data, nodeinfo): - if data.id in request.app.cache.objects: - logging.verbose(f'already forwarded {data.id}') +async def handle_forward(request): + if request.message.id in request.cache.objects: + logging.verbose(f'already forwarded {request.message.id}') return message = misc.Message.new_announce( - host = request.app.config.host, - object = data + host = request.config.host, + object = request.message ) - logging.verbose(f'Forwarding post from {actor.id}') - logging.debug(f'>> Relay {data}') + logging.verbose(f'Forwarding post from {request.actor.id}') + logging.debug(f'>> Relay {request.message}') - inboxes = misc.distill_inboxes(actor, data.id) + inboxes = misc.distill_inboxes(request.actor, request.message.id) futures = [misc.request(inbox, data=message) for inbox in inboxes] asyncio.ensure_future(asyncio.gather(*futures)) - request.app.cache.objects[data.id] = message.id + request.cache.objects[request.message.id] = message.id -async def handle_follow(request, actor, data, nodeinfo): - if not request.app.database.add_inbox(actor.shared_inbox, data.id): - request.app.database.set_followid(actor.id, data.id) +async def handle_follow(request): + nodeinfo = await misc.fetch_nodeinfo(request.actor.domain) + software = nodeinfo.swname if nodeinfo else None - request.app.database.save() + ## reject if software used by actor is banned + if request.config.is_banned_software(software): + return logging.verbose(f'Rejected follow from actor for using specific software: actor={request.actor.id}, software={software}') + + request.database.add_inbox(request.actor.shared_inbox, request.message.id, software) + request.database.save() await misc.request( - actor.shared_inbox, + request.actor.shared_inbox, misc.Message.new_response( - host = request.app.config.host, - actor = actor.id, - followid = data.id, + host = request.config.host, + actor = request.actor.id, + followid = request.message.id, accept = True ) ) # Are Akkoma and Pleroma the only two that expect a follow back? 
# Ignoring only Mastodon for now - if nodeinfo.swname != 'mastodon': + if software != 'mastodon': await misc.request( - actor.shared_inbox, + request.actor.shared_inbox, misc.Message.new_follow( - host = request.app.config.host, - actor = actor.id + host = request.config.host, + actor = request.actor.id ) ) -async def handle_undo(request, actor, data, nodeinfo): +async def handle_undo(request): ## If the object is not a Follow, forward it - if data['object']['type'] != 'Follow': - return await handle_forward(request, actor, data, nodeinfo) + if request.message.object.type != 'Follow': + return await handle_forward(request) - if not request.app.database.del_inbox(actor.domain, data.id): + if not request.database.del_inbox(request.actor.domain, request.message.id): return - request.app.database.save() + request.database.save() message = misc.Message.new_unfollow( - host = request.app.config.host, - actor = actor.id, - follow = data + host = request.config.host, + actor = request.actor.id, + follow = request.message ) - await misc.request(actor.shared_inbox, message) + await misc.request(request.actor.shared_inbox, message) processors = { @@ -104,9 +109,9 @@ processors = { } -async def run_processor(request, actor, data, nodeinfo): - if data.type not in processors: +async def run_processor(request): + if request.message.type not in processors: return - logging.verbose(f'New "{data.type}" from actor: {actor.id}') - return await processors[data.type](request, actor, data, nodeinfo) + logging.verbose(f'New "{request.message.type}" from actor: {request.actor.id}') + return await processors[request.message.type](request) diff --git a/relay/views.py b/relay/views.py index 44a1686..a223d6d 100644 --- a/relay/views.py +++ b/relay/views.py @@ -33,10 +33,10 @@ def register_route(method, path): @register_route('GET', '/') async def home(request): - targets = '
<br>'.join(request.app.database.hostnames) - note = request.app.config.note - count = len(request.app.database.hostnames) - host = request.app.config.host + targets = '<br>
'.join(request.database.hostnames) + note = request.config.note + count = len(request.database.hostnames) + host = request.config.host text = f""" @@ -64,8 +64,8 @@ a:hover {{ color: #8AF; }} @register_route('GET', '/actor') async def actor(request): data = Message.new_actor( - host = request.app.config.host, - pubkey = request.app.database.pubkey + host = request.config.host, + pubkey = request.database.pubkey ) return Response.new(data, ctype='activity') @@ -74,67 +74,63 @@ async def actor(request): @register_route('POST', '/inbox') @register_route('POST', '/actor') async def inbox(request): - config = request.app.config - database = request.app.database + config = request.config + database = request.database ## reject if missing signature header - try: - signature = DotDict.new_from_signature(request.headers['signature']) - - except KeyError: + if not request.signature: logging.verbose('Actor missing signature header') raise HTTPUnauthorized(body='missing signature') - ## read message try: - data = await request.json(loads=Message.new_from_json) + request['message'] = await request.json(loads=Message.new_from_json) + + ## reject if there is no message + if not request.message: + logging.verbose('empty message') + return Response.new_error(400, 'missing message', 'json') ## reject if there is no actor in the message - if 'actor' not in data: - logging.verbose('actor not in data') + if 'actor' not in request.message: + logging.verbose('actor not in message') return Response.new_error(400, 'no actor in message', 'json') except: + ## this code should hopefully never get called traceback.print_exc() logging.verbose('Failed to parse inbox message') return Response.new_error(400, 'failed to parse message', 'json') - actor = await misc.request(signature.keyid) - nodeinfo = await misc.fetch_nodeinfo(actor.domain) + request['actor'] = await misc.request(request.signature.keyid) ## reject if actor is empty - if not actor: - logging.verbose(f'Failed to fetch actor: {actor.id}') + if not request.actor: + logging.verbose(f'Failed to fetch actor: {request.actor.id}') return Response.new_error(400, 'failed to fetch actor', 'json') ## reject if the actor isn't whitelisted while the whiltelist is enabled - elif config.whitelist_enabled and not config.is_whitelisted(actor.domain): - logging.verbose(f'Rejected actor for not being in the whitelist: {actor.id}') + if config.whitelist_enabled and not config.is_whitelisted(request.actor.domain): + logging.verbose(f'Rejected actor for not being in the whitelist: {request.actor.id}') return Response.new_error(403, 'access denied', 'json') ## reject if actor is banned - if request.app['config'].is_banned(actor.domain): + if request.config.is_banned(request.actor.domain): logging.verbose(f'Ignored request from banned actor: {actor.id}') return Response.new_error(403, 'access denied', 'json') - ## reject if software used by actor is banned - if config.is_banned_software(nodeinfo.swname): - logging.verbose(f'Rejected actor for using specific software: {nodeinfo.swname}') - return Response.new_error(403, 'access denied', 'json') - ## reject if the signature is invalid - if not (await misc.validate_signature(actor, signature, request)): + if not (await misc.validate_signature(request.actor, request.signature, request)): logging.verbose(f'signature validation failed for: {actor.id}') return Response.new_error(401, 'signature check failed', 'json') ## reject if activity type isn't 'Follow' and the actor isn't following - if data['type'] != 'Follow' and not 
database.get_inbox(actor.domain): - logging.verbose(f'Rejected actor for trying to post while not following: {actor.id}') + if request.message.type != 'Follow' and not database.get_inbox(request.actor.domain): + logging.verbose(f'Rejected actor for trying to post while not following: {request.actor.id}') return Response.new_error(401, 'access denied', 'json') - logging.debug(f">> payload {data}") + logging.debug(f">> payload {request.message.to_json(4)}") - await run_processor(request, actor, data, nodeinfo) + await run_processor(request) return Response.new(status=202) @@ -146,15 +142,15 @@ async def webfinger(request): except KeyError: return Response.new_error(400, 'missing \'resource\' query key', 'json') - if subject != f'acct:relay@{request.app.config.host}': + if subject != f'acct:relay@{request.config.host}': return Response.new_error(404, 'user not found', 'json') data = { 'subject': subject, - 'aliases': [request.app.config.actor], + 'aliases': [request.config.actor], 'links': [ - {'href': request.app.config.actor, 'rel': 'self', 'type': 'application/activity+json'}, - {'href': request.app.config.actor, 'rel': 'self', 'type': 'application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\"'} + {'href': request.config.actor, 'rel': 'self', 'type': 'application/activity+json'}, + {'href': request.config.actor, 'rel': 'self', 'type': 'application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\"'} ] } @@ -165,7 +161,7 @@ async def webfinger(request): async def nodeinfo_2_0(request): niversion = request.match_info['version'][:3] data = { - 'openRegistrations': not request.app.config.whitelist_enabled, + 'openRegistrations': not request.config.whitelist_enabled, 'protocols': ['activitypub'], 'services': { 'inbound': [], @@ -182,7 +178,7 @@ async def nodeinfo_2_0(request): } }, 'metadata': { - 'peers': request.app.database.hostnames + 'peers': request.database.hostnames }, 'version': niversion } @@ -196,8 +192,8 @@ async def nodeinfo_2_0(request): @register_route('GET', '/.well-known/nodeinfo') async def nodeinfo_wellknown(request): data = WKNodeinfo.new( - v20 = f'https://{request.app.config.host}/nodeinfo/2.0.json', - v21 = f'https://{request.app.config.host}/nodeinfo/2.1.json' + v20 = f'https://{request.config.host}/nodeinfo/2.0.json', + v21 = f'https://{request.config.host}/nodeinfo/2.1.json' ) return Response.new(data, ctype='json') From 85c4df7d8c9f6ddfbd0e6db04ab5e45121f01fc9 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Fri, 18 Nov 2022 16:57:34 -0500 Subject: [PATCH 07/50] remove unecessary method --- relay/database.py | 5 ----- relay/manage.py | 10 ++++++---- 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/relay/database.py b/relay/database.py index 3a9f5c9..ca26f50 100644 --- a/relay/database.py +++ b/relay/database.py @@ -175,11 +175,6 @@ class RelayDatabase(dict): return False - def set_followid(self, domain, followid): - data = self.get_inbox(domain, fail=True) - data['followid'] = followid - - def get_request(self, domain, fail=True): if domain.startswith('http'): domain = urlparse(domain).hostname diff --git a/relay/manage.py b/relay/manage.py index 3b55717..ecd466f 100644 --- a/relay/manage.py +++ b/relay/manage.py @@ -128,11 +128,13 @@ def cli_inbox_add(inbox): if app.config.is_banned(inbox): return click.echo(f'Error: Refusing to add banned inbox: {inbox}') - if app.database.add_inbox(inbox): - app.database.save() - return click.echo(f'Added inbox to the database: {inbox}') + if app.database.get_inbox(inbox): + return 
click.echo(f'Error: Inbox already in database: {inbox}') - click.echo(f'Error: Inbox already in database: {inbox}') + app.database.add_inbox(inbox) + app.database.save() + + click.echo(f'Added inbox to the database: {inbox}') @cli_inbox.command('remove') From ffe14bead35cb7313805c0540d14ab0de3a60db1 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Sun, 20 Nov 2022 05:12:11 -0500 Subject: [PATCH 08/50] ignore account Deletes --- relay/views.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/relay/views.py b/relay/views.py index a223d6d..f39b497 100644 --- a/relay/views.py +++ b/relay/views.py @@ -105,7 +105,12 @@ async def inbox(request): ## reject if actor is empty if not request.actor: - logging.verbose(f'Failed to fetch actor: {request.actor.id}') + ## ld signatures aren't handled atm, so just ignore it + if data.type == 'Delete': + logging.verbose(f'Instance sent a delete which cannot be handled') + return Response.new(status=202) + + logging.verbose(f'Failed to fetch actor: {request.signature.keyid}') return Response.new_error(400, 'failed to fetch actor', 'json') ## reject if the actor isn't whitelisted while the whiltelist is enabled From c04965776555d3d3f2d4a835f69626a5f969fc81 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Sun, 20 Nov 2022 05:22:57 -0500 Subject: [PATCH 09/50] fetch nodeinfo software name on inbox request instead of startup --- relay/application.py | 6 ++++++ relay/database.py | 8 -------- relay/processors.py | 7 +++++++ relay/views.py | 2 ++ 4 files changed, 15 insertions(+), 8 deletions(-) diff --git a/relay/application.py b/relay/application.py index f0ea4fe..52e16c9 100644 --- a/relay/application.py +++ b/relay/application.py @@ -134,6 +134,11 @@ def request_actor(self): except KeyError: pass +def request_instance(self): + try: return self['instance'] + except KeyError: pass + + def request_message(self): try: return self['message'] except KeyError: pass @@ -148,6 +153,7 @@ def request_signature(self): setattr(web.Request, 'actor', property(request_actor)) +setattr(web.Request, 'instance', property(request_instance)) setattr(web.Request, 'message', property(request_message)) setattr(web.Request, 'signature', property(request_signature)) diff --git a/relay/database.py b/relay/database.py index ca26f50..d1d64a7 100644 --- a/relay/database.py +++ b/relay/database.py @@ -83,14 +83,6 @@ class RelayDatabase(dict): self.del_inbox(domain) continue - if not instance.get('software'): - nodeinfo = asyncio.run(fetch_nodeinfo(domain)) - - if not nodeinfo: - continue - - instance['software'] = nodeinfo.swname - new_db = False except FileNotFoundError: diff --git a/relay/processors.py b/relay/processors.py index 4ccb0d1..92df00d 100644 --- a/relay/processors.py +++ b/relay/processors.py @@ -113,5 +113,12 @@ async def run_processor(request): if request.message.type not in processors: return + if request.instance and not request.instance.get('software'): + nodeinfo = await misc.fetch_nodeinfo(request.instance.domain) + + if nodeinfo: + request.instance[nodeinfo] = nodeinfo.swname + request.database.save() + logging.verbose(f'New "{request.message.type}" from actor: {request.actor.id}') return await processors[request.message.type](request) diff --git a/relay/views.py b/relay/views.py index f39b497..c360ca2 100644 --- a/relay/views.py +++ b/relay/views.py @@ -113,6 +113,8 @@ async def inbox(request): logging.verbose(f'Failed to fetch actor: {request.signature.keyid}') return Response.new_error(400, 'failed to fetch actor', 'json') + request['instance'] = 
request.database.get_inbox(request['actor'].inbox) + ## reject if the actor isn't whitelisted while the whiltelist is enabled if config.whitelist_enabled and not config.is_whitelisted(request.actor.domain): logging.verbose(f'Rejected actor for not being in the whitelist: {request.actor.id}') From 9839da906c82cc7d35cdd1a72839294ea01dd300 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Sun, 20 Nov 2022 05:50:14 -0500 Subject: [PATCH 10/50] add optional push worker threads --- relay/application.py | 48 +++++++++++++++++++++++++++++++++++++++++++- relay/config.py | 4 +++- relay/processors.py | 27 ++++++++++++++++--------- relay/views.py | 3 ++- 4 files changed, 70 insertions(+), 12 deletions(-) diff --git a/relay/application.py b/relay/application.py index 52e16c9..a216584 100644 --- a/relay/application.py +++ b/relay/application.py @@ -1,7 +1,9 @@ import asyncio import logging import os +import queue import signal +import threading from aiohttp import web from cachetools import LRUCache @@ -9,7 +11,7 @@ from datetime import datetime, timedelta from .config import RelayConfig from .database import RelayDatabase -from .misc import DotDict, check_open_port, fetch_nodeinfo, set_app +from .misc import DotDict, check_open_port, request, set_app from .views import routes @@ -27,6 +29,8 @@ class Application(web.Application): self['cache'] = DotDict({key: Cache(maxsize=self['config'][key]) for key in self['config'].cachekeys}) self['semaphore'] = asyncio.Semaphore(self['config'].push_limit) + self['workers'] = [] + self['last_worker'] = 0 set_app(self) @@ -71,6 +75,16 @@ class Application(web.Application): return timedelta(seconds=uptime.seconds) + def push_message(self, inbox, message): + worker = self['workers'][self['last_worker']] + worker.queue.put((inbox, message)) + + self['last_worker'] += 1 + + if self['last_worker'] >= len(self['workers']): + self['last_worker'] = 0 + + def set_signal_handler(self): for sig in {'SIGHUP', 'SIGINT', 'SIGQUIT', 'SIGTERM'}: try: @@ -102,6 +116,13 @@ class Application(web.Application): async def handle_run(self): self['running'] = True + if self.config.workers > 0: + for i in range(self.config.workers): + worker = PushWorker(self) + worker.start() + + self['workers'].append(worker) + runner = web.AppRunner(self, access_log_format='%{X-Forwarded-For}i "%r" %s %b "%{User-Agent}i"') await runner.setup() @@ -121,6 +142,7 @@ class Application(web.Application): self['starttime'] = None self['running'] = False + self['workers'].clear() class Cache(LRUCache): @@ -128,6 +150,30 @@ class Cache(LRUCache): self.__maxsize = int(value) +class PushWorker(threading.Thread): + def __init__(self, app): + threading.Thread.__init__(self) + self.app = app + self.queue = queue.Queue() + + + def run(self): + asyncio.run(self.handle_queue()) + + + async def handle_queue(self): + while self.app['running']: + try: + inbox, message = self.queue.get(block=True, timeout=0.25) + self.queue.task_done() + await request(inbox, message) + + logging.verbose(f'New push from Thread-{threading.get_ident()}') + + except queue.Empty: + pass + + ## Can't sub-class web.Request, so let's just add some properties def request_actor(self): try: return self['actor'] diff --git a/relay/config.py b/relay/config.py index fd22f22..998e5d6 100644 --- a/relay/config.py +++ b/relay/config.py @@ -50,7 +50,7 @@ class RelayConfig(DotDict): if key in ['blocked_instances', 'blocked_software', 'whitelist']: assert isinstance(value, (list, set, tuple)) - elif key in ['port', 'json', 'objects', 'digests']: + elif key in 
['port', 'workers', 'json', 'objects', 'digests']: assert isinstance(value, (int)) elif key == 'whitelist_enabled': @@ -92,6 +92,7 @@ class RelayConfig(DotDict): 'port': 8080, 'note': 'Make a note about your instance here.', 'push_limit': 512, + 'workers': 0, 'host': 'relay.example.com', 'blocked_software': [], 'blocked_instances': [], @@ -233,6 +234,7 @@ class RelayConfig(DotDict): 'port': self.port, 'note': self.note, 'push_limit': self.push_limit, + 'workers': self.workers, 'ap': {key: self[key] for key in self.apkeys}, 'cache': {key: self[key] for key in self.cachekeys} } diff --git a/relay/processors.py b/relay/processors.py index 92df00d..46d23e4 100644 --- a/relay/processors.py +++ b/relay/processors.py @@ -11,20 +11,24 @@ async def handle_relay(request): logging.verbose(f'already relayed {request.message.objectid}') return - logging.verbose(f'Relaying post from {request.message.actorid}') - message = misc.Message.new_announce( host = request.config.host, object = request.message.objectid ) + request.cache.objects[request.message.objectid] = message.id + logging.verbose(f'Relaying post from {request.message.actorid}') logging.debug(f'>> relay: {message}') inboxes = misc.distill_inboxes(request.actor, request.message.objectid) - futures = [misc.request(inbox, data=message) for inbox in inboxes] - asyncio.ensure_future(asyncio.gather(*futures)) - request.cache.objects[request.message.objectid] = message.id + if request.config.workers > 0: + for inbox in inboxes: + request.app.push_message(inbox, message) + + else: + futures = [misc.request(inbox, data=message) for inbox in inboxes] + asyncio.ensure_future(asyncio.gather(*futures)) async def handle_forward(request): @@ -37,14 +41,19 @@ async def handle_forward(request): object = request.message ) + request.cache.objects[request.message.id] = message.id logging.verbose(f'Forwarding post from {request.actor.id}') logging.debug(f'>> Relay {request.message}') - inboxes = misc.distill_inboxes(request.actor, request.message.id) - futures = [misc.request(inbox, data=message) for inbox in inboxes] + inboxes = misc.distill_inboxes(request.actor, request.message.objectid) - asyncio.ensure_future(asyncio.gather(*futures)) - request.cache.objects[request.message.id] = message.id + if request.config.workers > 0: + for inbox in inboxes: + request.app.push_message(inbox, message) + + else: + futures = [misc.request(inbox, data=message) for inbox in inboxes] + asyncio.ensure_future(asyncio.gather(*futures)) async def handle_follow(request): diff --git a/relay/views.py b/relay/views.py index c360ca2..9727207 100644 --- a/relay/views.py +++ b/relay/views.py @@ -1,3 +1,4 @@ +import asyncio import logging import subprocess import traceback @@ -137,7 +138,7 @@ async def inbox(request): logging.debug(f">> payload {request.message.to_json(4)}") - await run_processor(request) + asyncio.ensure_future(run_processor(request)) return Response.new(status=202) From c96640bfd77937231ac1b94c3b6ec17387ac3791 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Sun, 20 Nov 2022 06:14:37 -0500 Subject: [PATCH 11/50] add config cli commands --- relay/config.py | 10 +++++++--- relay/manage.py | 33 +++++++++++++++++++++++++++++++-- relay/misc.py | 31 +++++++++++++++++++++++++++++++ 3 files changed, 69 insertions(+), 5 deletions(-) diff --git a/relay/config.py b/relay/config.py index 998e5d6..e4ee5f8 100644 --- a/relay/config.py +++ b/relay/config.py @@ -4,7 +4,7 @@ import yaml from pathlib import Path from urllib.parse import urlparse -from .misc import DotDict +from .misc 
import DotDict, boolean relay_software_names = [ @@ -51,10 +51,12 @@ class RelayConfig(DotDict): assert isinstance(value, (list, set, tuple)) elif key in ['port', 'workers', 'json', 'objects', 'digests']: - assert isinstance(value, (int)) + if not isinstance(value, int): + value = int(value) elif key == 'whitelist_enabled': - assert isinstance(value, bool) + if not isinstance(value, bool): + value = boolean(value) super().__setitem__(key, value) @@ -216,6 +218,8 @@ class RelayConfig(DotDict): self[k] = v + continue + elif key not in self: continue diff --git a/relay/manage.py b/relay/manage.py index ecd466f..8458fd1 100644 --- a/relay/manage.py +++ b/relay/manage.py @@ -12,6 +12,7 @@ from .config import relay_software_names app = None +CONFIG_IGNORE = {'blocked_software', 'blocked_instances', 'whitelist'} @click.group('cli', context_settings={'show_default': True}, invoke_without_command=True) @@ -30,9 +31,37 @@ def cli(ctx, config): relay_run.callback() -@cli.group('inbox') +# todo: add config default command for resetting config key +@cli.group('config', invoke_without_command=True) @click.pass_context -def cli_inbox(ctx): +def cli_config(ctx): + 'List the current relay config' + + if ctx.invoked_subcommand: + return + + click.echo('Relay Config:') + + for key, value in app.config.items(): + if key not in CONFIG_IGNORE: + key = f'{key}:'.ljust(20) + click.echo(f'- {key} {value}') + + +@cli_config.command('set') +@click.argument('key') +@click.argument('value') +def cli_config_set(key, value): + 'Set a config value' + + app.config[key] = value + app.config.save() + + print(f'{key}: {app.config[key]}') + + +@cli.group('inbox') +def cli_inbox(): 'Manage the inboxes in the database' pass diff --git a/relay/misc.py b/relay/misc.py index 980caf3..e1fe01e 100644 --- a/relay/misc.py +++ b/relay/misc.py @@ -50,6 +50,37 @@ def build_signing_string(headers, used_headers): return '\n'.join(map(lambda x: ': '.join([x.lower(), headers[x]]), used_headers)) +def boolean(value): + if isinstance(value, str): + if value.lower() in ['on', 'y', 'yes', 'true', 'enable', 'enabled', '1']: + return True + + elif value.lower() in ['off', 'n', 'no', 'false', 'disable', 'disable', '0']: + return False + + else: + raise TypeError(f'Cannot parse string "{value}" as a boolean') + + elif isinstance(value, int): + if value == 1: + return True + + elif value == 0: + return False + + else: + raise ValueError('Integer value must be 1 or 0') + + elif value == None: + return False + + try: + return value.__bool__() + + except AttributeError: + raise TypeError(f'Cannot convert object of type "{clsname(value)}"') + + def check_open_port(host, port): if host == '0.0.0.0': host = '127.0.0.1' From 395971914b68f2e53ada81495349fbe5fb3c2474 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Sun, 20 Nov 2022 06:24:33 -0500 Subject: [PATCH 12/50] organize manage.py --- relay/manage.py | 102 ++++++++++++++++++++++++------------------------ 1 file changed, 51 insertions(+), 51 deletions(-) diff --git a/relay/manage.py b/relay/manage.py index 8458fd1..a901397 100644 --- a/relay/manage.py +++ b/relay/manage.py @@ -25,10 +25,59 @@ def cli(ctx, config): if not ctx.invoked_subcommand: if app.config.host.endswith('example.com'): - relay_setup.callback() + cli_setup.callback() else: - relay_run.callback() + cli_run.callback() + + +@cli.command('setup') +def cli_setup(): + 'Generate a new config' + + while True: + app.config.host = click.prompt('What domain will the relay be hosted on?', default=app.config.host) + + if not 
config.host.endswith('example.com'): + break + + click.echo('The domain must not be example.com') + + app.config.listen = click.prompt('Which address should the relay listen on?', default=app.config.listen) + + while True: + app.config.port = click.prompt('What TCP port should the relay listen on?', default=app.config.port, type=int) + break + + app.config.save() + + if not app['is_docker'] and click.confirm('Relay all setup! Would you like to run it now?'): + cli_run.callback() + + +@cli.command('run') +def cli_run(): + 'Run the relay' + + if app.config.host.endswith('example.com'): + return click.echo('Relay is not set up. Please edit your relay config or run "activityrelay setup".') + + vers_split = platform.python_version().split('.') + pip_command = 'pip3 uninstall pycrypto && pip3 install pycryptodome' + + if Crypto.__version__ == '2.6.1': + if int(vers_split[1]) > 7: + click.echo('Error: PyCrypto is broken on Python 3.8+. Please replace it with pycryptodome before running again. Exiting...') + return click.echo(pip_command) + + else: + click.echo('Warning: PyCrypto is old and should be replaced with pycryptodome') + return click.echo(pip_command) + + if not misc.check_open_port(app.config.listen, app.config.port): + return click.echo(f'Error: A server is already running on port {app.config.port}') + + app.run() # todo: add config default command for resetting config key @@ -350,55 +399,6 @@ def cli_whitelist_remove(instance): click.echo(f'Removed instance from the whitelist: {instance}') -@cli.command('setup') -def relay_setup(): - 'Generate a new config' - - while True: - app.config.host = click.prompt('What domain will the relay be hosted on?', default=app.config.host) - - if not config.host.endswith('example.com'): - break - - click.echo('The domain must not be example.com') - - app.config.listen = click.prompt('Which address should the relay listen on?', default=app.config.listen) - - while True: - app.config.port = click.prompt('What TCP port should the relay listen on?', default=app.config.port, type=int) - break - - app.config.save() - - if not app['is_docker'] and click.confirm('Relay all setup! Would you like to run it now?'): - relay_run.callback() - - -@cli.command('run') -def relay_run(): - 'Run the relay' - - if app.config.host.endswith('example.com'): - return click.echo('Relay is not set up. Please edit your relay config or run "activityrelay setup".') - - vers_split = platform.python_version().split('.') - pip_command = 'pip3 uninstall pycrypto && pip3 install pycryptodome' - - if Crypto.__version__ == '2.6.1': - if int(vers_split[1]) > 7: - click.echo('Error: PyCrypto is broken on Python 3.8+. Please replace it with pycryptodome before running again. 
Exiting...') - return click.echo(pip_command) - - else: - click.echo('Warning: PyCrypto is old and should be replaced with pycryptodome') - return click.echo(pip_command) - - if not misc.check_open_port(app.config.listen, app.config.port): - return click.echo(f'Error: A server is already running on port {app.config.port}') - - app.run() - - def main(): cli(prog_name='relay') From 242052386e7c980f74d3b720269b542e64856bf5 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Sun, 20 Nov 2022 22:24:36 -0500 Subject: [PATCH 13/50] use correct actor variable for cli_inbox_follow --- relay/manage.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/relay/manage.py b/relay/manage.py index a901397..3b31889 100644 --- a/relay/manage.py +++ b/relay/manage.py @@ -146,11 +146,15 @@ def cli_inbox_follow(actor): except KeyError: actor_data = asyncio.run(misc.request(actor)) + + if not actor_data: + return click.echo(f'Failed to fetch actor: {actor}') + inbox = actor_data.shared_inbox message = misc.Message.new_follow( host = app.config.host, - actor = actor.id + actor = actor ) asyncio.run(misc.request(inbox, message)) From a838e4324b9cd20d9082eeda3fd3f467340a3211 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Tue, 22 Nov 2022 18:09:25 -0500 Subject: [PATCH 14/50] fix NameError in inbox --- relay/views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/relay/views.py b/relay/views.py index 9727207..a91f078 100644 --- a/relay/views.py +++ b/relay/views.py @@ -107,7 +107,7 @@ async def inbox(request): ## reject if actor is empty if not request.actor: ## ld signatures aren't handled atm, so just ignore it - if data.type == 'Delete': + if request['message'].type == 'Delete': logging.verbose(f'Instance sent a delete which cannot be handled') return Response.new(status=202) From da56d4bb610a09d0265ae243778d6d9174d52e7c Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Tue, 22 Nov 2022 18:11:41 -0500 Subject: [PATCH 15/50] add extra logging in misc.request --- relay/misc.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/relay/misc.py b/relay/misc.py index e1fe01e..f749db1 100644 --- a/relay/misc.py +++ b/relay/misc.py @@ -10,6 +10,7 @@ from Crypto.Hash import SHA, SHA256, SHA512 from Crypto.PublicKey import RSA from Crypto.Signature import PKCS1_v1_5 from aiohttp import ClientSession +from aiohttp.client_exceptions import ClientConnectorError from aiohttp.hdrs import METH_ALL as METHODS from aiohttp.web import Response as AiohttpResponse, View as AiohttpView from datetime import datetime @@ -260,6 +261,11 @@ async def request(uri, data=None, force=False, sign_headers=True, activity=True) return resp_data except JSONDecodeError: + logging.verbose(f'Failed to parse JSON') + return + + except ClientConnectorError: + logging.verbose(f'Failed to connect to {url.netloc}') return except Exception: From 8541f637629ed060c318df4a52b7beb8f8970fd6 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Thu, 24 Nov 2022 16:01:23 -0500 Subject: [PATCH 16/50] add timeout option to misc.request --- relay/misc.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/relay/misc.py b/relay/misc.py index f749db1..7f2bb56 100644 --- a/relay/misc.py +++ b/relay/misc.py @@ -9,8 +9,8 @@ import uuid from Crypto.Hash import SHA, SHA256, SHA512 from Crypto.PublicKey import RSA from Crypto.Signature import PKCS1_v1_5 -from aiohttp import ClientSession -from aiohttp.client_exceptions import ClientConnectorError +from aiohttp import ClientSession, ClientTimeout +from 
aiohttp.client_exceptions import ClientConnectorError, ServerTimeoutError from aiohttp.hdrs import METH_ALL as METHODS from aiohttp.web import Response as AiohttpResponse, View as AiohttpView from datetime import datetime @@ -179,7 +179,7 @@ async def fetch_nodeinfo(domain): return Nodeinfo(nodeinfo) -async def request(uri, data=None, force=False, sign_headers=True, activity=True): +async def request(uri, data=None, force=False, sign_headers=True, activity=True, timeout=10): ## If a get request and not force, try to use the cache first if not data and not force: try: @@ -229,7 +229,8 @@ async def request(uri, data=None, force=False, sign_headers=True, activity=True) else: logging.verbose(f'Sending GET request to url: {uri}') - async with ClientSession(trace_configs=http_debug()) as session, app.semaphore: + timeout_cfg = ClientTimeout(connect=timeout) + async with ClientSession(trace_configs=http_debug(), timeout=timeout_cfg) as session, app.semaphore: async with session.request(method, uri, headers=headers, data=data) as resp: ## aiohttp has been known to leak if the response hasn't been read, ## so we're just gonna read the request no matter what @@ -264,7 +265,7 @@ async def request(uri, data=None, force=False, sign_headers=True, activity=True) logging.verbose(f'Failed to parse JSON') return - except ClientConnectorError: + except (ClientConnectorError, ServerTimeoutError): logging.verbose(f'Failed to connect to {url.netloc}') return From 017363ecd5a371e25c7d9c23254df9e43f230378 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Fri, 25 Nov 2022 13:19:29 -0500 Subject: [PATCH 17/50] fix nodeinfo fetching in run_processor --- relay/processors.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/relay/processors.py b/relay/processors.py index 46d23e4..0b575b4 100644 --- a/relay/processors.py +++ b/relay/processors.py @@ -123,10 +123,10 @@ async def run_processor(request): return if request.instance and not request.instance.get('software'): - nodeinfo = await misc.fetch_nodeinfo(request.instance.domain) + nodeinfo = await misc.fetch_nodeinfo(request.instance['domain']) if nodeinfo: - request.instance[nodeinfo] = nodeinfo.swname + request.instance['software'] = nodeinfo.swname request.database.save() logging.verbose(f'New "{request.message.type}" from actor: {request.actor.id}') From fbe5746a18ad6721d48d7ef94e20378f9acad35f Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Fri, 25 Nov 2022 13:29:45 -0500 Subject: [PATCH 18/50] fix NameError in cli_whitelist_remove --- relay/manage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/relay/manage.py b/relay/manage.py index 3b31889..4fb9614 100644 --- a/relay/manage.py +++ b/relay/manage.py @@ -397,7 +397,7 @@ def cli_whitelist_remove(instance): app.config.save() if app.config.whitelist_enabled: - if app.database.del_inbox(inbox): + if app.database.del_inbox(instance): app.database.save() click.echo(f'Removed instance from the whitelist: {instance}') From 32764a1f93956e2c237c689aaf31389feceb9264 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Fri, 25 Nov 2022 13:39:52 -0500 Subject: [PATCH 19/50] make sure domain key exists for inboxes --- relay/database.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/relay/database.py b/relay/database.py index d1d64a7..85daf6b 100644 --- a/relay/database.py +++ b/relay/database.py @@ -71,6 +71,7 @@ class RelayDatabase(dict): for item in data.get('relay-list', []): domain = urlparse(item).hostname self['relay-list'][domain] = { + 'domain': domain, 'inbox': item, 
'followid': None } @@ -83,6 +84,9 @@ class RelayDatabase(dict): self.del_inbox(domain) continue + if not instance.get('domain'): + instance['domain'] = domain + new_db = False except FileNotFoundError: From b85b4ab80b1e7f6d0e0966749eb89c5f709f9c2d Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Sat, 26 Nov 2022 18:56:34 -0500 Subject: [PATCH 20/50] create HttpClient class to avoid creating a new session every request --- relay/application.py | 36 +++++--- relay/config.py | 24 ++--- relay/database.py | 2 - relay/http_client.py | 203 +++++++++++++++++++++++++++++++++++++++++++ relay/manage.py | 12 +-- relay/misc.py | 148 ------------------------------- relay/processors.py | 69 +++++++-------- relay/views.py | 2 +- 8 files changed, 272 insertions(+), 224 deletions(-) create mode 100644 relay/http_client.py diff --git a/relay/application.py b/relay/application.py index a216584..8133d7c 100644 --- a/relay/application.py +++ b/relay/application.py @@ -6,12 +6,12 @@ import signal import threading from aiohttp import web -from cachetools import LRUCache from datetime import datetime, timedelta from .config import RelayConfig from .database import RelayDatabase -from .misc import DotDict, check_open_port, request, set_app +from .http_client import HttpClient +from .misc import DotDict, check_open_port, set_app from .views import routes @@ -27,8 +27,6 @@ class Application(web.Application): if not self['config'].load(): self['config'].save() - self['cache'] = DotDict({key: Cache(maxsize=self['config'][key]) for key in self['config'].cachekeys}) - self['semaphore'] = asyncio.Semaphore(self['config'].push_limit) self['workers'] = [] self['last_worker'] = 0 @@ -37,12 +35,18 @@ class Application(web.Application): self['database'] = RelayDatabase(self['config']) self['database'].load() + self['client'] = HttpClient( + limit = self.config.push_limit, + timeout = self.config.timeout, + cache_size = self.config.json_cache + ) + self.set_signal_handler() @property - def cache(self): - return self['cache'] + def client(self): + return self['client'] @property @@ -76,6 +80,9 @@ class Application(web.Application): def push_message(self, inbox, message): + if self.config.workers <= 0: + return asyncio.ensure_future(self.client.post(inbox, message)) + worker = self['workers'][self['last_worker']] worker.queue.put((inbox, message)) @@ -145,11 +152,6 @@ class Application(web.Application): self['workers'].clear() -class Cache(LRUCache): - def set_maxsize(self, value): - self.__maxsize = int(value) - - class PushWorker(threading.Thread): def __init__(self, app): threading.Thread.__init__(self) @@ -158,6 +160,12 @@ class PushWorker(threading.Thread): def run(self): + self.client = HttpClient( + limit = self.app.config.push_limit, + timeout = self.app.config.timeout, + cache_size = self.app.config.json_cache + ) + asyncio.run(self.handle_queue()) @@ -166,13 +174,14 @@ class PushWorker(threading.Thread): try: inbox, message = self.queue.get(block=True, timeout=0.25) self.queue.task_done() - await request(inbox, message) - logging.verbose(f'New push from Thread-{threading.get_ident()}') + await self.client.post(inbox, message) except queue.Empty: pass + await self.client.close() + ## Can't sub-class web.Request, so let's just add some properties def request_actor(self): @@ -203,7 +212,6 @@ setattr(web.Request, 'instance', property(request_instance)) setattr(web.Request, 'message', property(request_message)) setattr(web.Request, 'signature', property(request_signature)) -setattr(web.Request, 'cache', property(lambda self: 
self.app.cache)) setattr(web.Request, 'config', property(lambda self: self.app.config)) setattr(web.Request, 'database', property(lambda self: self.app.database)) setattr(web.Request, 'semaphore', property(lambda self: self.app.semaphore)) diff --git a/relay/config.py b/relay/config.py index e4ee5f8..090b63c 100644 --- a/relay/config.py +++ b/relay/config.py @@ -24,12 +24,6 @@ class RelayConfig(DotDict): 'whitelist' } - cachekeys = { - 'json', - 'objects', - 'digests' - } - def __init__(self, path, is_docker): DotDict.__init__(self, {}) @@ -50,7 +44,7 @@ class RelayConfig(DotDict): if key in ['blocked_instances', 'blocked_software', 'whitelist']: assert isinstance(value, (list, set, tuple)) - elif key in ['port', 'workers', 'json', 'objects', 'digests']: + elif key in ['port', 'workers', 'json_cache', 'timeout']: if not isinstance(value, int): value = int(value) @@ -94,15 +88,14 @@ class RelayConfig(DotDict): 'port': 8080, 'note': 'Make a note about your instance here.', 'push_limit': 512, + 'json_cache': 1024, + 'timeout': 10, 'workers': 0, 'host': 'relay.example.com', + 'whitelist_enabled': False, 'blocked_software': [], 'blocked_instances': [], - 'whitelist': [], - 'whitelist_enabled': False, - 'json': 1024, - 'objects': 1024, - 'digests': 1024 + 'whitelist': [] }) def ban_instance(self, instance): @@ -211,7 +204,7 @@ class RelayConfig(DotDict): return False for key, value in config.items(): - if key in ['ap', 'cache']: + if key in ['ap']: for k, v in value.items(): if k not in self: continue @@ -239,8 +232,9 @@ class RelayConfig(DotDict): 'note': self.note, 'push_limit': self.push_limit, 'workers': self.workers, - 'ap': {key: self[key] for key in self.apkeys}, - 'cache': {key: self[key] for key in self.cachekeys} + 'json_cache': self.json_cache, + 'timeout': self.timeout, + 'ap': {key: self[key] for key in self.apkeys} } with open(self._path, 'w') as fd: diff --git a/relay/database.py b/relay/database.py index 85daf6b..82adce4 100644 --- a/relay/database.py +++ b/relay/database.py @@ -6,8 +6,6 @@ import traceback from Crypto.PublicKey import RSA from urllib.parse import urlparse -from .misc import fetch_nodeinfo - class RelayDatabase(dict): def __init__(self, config): diff --git a/relay/http_client.py b/relay/http_client.py new file mode 100644 index 0000000..d664a88 --- /dev/null +++ b/relay/http_client.py @@ -0,0 +1,203 @@ +import logging +import traceback + +from aiohttp import ClientSession, ClientTimeout, TCPConnector +from aiohttp.client_exceptions import ClientConnectorError, ServerTimeoutError +from datetime import datetime +from cachetools import LRUCache +from json.decoder import JSONDecodeError +from urllib.parse import urlparse + +from . 
import __version__ +from .misc import ( + MIMETYPES, + DotDict, + Message, + create_signature_header, + generate_body_digest +) + + +HEADERS = { + 'Accept': f'{MIMETYPES["activity"]}, {MIMETYPES["json"]};q=0.9', + 'User-Agent': f'ActivityRelay/{__version__}' +} + + +class Cache(LRUCache): + def set_maxsize(self, value): + self.__maxsize = int(value) + + +class HttpClient: + def __init__(self, limit=100, timeout=10, cache_size=1024): + self.cache = Cache(cache_size) + self.cfg = {'limit': limit, 'timeout': timeout} + self._conn = None + self._session = None + + + @property + def limit(self): + return self.cfg['limit'] + + + @property + def timeout(self): + return self.cfg['timeout'] + + + def sign_headers(self, method, url, message=None): + parsed = urlparse(url) + headers = { + '(request-target)': f'{method.lower()} {parsed.path}', + 'Date': datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT'), + 'Host': parsed.netloc + } + + if message: + data = message.to_json() + headers.update({ + 'Digest': f'SHA-256={generate_body_digest(data)}', + 'Content-Length': str(len(data.encode('utf-8'))) + }) + + headers['Signature'] = create_signature_header(headers) + + del headers['(request-target)'] + del headers['Host'] + + return headers + + + async def open(self): + if self._session: + return + + self._conn = TCPConnector( + limit = self.limit, + ttl_dns_cache = 300, + ) + + self._session = ClientSession( + connector = self._conn, + headers = HEADERS, + connector_owner = True, + timeout = ClientTimeout(total=self.timeout) + ) + + + async def close(self): + if not self._session: + return + + await self._session.close() + await self._conn.close() + + self._conn = None + self._session = None + + + async def get(self, url, sign_headers=False, loads=None, force=False): + await self.open() + + try: url, _ = url.split('#', 1) + except: pass + + if not force and url in self.cache: + return self.cache[url] + + headers = {} + + if sign_headers: + headers.update(self.sign_headers('GET', url)) + + try: + logging.verbose(f'Fetching resource: {url}') + + async with self._session.get(url, headers=headers) as resp: + ## Not expecting a response with 202s, so just return + if resp.status == 202: + return + + elif resp.status != 200: + logging.verbose(f'Received error when requesting {url}: {resp.status}') + logging.verbose(await resp.read()) # change this to debug + return + + if loads: + if issubclass(loads, DotDict): + message = await resp.json(loads=loads.new_from_json) + + else: + message = await resp.json(loads=loads) + + elif resp.content_type == MIMETYPES['activity']: + message = await resp.json(loads=Message.new_from_json) + + elif resp.content_type == MIMETYPES['json']: + message = await resp.json(loads=DotDict.new_from_json) + + else: + # todo: raise TypeError or something + logging.verbose(f'Invalid Content-Type for "{url}": {resp.content_type}') + return logging.debug(f'Response: {resp.read()}') + + logging.debug(f'{url} >> resp {message.to_json(4)}') + + self.cache[url] = message + return message + + except JSONDecodeError: + logging.verbose(f'Failed to parse JSON') + + except (ClientConnectorError, ServerTimeoutError): + logging.verbose(f'Failed to connect to {urlparse(url).netloc}') + + except Exception as e: + traceback.print_exc() + raise e + + + async def post(self, url, message): + await self.open() + + headers = {'Content-Type': 'application/activity+json'} + headers.update(self.sign_headers('POST', url, message)) + + try: + logging.verbose(f'Sending "{message.type}" to {url}') + + async with 
self._session.post(url, headers=headers, data=message.to_json()) as resp: + ## Not expecting a response, so just return + if resp.status in {200, 202}: + return logging.verbose(f'Successfully sent "{message.type}" to {url}') + + logging.verbose(f'Received error when pushing to {url}: {resp.status}') + return logging.verbose(await resp.read()) # change this to debug + + except (ClientConnectorError, ServerTimeoutError): + logging.verbose(f'Failed to connect to {url.netloc}') + + ## prevent workers from being brought down + except Exception as e: + traceback.print_exc() + + + ## Additional methods ## + async def fetch_nodeinfo(domain): + nodeinfo_url = None + wk_nodeinfo = await self.get(f'https://{domain}/.well-known/nodeinfo', loads=WKNodeinfo) + + for version in ['20', '21']: + try: + nodeinfo_url = wk_nodeinfo.get_url(version) + + except KeyError: + pass + + if not nodeinfo_url: + logging.verbose(f'Failed to fetch nodeinfo url for domain: {domain}') + return False + + return await request(nodeinfo_url, loads=Nodeinfo) or False diff --git a/relay/manage.py b/relay/manage.py index 4fb9614..48f3700 100644 --- a/relay/manage.py +++ b/relay/manage.py @@ -145,7 +145,7 @@ def cli_inbox_follow(actor): inbox = inbox_data['inbox'] except KeyError: - actor_data = asyncio.run(misc.request(actor)) + actor_data = asyncio.run(app.client.get(actor, sign_headers=True)) if not actor_data: return click.echo(f'Failed to fetch actor: {actor}') @@ -157,7 +157,7 @@ def cli_inbox_follow(actor): actor = actor ) - asyncio.run(misc.request(inbox, message)) + asyncio.run(app.client.post(inbox, message)) click.echo(f'Sent follow message to actor: {actor}') @@ -183,7 +183,7 @@ def cli_inbox_unfollow(actor): ) except KeyError: - actor_data = asyncio.run(misc.request(actor)) + actor_data = asyncio.run(app.client.get(actor, sign_headers=True)) inbox = actor_data.shared_inbox message = misc.Message.new_unfollow( host = app.config.host, @@ -195,7 +195,7 @@ def cli_inbox_unfollow(actor): } ) - asyncio.run(misc.request(inbox, message)) + asyncio.run(app.client.post(inbox, message)) click.echo(f'Sent unfollow message to: {actor}') @@ -319,7 +319,7 @@ def cli_software_ban(name, fetch_nodeinfo): return click.echo('Banned all relay software') if fetch_nodeinfo: - nodeinfo = asyncio.run(misc.fetch_nodeinfo(name)) + nodeinfo = asyncio.run(app.client.fetch_nodeinfo(name)) if not software: click.echo(f'Failed to fetch software name from domain: {name}') @@ -347,7 +347,7 @@ def cli_software_unban(name, fetch_nodeinfo): return click.echo('Unbanned all relay software') if fetch_nodeinfo: - nodeinfo = asyncio.run(misc.fetch_nodeinfo(name)) + nodeinfo = asyncio.run(app.client.fetch_nodeinfo(name)) if not nodeinfo: click.echo(f'Failed to fetch software name from domain: {name}') diff --git a/relay/misc.py b/relay/misc.py index 7f2bb56..628800d 100644 --- a/relay/misc.py +++ b/relay/misc.py @@ -9,8 +9,6 @@ import uuid from Crypto.Hash import SHA, SHA256, SHA512 from Crypto.PublicKey import RSA from Crypto.Signature import PKCS1_v1_5 -from aiohttp import ClientSession, ClientTimeout -from aiohttp.client_exceptions import ClientConnectorError, ServerTimeoutError from aiohttp.hdrs import METH_ALL as METHODS from aiohttp.web import Response as AiohttpResponse, View as AiohttpView from datetime import datetime @@ -117,14 +115,8 @@ def distill_inboxes(actor, object_id): def generate_body_digest(body): - bodyhash = app.cache.digests.get(body) - - if bodyhash: - return bodyhash - h = SHA256.new(body.encode('utf-8')) bodyhash = 
base64.b64encode(h.digest()).decode('utf-8') - app.cache.digests[body] = bodyhash return bodyhash @@ -138,141 +130,6 @@ def sign_signing_string(sigstring, key): return base64.b64encode(sigdata).decode('utf-8') -async def fetch_actor_key(actor): - actor_data = await request(actor) - - if not actor_data: - return None - - try: - return RSA.importKey(actor_data['publicKey']['publicKeyPem']) - - except Exception as e: - logging.debug(f'Exception occured while fetching actor key: {e}') - - -async def fetch_nodeinfo(domain): - nodeinfo_url = None - wk_nodeinfo = await request(f'https://{domain}/.well-known/nodeinfo', sign_headers=False, activity=False) - - if not wk_nodeinfo: - return - - wk_nodeinfo = WKNodeinfo(wk_nodeinfo) - - for version in ['20', '21']: - try: - nodeinfo_url = wk_nodeinfo.get_url(version) - - except KeyError: - pass - - if not nodeinfo_url: - logging.verbose(f'Failed to fetch nodeinfo url for domain: {domain}') - return False - - nodeinfo = await request(nodeinfo_url, sign_headers=False, activity=False) - - if not nodeinfo: - return False - - return Nodeinfo(nodeinfo) - - -async def request(uri, data=None, force=False, sign_headers=True, activity=True, timeout=10): - ## If a get request and not force, try to use the cache first - if not data and not force: - try: - return app.cache.json[uri] - - except KeyError: - pass - - url = urlparse(uri) - method = 'POST' if data else 'GET' - action = data.get('type') if data else None - headers = { - 'Accept': f'{MIMETYPES["activity"]}, {MIMETYPES["json"]};q=0.9', - 'User-Agent': 'ActivityRelay', - } - - if data: - headers['Content-Type'] = MIMETYPES['activity' if activity else 'json'] - - if sign_headers: - signing_headers = { - '(request-target)': f'{method.lower()} {url.path}', - 'Date': datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT'), - 'Host': url.netloc - } - - if data: - assert isinstance(data, dict) - - data = json.dumps(data) - signing_headers.update({ - 'Digest': f'SHA-256={generate_body_digest(data)}', - 'Content-Length': str(len(data.encode('utf-8'))) - }) - - signing_headers['Signature'] = create_signature_header(signing_headers) - - del signing_headers['(request-target)'] - del signing_headers['Host'] - - headers.update(signing_headers) - - try: - if data: - logging.verbose(f'Sending "{action}" to inbox: {uri}') - - else: - logging.verbose(f'Sending GET request to url: {uri}') - - timeout_cfg = ClientTimeout(connect=timeout) - async with ClientSession(trace_configs=http_debug(), timeout=timeout_cfg) as session, app.semaphore: - async with session.request(method, uri, headers=headers, data=data) as resp: - ## aiohttp has been known to leak if the response hasn't been read, - ## so we're just gonna read the request no matter what - resp_data = await resp.read() - - ## Not expecting a response, so just return - if resp.status == 202: - return - - elif resp.status != 200: - if not resp_data: - return logging.verbose(f'Received error when requesting {uri}: {resp.status} {resp_data}') - - return logging.verbose(f'Received error when sending {action} to {uri}: {resp.status} {resp_data}') - - if resp.content_type == MIMETYPES['activity']: - resp_data = await resp.json(loads=Message.new_from_json) - - elif resp.content_type == MIMETYPES['json']: - resp_data = await resp.json(loads=DotDict.new_from_json) - - else: - logging.verbose(f'Invalid Content-Type for "{url}": {resp.content_type}') - return logging.debug(f'Response: {resp_data}') - - logging.debug(f'{uri} >> resp {resp_data}') - - app.cache.json[uri] = resp_data - 
return resp_data - - except JSONDecodeError: - logging.verbose(f'Failed to parse JSON') - return - - except (ClientConnectorError, ServerTimeoutError): - logging.verbose(f'Failed to connect to {url.netloc}') - return - - except Exception: - traceback.print_exc() - - async def validate_signature(actor, signature, http_request): headers = {key.lower(): value for key, value in http_request.headers.items()} headers['(request-target)'] = ' '.join([http_request.method.lower(), http_request.path]) @@ -559,11 +416,6 @@ class View(AiohttpView): return self._request.app - @property - def cache(self): - return self.app.cache - - @property def config(self): return self.app.config diff --git a/relay/processors.py b/relay/processors.py index 0b575b4..5b76485 100644 --- a/relay/processors.py +++ b/relay/processors.py @@ -1,63 +1,55 @@ import asyncio import logging +from cachetools import LRUCache from uuid import uuid4 -from . import misc +from .misc import Message, distill_inboxes + + +cache = LRUCache(1024) async def handle_relay(request): - if request.message.objectid in request.cache.objects: + if request.message.objectid in cache: logging.verbose(f'already relayed {request.message.objectid}') return - message = misc.Message.new_announce( + message = Message.new_announce( host = request.config.host, object = request.message.objectid ) - request.cache.objects[request.message.objectid] = message.id - logging.verbose(f'Relaying post from {request.message.actorid}') + cache[request.message.objectid] = message.id logging.debug(f'>> relay: {message}') - inboxes = misc.distill_inboxes(request.actor, request.message.objectid) + inboxes = distill_inboxes(request.actor, request.message.objectid) - if request.config.workers > 0: - for inbox in inboxes: - request.app.push_message(inbox, message) - - else: - futures = [misc.request(inbox, data=message) for inbox in inboxes] - asyncio.ensure_future(asyncio.gather(*futures)) + for inbox in inboxes: + request.app.push_message(inbox, message) async def handle_forward(request): - if request.message.id in request.cache.objects: + if request.message.id in cache: logging.verbose(f'already forwarded {request.message.id}') return - message = misc.Message.new_announce( + message = Message.new_announce( host = request.config.host, object = request.message ) - request.cache.objects[request.message.id] = message.id - logging.verbose(f'Forwarding post from {request.actor.id}') - logging.debug(f'>> Relay {request.message}') + cache[request.message.id] = message.id + logging.debug(f'>> forward: {message}') - inboxes = misc.distill_inboxes(request.actor, request.message.objectid) + inboxes = distill_inboxes(request.actor, request.message.objectid) - if request.config.workers > 0: - for inbox in inboxes: - request.app.push_message(inbox, message) - - else: - futures = [misc.request(inbox, data=message) for inbox in inboxes] - asyncio.ensure_future(asyncio.gather(*futures)) + for inbox in inboxes: + request.app.push_message(inbox, message) async def handle_follow(request): - nodeinfo = await misc.fetch_nodeinfo(request.actor.domain) + nodeinfo = await request.app.client.fetch_nodeinfo(request.actor.domain) software = nodeinfo.swname if nodeinfo else None ## reject if software used by actor is banned @@ -67,9 +59,9 @@ async def handle_follow(request): request.database.add_inbox(request.actor.shared_inbox, request.message.id, software) request.database.save() - await misc.request( + await request.app.push_message( request.actor.shared_inbox, - misc.Message.new_response( + 
Message.new_response( host = request.config.host, actor = request.actor.id, followid = request.message.id, @@ -80,9 +72,9 @@ async def handle_follow(request): # Are Akkoma and Pleroma the only two that expect a follow back? # Ignoring only Mastodon for now if software != 'mastodon': - await misc.request( + await request.app.push_message( request.actor.shared_inbox, - misc.Message.new_follow( + Message.new_follow( host = request.config.host, actor = request.actor.id ) @@ -99,14 +91,15 @@ async def handle_undo(request): request.database.save() - message = misc.Message.new_unfollow( - host = request.config.host, - actor = request.actor.id, - follow = request.message + await request.app.push_message( + request.actor.shared_inbox, + Message.new_unfollow( + host = request.config.host, + actor = request.actor.id, + follow = request.message + ) ) - await misc.request(request.actor.shared_inbox, message) - processors = { 'Announce': handle_relay, @@ -123,7 +116,7 @@ async def run_processor(request): return if request.instance and not request.instance.get('software'): - nodeinfo = await misc.fetch_nodeinfo(request.instance['domain']) + nodeinfo = await request.app.client.fetch_nodeinfo(request.instance['domain']) if nodeinfo: request.instance['software'] = nodeinfo.swname diff --git a/relay/views.py b/relay/views.py index a91f078..76cafec 100644 --- a/relay/views.py +++ b/relay/views.py @@ -102,7 +102,7 @@ async def inbox(request): logging.verbose('Failed to parse inbox message') return Response.new_error(400, 'failed to parse message', 'json') - request['actor'] = await misc.request(request.signature.keyid) + request['actor'] = await request.app.client.get(request.signature.keyid, sign_headers=True) ## reject if actor is empty if not request.actor: From 15b314922c27db91d407557076fec90ff2907434 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Sat, 26 Nov 2022 19:59:20 -0500 Subject: [PATCH 21/50] fix running via docker --- relay/application.py | 8 +------- relay/config.py | 25 +++++++++++++++++-------- relay/manage.py | 2 +- 3 files changed, 19 insertions(+), 16 deletions(-) diff --git a/relay/application.py b/relay/application.py index 8133d7c..26d8d4a 100644 --- a/relay/application.py +++ b/relay/application.py @@ -21,8 +21,7 @@ class Application(web.Application): self['starttime'] = None self['running'] = False - self['is_docker'] = bool(os.environ.get('DOCKER_RUNNING')) - self['config'] = RelayConfig(cfgpath, self['is_docker']) + self['config'] = RelayConfig(cfgpath) if not self['config'].load(): self['config'].save() @@ -59,11 +58,6 @@ class Application(web.Application): return self['database'] - @property - def is_docker(self): - return self['is_docker'] - - @property def semaphore(self): return self['semaphore'] diff --git a/relay/config.py b/relay/config.py index 090b63c..aa23ee2 100644 --- a/relay/config.py +++ b/relay/config.py @@ -1,6 +1,8 @@ import json +import os import yaml +from functools import cached_property from pathlib import Path from urllib.parse import urlparse @@ -25,22 +27,17 @@ class RelayConfig(DotDict): } - def __init__(self, path, is_docker): + def __init__(self, path): DotDict.__init__(self, {}) - if is_docker: - path = '/data/relay.yaml' + if self.is_docker: + path = '/data/config.yaml' - self._isdocker = is_docker self._path = Path(path).expanduser() - self.reset() def __setitem__(self, key, value): - if self._isdocker and key in ['db', 'listen', 'port']: - return - if key in ['blocked_instances', 'blocked_software', 'whitelist']: assert isinstance(value, (list, set, 
tuple)) @@ -80,6 +77,11 @@ class RelayConfig(DotDict): return f'{self.actor}#main-key' + @cached_property + def is_docker(self): + return bool(os.environ.get('DOCKER_RUNNING')) + + def reset(self): self.clear() self.update({ @@ -98,6 +100,13 @@ class RelayConfig(DotDict): 'whitelist': [] }) + if self.is_docker: + self.update({ + 'db': Path('/data/relay.jsonld'), + 'listen': '127.0.0.1' + }) + + def ban_instance(self, instance): if instance.startswith('http'): instance = urlparse(instance).hostname diff --git a/relay/manage.py b/relay/manage.py index 48f3700..0429461 100644 --- a/relay/manage.py +++ b/relay/manage.py @@ -51,7 +51,7 @@ def cli_setup(): app.config.save() - if not app['is_docker'] and click.confirm('Relay all setup! Would you like to run it now?'): + if not app.config.is_docker and click.confirm('Relay all setup! Would you like to run it now?'): cli_run.callback() From 10301ecbde35061f28d0d06b99d5cea79b92326a Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Sat, 26 Nov 2022 20:25:20 -0500 Subject: [PATCH 22/50] update example config file --- relay.yaml.example | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/relay.yaml.example b/relay.yaml.example index d123e08..4e35697 100644 --- a/relay.yaml.example +++ b/relay.yaml.example @@ -9,13 +9,19 @@ port: 8080 # Note note: "Make a note about your instance here." -# maximum number of inbox posts to do at once +# Number of worker threads to start. If 0, use asyncio futures instead of threads. +workers: 0 + +# Maximum number of inbox posts to do at once +# If workers is set to 1 or above, this is the max for each worker push_limit: 512 -# this section is for ActivityPub +# The amount of json objects to cache from GET requests +json_cache: 1024 + ap: - # this is used for generating activitypub messages, as well as instructions for - # linking AP identities. it should be an SSL-enabled domain reachable by https. + # This is used for generating activitypub messages, as well as instructions for + # linking AP identities. It should be an SSL-enabled domain reachable by https. host: 'relay.example.com' blocked_instances: @@ -35,9 +41,3 @@ ap: #- 'aoderelay' #- 'social.seattle.wa.us-relay' #- 'unciarelay' - -# cache limits as number of items. only change this if you know what you're doing -cache: - objects: 1024 - json: 1024 - digests: 1024 From 130111c847fadb1981feac94cb95b09f7db55a62 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Sat, 26 Nov 2022 20:53:06 -0500 Subject: [PATCH 23/50] update documentation --- docs/commands.md | 14 +++++++++++ docs/configuration.md | 55 ++++++++++++++++++++----------------------- 2 files changed, 39 insertions(+), 30 deletions(-) diff --git a/docs/commands.md b/docs/commands.md index 6e7da54..f8880f6 100644 --- a/docs/commands.md +++ b/docs/commands.md @@ -24,6 +24,20 @@ Run the setup wizard to configure your relay. activityrelay setup +## Config + +List the current configuration key/value pairs + + activityrelay config + + +### Set + +Set a value for a config option + + activityrelay config set + + ## Inbox Manage the list of subscribed instances. diff --git a/docs/configuration.md b/docs/configuration.md index ecadac3..d52ca9e 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -1,6 +1,8 @@ # Configuration -## DB +## General + +### DB The path to the database. It contains the relay actor private key and all subscribed instances. If the path is not absolute, it is relative to the working directory. @@ -8,7 +10,7 @@ instances. 
If the path is not absolute, it is relative to the working directory. db: relay.jsonld -## Listener +### Listener The address and port the relay will listen on. If the reverse proxy (nginx, apache, caddy, etc) is running on the same host, it is recommended to change `listen` to `localhost` @@ -17,22 +19,41 @@ is running on the same host, it is recommended to change `listen` to `localhost` port: 8080 -## Note +### Note A small blurb to describe your relay instance. This will show up on the relay's home page. note: "Make a note about your instance here." -## Post Limit +### Post Limit The maximum number of messages to send out at once. For each incoming message, a message will be sent out to every subscribed instance minus the instance which sent the message. This limit is to prevent too many outgoing connections from being made, so adjust if necessary. +Note: If the `workers` option is set to anything above 0, this limit will be per worker. + push_limit: 512 +### Push Workers + +The relay can be configured to use threads to push messages out. For smaller relays, this isn't +necessary, but bigger ones (>100 instances) will want to set this to the number of available cpu +threads. + + workers: 0 + + +### JSON GET cache limit + +JSON objects (actors, nodeinfo, etc) will get cached when fetched. This will set the max number of +objects to keep in the cache. + + json_cache: 1024 + + ## AP Various ActivityPub-related settings @@ -82,29 +103,3 @@ setting this to the below list will block all other relays and prevent relay cha - aoderelay - social.seattle.wa.us-relay - unciarelay - - -## Cache - -These are object limits for various caches. Only change if you know what you're doing. - - -### Objects - -The urls of messages which have been processed by the relay. - - objects: 1024 - - -### Actors - -The ActivityPub actors of incoming messages. - - actors: 1024 - - -### Actors - -The base64 encoded hashes of messages. 
- - digests: 1024 From 5d01211a34b9f65a0b4db9e24088e6e769b7bf2c Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Sat, 26 Nov 2022 22:16:14 -0500 Subject: [PATCH 24/50] add aputils module for hs2019 support --- relay/application.py | 2 ++ relay/database.py | 31 ++++------------- relay/http_client.py | 48 ++++++++++---------------- relay/misc.py | 80 ++++---------------------------------------- relay/views.py | 11 ++++-- setup.cfg | 1 + 6 files changed, 41 insertions(+), 132 deletions(-) diff --git a/relay/application.py b/relay/application.py index 26d8d4a..b339672 100644 --- a/relay/application.py +++ b/relay/application.py @@ -35,6 +35,7 @@ class Application(web.Application): self['database'].load() self['client'] = HttpClient( + database = self.database, limit = self.config.push_limit, timeout = self.config.timeout, cache_size = self.config.json_cache @@ -155,6 +156,7 @@ class PushWorker(threading.Thread): def run(self): self.client = HttpClient( + database = self.app.database, limit = self.app.config.push_limit, timeout = self.app.config.timeout, cache_size = self.app.config.json_cache diff --git a/relay/database.py b/relay/database.py index 82adce4..d9cbe07 100644 --- a/relay/database.py +++ b/relay/database.py @@ -1,9 +1,9 @@ +import aputils import asyncio import json import logging import traceback -from Crypto.PublicKey import RSA from urllib.parse import urlparse @@ -17,22 +17,7 @@ class RelayDatabase(dict): }) self.config = config - self.PRIVKEY = None - - - @property - def PUBKEY(self): - return self.PRIVKEY.publickey() - - - @property - def pubkey(self): - return self.PUBKEY.exportKey('PEM').decode('utf-8') - - - @property - def privkey(self): - return self['private-key'] + self.signer = None @property @@ -45,11 +30,6 @@ class RelayDatabase(dict): return tuple(data['inbox'] for data in self['relay-list'].values()) - def generate_key(self): - self.PRIVKEY = RSA.generate(4096) - self['private-key'] = self.PRIVKEY.exportKey('PEM').decode('utf-8') - - def load(self): new_db = True @@ -94,12 +74,13 @@ class RelayDatabase(dict): if self.config.db.stat().st_size > 0: raise e from None - if not self.privkey: + if not self['private-key']: logging.info("No actor keys present, generating 4096-bit RSA keypair.") - self.generate_key() + self.signer = aputils.Signer.new(self.config.keyid, 4096) + self['private-key'] = self.signer.export() else: - self.PRIVKEY = RSA.importKey(self.privkey) + self.signer = aputils.Signer(self['private-key'], self.config.keyid) self.save() return not new_db diff --git a/relay/http_client.py b/relay/http_client.py index d664a88..f4337b3 100644 --- a/relay/http_client.py +++ b/relay/http_client.py @@ -1,3 +1,4 @@ +import aputils import logging import traceback @@ -12,9 +13,7 @@ from . 
import __version__ from .misc import ( MIMETYPES, DotDict, - Message, - create_signature_header, - generate_body_digest + Message ) @@ -30,7 +29,8 @@ class Cache(LRUCache): class HttpClient: - def __init__(self, limit=100, timeout=10, cache_size=1024): + def __init__(self, database, limit=100, timeout=10, cache_size=1024): + self.database = database self.cache = Cache(cache_size) self.cfg = {'limit': limit, 'timeout': timeout} self._conn = None @@ -47,29 +47,6 @@ class HttpClient: return self.cfg['timeout'] - def sign_headers(self, method, url, message=None): - parsed = urlparse(url) - headers = { - '(request-target)': f'{method.lower()} {parsed.path}', - 'Date': datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT'), - 'Host': parsed.netloc - } - - if message: - data = message.to_json() - headers.update({ - 'Digest': f'SHA-256={generate_body_digest(data)}', - 'Content-Length': str(len(data.encode('utf-8'))) - }) - - headers['Signature'] = create_signature_header(headers) - - del headers['(request-target)'] - del headers['Host'] - - return headers - - async def open(self): if self._session: return @@ -110,7 +87,7 @@ class HttpClient: headers = {} if sign_headers: - headers.update(self.sign_headers('GET', url)) + headers.update(self.database.signer.sign_headers('GET', url)) try: logging.verbose(f'Fetching resource: {url}') @@ -162,8 +139,19 @@ class HttpClient: async def post(self, url, message): await self.open() + instance = self.database.get_inbox(url) + + ## Akkoma (and probably pleroma) doesn't support hs2019, so use the old algorithm + if instance.get('software') in {'akkoma', 'pleroma'}: + algorithm = aputils.Algorithm.RSASHA256 + + else: + algorithm = aputils.Algorithm.HS2019 + headers = {'Content-Type': 'application/activity+json'} - headers.update(self.sign_headers('POST', url, message)) + headers.update(self.database.signer.sign_headers('POST', url, message, algorithm=algorithm)) + + print(headers) try: logging.verbose(f'Sending "{message.type}" to {url}') @@ -185,7 +173,7 @@ class HttpClient: ## Additional methods ## - async def fetch_nodeinfo(domain): + async def fetch_nodeinfo(self, domain): nodeinfo_url = None wk_nodeinfo = await self.get(f'https://{domain}/.well-known/nodeinfo', loads=WKNodeinfo) diff --git a/relay/misc.py b/relay/misc.py index 628800d..c243a7d 100644 --- a/relay/misc.py +++ b/relay/misc.py @@ -1,3 +1,4 @@ +import aputils import asyncio import base64 import json @@ -6,9 +7,6 @@ import socket import traceback import uuid -from Crypto.Hash import SHA, SHA256, SHA512 -from Crypto.PublicKey import RSA -from Crypto.Signature import PKCS1_v1_5 from aiohttp.hdrs import METH_ALL as METHODS from aiohttp.web import Response as AiohttpResponse, View as AiohttpView from datetime import datetime @@ -21,12 +19,6 @@ from .http_debug import http_debug app = None -HASHES = { - 'sha1': SHA, - 'sha256': SHA256, - 'sha512': SHA512 -} - MIMETYPES = { 'activity': 'application/activity+json', 'html': 'text/html', @@ -92,67 +84,12 @@ def check_open_port(host, port): return False -def create_signature_header(headers): - headers = {k.lower(): v for k, v in headers.items()} - used_headers = headers.keys() - sigstring = build_signing_string(headers, used_headers) - - sig = { - 'keyId': app.config.keyid, - 'algorithm': 'rsa-sha256', - 'headers': ' '.join(used_headers), - 'signature': sign_signing_string(sigstring, app.database.PRIVKEY) - } - - chunks = ['{}="{}"'.format(k, v) for k, v in sig.items()] - return ','.join(chunks) - - def distill_inboxes(actor, object_id): for inbox in 
app.database.inboxes: if inbox != actor.shared_inbox and urlparse(inbox).hostname != urlparse(object_id).hostname: yield inbox -def generate_body_digest(body): - h = SHA256.new(body.encode('utf-8')) - bodyhash = base64.b64encode(h.digest()).decode('utf-8') - - return bodyhash - - -def sign_signing_string(sigstring, key): - pkcs = PKCS1_v1_5.new(key) - h = SHA256.new() - h.update(sigstring.encode('ascii')) - sigdata = pkcs.sign(h) - - return base64.b64encode(sigdata).decode('utf-8') - - -async def validate_signature(actor, signature, http_request): - headers = {key.lower(): value for key, value in http_request.headers.items()} - headers['(request-target)'] = ' '.join([http_request.method.lower(), http_request.path]) - - sigstring = build_signing_string(headers, signature['headers']) - logging.debug(f'sigstring: {sigstring}') - - sign_alg, _, hash_alg = signature['algorithm'].partition('-') - logging.debug(f'sign alg: {sign_alg}, hash alg: {hash_alg}') - - sigdata = base64.b64decode(signature['signature']) - - pkcs = PKCS1_v1_5.new(actor.PUBKEY) - h = HASHES[hash_alg].new() - h.update(sigstring.encode('ascii')) - result = pkcs.verify(h, sigdata) - - http_request['validated'] = result - - logging.debug(f'validates? {result}') - return result - - class DotDict(dict): def __init__(self, _data, **kwargs): dict.__init__(self) @@ -321,16 +258,6 @@ class Message(DotDict): # actor properties - @property - def PUBKEY(self): - return RSA.import_key(self.pubkey) - - - @property - def pubkey(self): - return self.publicKey.publicKeyPem - - @property def shared_inbox(self): return self.get('endpoints', {}).get('sharedInbox', self.inbox) @@ -353,6 +280,11 @@ class Message(DotDict): return self.object + @property + def signer(self): + return aputils.Signer.new_from_actor(self) + + class Nodeinfo(DotDict): @property def swname(self): diff --git a/relay/views.py b/relay/views.py index 76cafec..6b6b43f 100644 --- a/relay/views.py +++ b/relay/views.py @@ -1,3 +1,4 @@ +import aputils import asyncio import logging import subprocess @@ -66,7 +67,7 @@ a:hover {{ color: #8AF; }} async def actor(request): data = Message.new_actor( host = request.config.host, - pubkey = request.database.pubkey + pubkey = request.database.signer.pubkey ) return Response.new(data, ctype='activity') @@ -127,9 +128,13 @@ async def inbox(request): return Response.new_error(403, 'access denied', 'json') ## reject if the signature is invalid - if not (await misc.validate_signature(request.actor, request.signature, request)): + try: + await request.actor.signer.validate_aiohttp_request(request) + + except aputils.SignatureValidationError as e: logging.verbose(f'signature validation failed for: {actor.id}') - return Response.new_error(401, 'signature check failed', 'json') + logging.debug(str(e)) + return Response.new_error(401, str(e), 'json') ## reject if activity type isn't 'Follow' and the actor isn't following if request.message.type != 'Follow' and not database.get_inbox(request.actor.domain): diff --git a/setup.cfg b/setup.cfg index 2345151..cc99d3b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -28,6 +28,7 @@ install_requires = click >= 8.1.2 pycryptodome >= 3.14.1 PyYAML >= 5.0.0 + aputils @ https://git.barkshark.xyz/barkshark/aputils/archive/0.1.1.tar.gz python_requires = >=3.6 [options.extras_require] From 335146a9702a878a869d4a5f1b34af87eb68290e Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Sat, 26 Nov 2022 23:01:18 -0500 Subject: [PATCH 25/50] fix NameError in cli_setup --- relay/manage.py | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/relay/manage.py b/relay/manage.py index 0429461..b805a56 100644 --- a/relay/manage.py +++ b/relay/manage.py @@ -38,7 +38,7 @@ def cli_setup(): while True: app.config.host = click.prompt('What domain will the relay be hosted on?', default=app.config.host) - if not config.host.endswith('example.com'): + if not app.config.host.endswith('example.com'): break click.echo('The domain must not be example.com') From ce9e0c4d000ac79d22e3381e20c25003a853bd7b Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Sat, 26 Nov 2022 23:11:51 -0500 Subject: [PATCH 26/50] remove unnecessary print --- relay/http_client.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/relay/http_client.py b/relay/http_client.py index f4337b3..4bfd94e 100644 --- a/relay/http_client.py +++ b/relay/http_client.py @@ -151,8 +151,6 @@ class HttpClient: headers = {'Content-Type': 'application/activity+json'} headers.update(self.database.signer.sign_headers('POST', url, message, algorithm=algorithm)) - print(headers) - try: logging.verbose(f'Sending "{message.type}" to {url}') From a640db8f0636ba675dd9325471d607ac4507bb5c Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Sat, 26 Nov 2022 23:41:57 -0500 Subject: [PATCH 27/50] update list of active relay software --- relay/config.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/relay/config.py b/relay/config.py index aa23ee2..71956ed 100644 --- a/relay/config.py +++ b/relay/config.py @@ -10,10 +10,9 @@ from .misc import DotDict, boolean relay_software_names = [ - 'activityrelay', - 'aoderelay', - 'social.seattle.wa.us-relay', - 'unciarelay' + 'activityrelay', # https://git.pleroma.social/pleroma/relay + 'aoderelay', # https://git.asonix.dog/asonix/relay + 'feditools-relay' # https://git.ptzo.gdn/feditools/relay ] From 5397bb4653b39953c2e25175a789426b308bcdcb Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Sun, 27 Nov 2022 17:25:54 -0500 Subject: [PATCH 28/50] only use hs2019 for mastodon --- relay/http_client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/relay/http_client.py b/relay/http_client.py index 4bfd94e..4c82fa9 100644 --- a/relay/http_client.py +++ b/relay/http_client.py @@ -141,8 +141,8 @@ class HttpClient: instance = self.database.get_inbox(url) - ## Akkoma (and probably pleroma) doesn't support hs2019, so use the old algorithm - if instance.get('software') in {'akkoma', 'pleroma'}: + ## Using the old algo by default is probably a better idea right now + if instance and instance.get('software') not in {'mastodon'}: algorithm = aputils.Algorithm.RSASHA256 else: From 1a7abb4ecb76bfc1292ca2bc46b37876b89186c6 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Tue, 29 Nov 2022 17:41:04 -0500 Subject: [PATCH 29/50] fix distill_inboxes --- relay/database.py | 11 +++++++++++ relay/misc.py | 6 ------ relay/processors.py | 6 +++--- 3 files changed, 14 insertions(+), 9 deletions(-) diff --git a/relay/database.py b/relay/database.py index d9cbe07..4f83db0 100644 --- a/relay/database.py +++ b/relay/database.py @@ -184,3 +184,14 @@ class RelayDatabase(dict): domain = urlparse(inbox).hostname del self['follow-requests'][domain] + + + def distill_inboxes(self, message): + src_domains = { + message.domain, + urlparse(message.objectid).netloc + } + + for domain, instance in self['relay-list'].items(): + if domain not in src_domains: + yield instance['inbox'] diff --git a/relay/misc.py b/relay/misc.py index c243a7d..c155cfe 100644 --- a/relay/misc.py +++ b/relay/misc.py @@ -84,12 +84,6 @@ def check_open_port(host, port): 
return False -def distill_inboxes(actor, object_id): - for inbox in app.database.inboxes: - if inbox != actor.shared_inbox and urlparse(inbox).hostname != urlparse(object_id).hostname: - yield inbox - - class DotDict(dict): def __init__(self, _data, **kwargs): dict.__init__(self) diff --git a/relay/processors.py b/relay/processors.py index 5b76485..1af7ed0 100644 --- a/relay/processors.py +++ b/relay/processors.py @@ -4,7 +4,7 @@ import logging from cachetools import LRUCache from uuid import uuid4 -from .misc import Message, distill_inboxes +from .misc import Message cache = LRUCache(1024) @@ -23,7 +23,7 @@ async def handle_relay(request): cache[request.message.objectid] = message.id logging.debug(f'>> relay: {message}') - inboxes = distill_inboxes(request.actor, request.message.objectid) + niboxes = request.database.distill_inboxes(request.message) for inbox in inboxes: request.app.push_message(inbox, message) @@ -42,7 +42,7 @@ async def handle_forward(request): cache[request.message.id] = message.id logging.debug(f'>> forward: {message}') - inboxes = distill_inboxes(request.actor, request.message.objectid) + inboxes = request.database.distill_inboxes(request.message) for inbox in inboxes: request.app.push_message(inbox, message) From d172439fac755df00994286997617b2e195b0507 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Fri, 2 Dec 2022 00:11:22 -0500 Subject: [PATCH 30/50] update aputils --- relay/http_client.py | 8 ++++---- relay/processors.py | 2 +- setup.cfg | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/relay/http_client.py b/relay/http_client.py index 4c82fa9..3b8a182 100644 --- a/relay/http_client.py +++ b/relay/http_client.py @@ -87,7 +87,7 @@ class HttpClient: headers = {} if sign_headers: - headers.update(self.database.signer.sign_headers('GET', url)) + headers.update(self.database.signer.sign_headers('GET', url, algorithm='original')) try: logging.verbose(f'Fetching resource: {url}') @@ -142,11 +142,11 @@ class HttpClient: instance = self.database.get_inbox(url) ## Using the old algo by default is probably a better idea right now - if instance and instance.get('software') not in {'mastodon'}: - algorithm = aputils.Algorithm.RSASHA256 + if instance and instance.get('software') in {'mastodon'}: + algorithm = 'hs2019' else: - algorithm = aputils.Algorithm.HS2019 + algorithm = 'original' headers = {'Content-Type': 'application/activity+json'} headers.update(self.database.signer.sign_headers('POST', url, message, algorithm=algorithm)) diff --git a/relay/processors.py b/relay/processors.py index 1af7ed0..2e2a977 100644 --- a/relay/processors.py +++ b/relay/processors.py @@ -23,7 +23,7 @@ async def handle_relay(request): cache[request.message.objectid] = message.id logging.debug(f'>> relay: {message}') - niboxes = request.database.distill_inboxes(request.message) + inboxes = request.database.distill_inboxes(request.message) for inbox in inboxes: request.app.push_message(inbox, message) diff --git a/setup.cfg b/setup.cfg index cc99d3b..da4201b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -28,7 +28,7 @@ install_requires = click >= 8.1.2 pycryptodome >= 3.14.1 PyYAML >= 5.0.0 - aputils @ https://git.barkshark.xyz/barkshark/aputils/archive/0.1.1.tar.gz + aputils @ https://git.barkshark.xyz/barkshark/aputils/archive/0.1.2.tar.gz python_requires = >=3.6 [options.extras_require] From d5b9053f71affcc2a6ac4644c28b8bcb6c6e1859 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Fri, 2 Dec 2022 00:50:57 -0500 Subject: [PATCH 31/50] replace various classes with aputils classes 
--- relay/http_client.py | 9 ++++--- relay/misc.py | 29 --------------------- relay/views.py | 60 +++++++++++++++----------------------------- 3 files changed, 26 insertions(+), 72 deletions(-) diff --git a/relay/http_client.py b/relay/http_client.py index 3b8a182..e164430 100644 --- a/relay/http_client.py +++ b/relay/http_client.py @@ -1,9 +1,9 @@ -import aputils import logging import traceback from aiohttp import ClientSession, ClientTimeout, TCPConnector from aiohttp.client_exceptions import ClientConnectorError, ServerTimeoutError +from aputils import Nodeinfo, WellKnownNodeinfo from datetime import datetime from cachetools import LRUCache from json.decoder import JSONDecodeError @@ -173,7 +173,10 @@ class HttpClient: ## Additional methods ## async def fetch_nodeinfo(self, domain): nodeinfo_url = None - wk_nodeinfo = await self.get(f'https://{domain}/.well-known/nodeinfo', loads=WKNodeinfo) + wk_nodeinfo = await self.get( + f'https://{domain}/.well-known/nodeinfo', + loads = WellKnownNodeinfo.new_from_json + ) for version in ['20', '21']: try: @@ -186,4 +189,4 @@ class HttpClient: logging.verbose(f'Failed to fetch nodeinfo url for domain: {domain}') return False - return await request(nodeinfo_url, loads=Nodeinfo) or False + return await request(nodeinfo_url, loads=Nodeinfo.new_from_json) or False diff --git a/relay/misc.py b/relay/misc.py index c155cfe..68b6a18 100644 --- a/relay/misc.py +++ b/relay/misc.py @@ -37,10 +37,6 @@ def set_app(new_app): app = new_app -def build_signing_string(headers, used_headers): - return '\n'.join(map(lambda x: ': '.join([x.lower(), headers[x]]), used_headers)) - - def boolean(value): if isinstance(value, str): if value.lower() in ['on', 'y', 'yes', 'true', 'enable', 'enabled', '1']: @@ -279,12 +275,6 @@ class Message(DotDict): return aputils.Signer.new_from_actor(self) -class Nodeinfo(DotDict): - @property - def swname(self): - return self.software.name - - class Response(AiohttpResponse): @classmethod def new(cls, body='', status=200, headers=None, ctype='text'): @@ -350,22 +340,3 @@ class View(AiohttpView): @property def database(self): return self.app.database - - -class WKNodeinfo(DotDict): - @classmethod - def new(cls, v20, v21): - return cls({ - 'links': [ - {'rel': NODEINFO_NS['20'], 'href': v20}, - {'rel': NODEINFO_NS['21'], 'href': v21} - ] - }) - - - def get_url(self, version='20'): - for item in self.links: - if item['rel'] == NODEINFO_NS[version]: - return item['href'] - - raise KeyError(version) diff --git a/relay/views.py b/relay/views.py index 6b6b43f..d658468 100644 --- a/relay/views.py +++ b/relay/views.py @@ -8,7 +8,7 @@ from pathlib import Path from . 
import __version__, misc from .http_debug import STATS -from .misc import DotDict, Message, Response, WKNodeinfo +from .misc import DotDict, Message, Response from .processors import run_processor @@ -158,57 +158,37 @@ async def webfinger(request): if subject != f'acct:relay@{request.config.host}': return Response.new_error(404, 'user not found', 'json') - data = { - 'subject': subject, - 'aliases': [request.config.actor], - 'links': [ - {'href': request.config.actor, 'rel': 'self', 'type': 'application/activity+json'}, - {'href': request.config.actor, 'rel': 'self', 'type': 'application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\"'} - ] - } + data = aputils.Webfinger.new( + handle = 'relay', + domain = request.config.host, + actor = request.config.actor + ) return Response.new(data, ctype='json') @register_route('GET', '/nodeinfo/{version:\d.\d\.json}') -async def nodeinfo_2_0(request): +async def nodeinfo(request): niversion = request.match_info['version'][:3] - data = { - 'openRegistrations': not request.config.whitelist_enabled, - 'protocols': ['activitypub'], - 'services': { - 'inbound': [], - 'outbound': [] - }, - 'software': { - 'name': 'activityrelay', - 'version': version - }, - 'usage': { - 'localPosts': 0, - 'users': { - 'total': 1 - } - }, - 'metadata': { - 'peers': request.database.hostnames - }, - 'version': niversion - } - if version == '2.1': - data['software']['repository'] = 'https://git.pleroma.social/pleroma/relay' + data = dict( + name = 'activityrelay', + version = version, + protocols = ['activitypub'], + open_regs = not request.config.whitelist_enabled, + users = 1, + metadata = {'peers': request.database.hostnames} + ) - return Response.new(data, ctype='json') + if niversion == '2.1': + data['repo'] = 'https://git.pleroma.social/pleroma/relay' + + return Response.new(aputils.Nodeinfo.new(**data), ctype='json') @register_route('GET', '/.well-known/nodeinfo') async def nodeinfo_wellknown(request): - data = WKNodeinfo.new( - v20 = f'https://{request.config.host}/nodeinfo/2.0.json', - v21 = f'https://{request.config.host}/nodeinfo/2.1.json' - ) - + data = aputils.WellKnownNodeinfo.new_template(request.config.host) return Response.new(data, ctype='json') From dcca1eb0dcb163d9e3127618bb5189cd02899bb1 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Fri, 2 Dec 2022 00:52:15 -0500 Subject: [PATCH 32/50] fix HttpClient fetch_nodeinfo and get --- relay/http_client.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/relay/http_client.py b/relay/http_client.py index e164430..28153a5 100644 --- a/relay/http_client.py +++ b/relay/http_client.py @@ -103,11 +103,7 @@ class HttpClient: return if loads: - if issubclass(loads, DotDict): - message = await resp.json(loads=loads.new_from_json) - - else: - message = await resp.json(loads=loads) + message = await resp.json(loads=loads) elif resp.content_type == MIMETYPES['activity']: message = await resp.json(loads=Message.new_from_json) @@ -189,4 +185,4 @@ class HttpClient: logging.verbose(f'Failed to fetch nodeinfo url for domain: {domain}') return False - return await request(nodeinfo_url, loads=Nodeinfo.new_from_json) or False + return await self.get(nodeinfo_url, loads=Nodeinfo.new_from_json) or False From f7e1c6b0b88f6a50cde996f0b18d195cecc39155 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Fri, 2 Dec 2022 11:43:39 -0500 Subject: [PATCH 33/50] make sure db config is a string when saving --- relay/config.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/relay/config.py 
b/relay/config.py index 71956ed..9af25bc 100644 --- a/relay/config.py +++ b/relay/config.py @@ -234,7 +234,8 @@ class RelayConfig(DotDict): def save(self): config = { - 'db': self['db'], + # just turning config.db into a string is good enough for now + 'db': str(self.db), 'listen': self.listen, 'port': self.port, 'note': self.note, From 3b89aa5e842c729861441682cf6bfd09aa4c505e Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Sun, 4 Dec 2022 01:09:45 -0500 Subject: [PATCH 34/50] sort out cli added `whitelist import` command which adds all current inboxes to the whitelist added `config list` fixed a few errors --- relay/manage.py | 47 +++++++++++++++++++++++++++++++---------------- 1 file changed, 31 insertions(+), 16 deletions(-) diff --git a/relay/manage.py b/relay/manage.py index b805a56..0b58ecb 100644 --- a/relay/manage.py +++ b/relay/manage.py @@ -81,13 +81,15 @@ def cli_run(): # todo: add config default command for resetting config key -@cli.group('config', invoke_without_command=True) -@click.pass_context -def cli_config(ctx): - 'List the current relay config' +@cli.group('config') +def cli_config(): + 'Manage the relay config' + pass - if ctx.invoked_subcommand: - return + +@cli_config.command('list') +def cli_config_list(): + 'List the current relay config' click.echo('Relay Config:') @@ -321,14 +323,16 @@ def cli_software_ban(name, fetch_nodeinfo): if fetch_nodeinfo: nodeinfo = asyncio.run(app.client.fetch_nodeinfo(name)) - if not software: + if not nodeinfo: click.echo(f'Failed to fetch software name from domain: {name}') - if config.ban_software(nodeinfo.swname): - app.config.save() - return click.echo(f'Banned software: {nodeinfo.swname}') + name = nodeinfo.sw_name - click.echo(f'Software already banned: {nodeinfo.swname}') + if app.config.ban_software(name): + app.config.save() + return click.echo(f'Banned software: {name}') + + click.echo(f'Software already banned: {name}') @cli_software.command('unban') @@ -343,7 +347,7 @@ def cli_software_unban(name, fetch_nodeinfo): for name in relay_software_names: app.config.unban_software(name) - config.save() + app.config.save() return click.echo('Unbanned all relay software') if fetch_nodeinfo: @@ -352,12 +356,13 @@ def cli_software_unban(name, fetch_nodeinfo): if not nodeinfo: click.echo(f'Failed to fetch software name from domain: {name}') - if app.config.unban_software(nodeinfo.swname): + name = nodeinfo.sw_name + + if app.config.unban_software(name): app.config.save() - return click.echo(f'Unbanned software: {nodeinfo.swname}') - - click.echo(f'Software wasn\'t banned: {nodeinfo.swname}') + return click.echo(f'Unbanned software: {name}') + click.echo(f'Software wasn\'t banned: {name}') @cli.group('whitelist') @@ -368,6 +373,8 @@ def cli_whitelist(): @cli_whitelist.command('list') def cli_whitelist_list(): + 'List all the instances in the whitelist' + click.echo('Current whitelisted domains') for domain in app.config.whitelist: @@ -403,6 +410,14 @@ def cli_whitelist_remove(instance): click.echo(f'Removed instance from the whitelist: {instance}') +@cli_whitelist.command('import') +def cli_whitelist_import(): + 'Add all current inboxes to the whitelist' + + for domain in app.database.hostnames: + cli_whitelist_add.callback(domain) + + def main(): cli(prog_name='relay') From eab8a310015ea83087e75a146e34289994ad8f34 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Sun, 4 Dec 2022 01:12:58 -0500 Subject: [PATCH 35/50] document new commands --- docs/commands.md | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff 
--git a/docs/commands.md b/docs/commands.md index f8880f6..aea0a82 100644 --- a/docs/commands.md +++ b/docs/commands.md @@ -26,11 +26,18 @@ Run the setup wizard to configure your relay. ## Config -List the current configuration key/value pairs +Manage the relay config activityrelay config +### List + +List the current config key/value pairs + + activityrelay config list + + ### Set Set a value for a config option @@ -111,6 +118,13 @@ Remove a domain from the whitelist. activityrelay whitelist remove +### Import + +Add all current inboxes to the whitelist + + activityrelay whitelist import + + ## Instance Manage the instance ban list. From b0851c065225930d2eb55eba72b2d66d89b218b3 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Sun, 4 Dec 2022 01:15:28 -0500 Subject: [PATCH 36/50] remove http_debug --- relay/application.py | 3 -- relay/http_debug.py | 68 -------------------------------------------- relay/misc.py | 2 -- relay/views.py | 6 ---- 4 files changed, 79 deletions(-) delete mode 100644 relay/http_debug.py diff --git a/relay/application.py b/relay/application.py index b339672..498fdd9 100644 --- a/relay/application.py +++ b/relay/application.py @@ -102,9 +102,6 @@ class Application(web.Application): return logging.error(f'A server is already running on port {self.config.port}') for route in routes: - if route[1] == '/stats' and logging.DEBUG < logging.root.level: - continue - self.router.add_route(*route) logging.info(f'Starting webserver at {self.config.host} ({self.config.listen}:{self.config.port})') diff --git a/relay/http_debug.py b/relay/http_debug.py deleted file mode 100644 index 2a2ae67..0000000 --- a/relay/http_debug.py +++ /dev/null @@ -1,68 +0,0 @@ -import logging -import aiohttp - -from collections import defaultdict - - -STATS = { - 'requests': defaultdict(int), - 'response_codes': defaultdict(int), - 'response_codes_per_domain': defaultdict(lambda: defaultdict(int)), - 'delivery_codes': defaultdict(int), - 'delivery_codes_per_domain': defaultdict(lambda: defaultdict(int)), - 'exceptions': defaultdict(int), - 'exceptions_per_domain': defaultdict(lambda: defaultdict(int)), - 'delivery_exceptions': defaultdict(int), - 'delivery_exceptions_per_domain': defaultdict(lambda: defaultdict(int)) -} - - -async def on_request_start(session, trace_config_ctx, params): - global STATS - - logging.debug("HTTP START [%r], [%r]", session, params) - - STATS['requests'][params.url.host] += 1 - - -async def on_request_end(session, trace_config_ctx, params): - global STATS - - logging.debug("HTTP END [%r], [%r]", session, params) - - host = params.url.host - status = params.response.status - - STATS['response_codes'][status] += 1 - STATS['response_codes_per_domain'][host][status] += 1 - - if params.method == 'POST': - STATS['delivery_codes'][status] += 1 - STATS['delivery_codes_per_domain'][host][status] += 1 - - -async def on_request_exception(session, trace_config_ctx, params): - global STATS - - logging.debug("HTTP EXCEPTION [%r], [%r]", session, params) - - host = params.url.host - exception = repr(params.exception) - - STATS['exceptions'][exception] += 1 - STATS['exceptions_per_domain'][host][exception] += 1 - - if params.method == 'POST': - STATS['delivery_exceptions'][exception] += 1 - STATS['delivery_exceptions_per_domain'][host][exception] += 1 - - -def http_debug(): - if logging.DEBUG >= logging.root.level: - return - - trace_config = aiohttp.TraceConfig() - trace_config.on_request_start.append(on_request_start) - trace_config.on_request_end.append(on_request_end) - 
trace_config.on_request_exception.append(on_request_exception) - return [trace_config] diff --git a/relay/misc.py b/relay/misc.py index 68b6a18..a98088f 100644 --- a/relay/misc.py +++ b/relay/misc.py @@ -14,8 +14,6 @@ from json.decoder import JSONDecodeError from urllib.parse import urlparse from uuid import uuid4 -from .http_debug import http_debug - app = None diff --git a/relay/views.py b/relay/views.py index d658468..9cea1ef 100644 --- a/relay/views.py +++ b/relay/views.py @@ -7,7 +7,6 @@ import traceback from pathlib import Path from . import __version__, misc -from .http_debug import STATS from .misc import DotDict, Message, Response from .processors import run_processor @@ -190,8 +189,3 @@ async def nodeinfo(request): async def nodeinfo_wellknown(request): data = aputils.WellKnownNodeinfo.new_template(request.config.host) return Response.new(data, ctype='json') - - -@register_route('GET', '/stats') -async def stats(request): - return Response.new(STATS, ctype='json') From 90234a972434358852d501c9021e9203ec2c77a7 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Sun, 4 Dec 2022 01:20:17 -0500 Subject: [PATCH 37/50] move apkeys out of RelayConfig and rename relay_software_names --- relay/config.py | 21 ++++++++++----------- relay/manage.py | 6 +++--- 2 files changed, 13 insertions(+), 14 deletions(-) diff --git a/relay/config.py b/relay/config.py index 9af25bc..77b5db4 100644 --- a/relay/config.py +++ b/relay/config.py @@ -9,23 +9,22 @@ from urllib.parse import urlparse from .misc import DotDict, boolean -relay_software_names = [ +RELAY_SOFTWARE = [ 'activityrelay', # https://git.pleroma.social/pleroma/relay 'aoderelay', # https://git.asonix.dog/asonix/relay 'feditools-relay' # https://git.ptzo.gdn/feditools/relay ] +APKEYS = [ + 'host', + 'whitelist_enabled', + 'blocked_software', + 'blocked_instances', + 'whitelist' +] + class RelayConfig(DotDict): - apkeys = { - 'host', - 'whitelist_enabled', - 'blocked_software', - 'blocked_instances', - 'whitelist' - } - - def __init__(self, path): DotDict.__init__(self, {}) @@ -243,7 +242,7 @@ class RelayConfig(DotDict): 'workers': self.workers, 'json_cache': self.json_cache, 'timeout': self.timeout, - 'ap': {key: self[key] for key in self.apkeys} + 'ap': {key: self[key] for key in APKEYS} } with open(self._path, 'w') as fd: diff --git a/relay/manage.py b/relay/manage.py index 0b58ecb..1bf2c03 100644 --- a/relay/manage.py +++ b/relay/manage.py @@ -8,7 +8,7 @@ from urllib.parse import urlparse from . import misc, __version__ from .application import Application -from .config import relay_software_names +from .config import RELAY_SOFTWARE app = None @@ -314,7 +314,7 @@ def cli_software_ban(name, fetch_nodeinfo): 'Ban software. Use RELAYS for NAME to ban relays' if name == 'RELAYS': - for name in relay_software_names: + for name in RELAY_SOFTWARE: app.config.ban_software(name) app.config.save() @@ -344,7 +344,7 @@ def cli_software_unban(name, fetch_nodeinfo): 'Ban software. 
Use RELAYS for NAME to unban relays' if name == 'RELAYS': - for name in relay_software_names: + for name in RELAY_SOFTWARE: app.config.unban_software(name) app.config.save() From 6b86bb7d988e1c02a61def8d411af6cd4b5e8226 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Sun, 4 Dec 2022 02:13:13 -0500 Subject: [PATCH 38/50] remove leftover semaphore property --- relay/application.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/relay/application.py b/relay/application.py index 498fdd9..538a9ce 100644 --- a/relay/application.py +++ b/relay/application.py @@ -59,11 +59,6 @@ class Application(web.Application): return self['database'] - @property - def semaphore(self): - return self['semaphore'] - - @property def uptime(self): if not self['starttime']: @@ -207,4 +202,3 @@ setattr(web.Request, 'signature', property(request_signature)) setattr(web.Request, 'config', property(lambda self: self.app.config)) setattr(web.Request, 'database', property(lambda self: self.app.database)) -setattr(web.Request, 'semaphore', property(lambda self: self.app.semaphore)) From 9f58c88e9f9b221944c8a26de1a3b9382338aa77 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Sun, 4 Dec 2022 04:16:50 -0500 Subject: [PATCH 39/50] Fix NameError when getting nodeinfo software name in processors --- relay/processors.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/relay/processors.py b/relay/processors.py index 2e2a977..b980f87 100644 --- a/relay/processors.py +++ b/relay/processors.py @@ -50,7 +50,7 @@ async def handle_forward(request): async def handle_follow(request): nodeinfo = await request.app.client.fetch_nodeinfo(request.actor.domain) - software = nodeinfo.swname if nodeinfo else None + software = nodeinfo.sw_name if nodeinfo else None ## reject if software used by actor is banned if request.config.is_banned_software(software): @@ -119,7 +119,7 @@ async def run_processor(request): nodeinfo = await request.app.client.fetch_nodeinfo(request.instance['domain']) if nodeinfo: - request.instance['software'] = nodeinfo.swname + request.instance['software'] = nodeinfo.sw_name request.database.save() logging.verbose(f'New "{request.message.type}" from actor: {request.actor.id}') From 7d37ec8145a7c01f86f038978a1c523fc2216b64 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Sun, 4 Dec 2022 04:40:40 -0500 Subject: [PATCH 40/50] remove await from push_message calls and reject non-system actors --- relay/processors.py | 40 +++++++++++++++++++++++++++++++++++++--- 1 file changed, 37 insertions(+), 3 deletions(-) diff --git a/relay/processors.py b/relay/processors.py index b980f87..2d34246 100644 --- a/relay/processors.py +++ b/relay/processors.py @@ -10,6 +10,16 @@ from .misc import Message cache = LRUCache(1024) +def person_check(actor, software): + ## pleroma and akkoma use Person for the actor type for some reason + if software in {'akkoma', 'pleroma'} and actor.id != f'https://{actor.domain}/relay': + return True + + ## make sure the actor is an application + elif actor.type != 'Application': + return True + + async def handle_relay(request): if request.message.objectid in cache: logging.verbose(f'already relayed {request.message.objectid}') @@ -54,12 +64,36 @@ async def handle_follow(request): ## reject if software used by actor is banned if request.config.is_banned_software(software): + request.app.push_message( + request.actor.shared_inbox, + Message.new_response( + host = request.config.host, + actor = request.actor.id, + followid = request.message.id, + accept = False + ) + ) + return 
logging.verbose(f'Rejected follow from actor for using specific software: actor={request.actor.id}, software={software}') + ## reject if the actor is not an instance actor + if person_check(request.actor, software): + request.app.push_message( + request.actor.shared_inbox, + Message.new_response( + host = request.config.host, + actor = request.actor.id, + followid = request.message.id, + accept = False + ) + ) + + return logging.verbose(f'Non-application actor tried to follow: {request.actor.id}') + request.database.add_inbox(request.actor.shared_inbox, request.message.id, software) request.database.save() - await request.app.push_message( + request.app.push_message( request.actor.shared_inbox, Message.new_response( host = request.config.host, @@ -72,7 +106,7 @@ async def handle_follow(request): # Are Akkoma and Pleroma the only two that expect a follow back? # Ignoring only Mastodon for now if software != 'mastodon': - await request.app.push_message( + request.app.push_message( request.actor.shared_inbox, Message.new_follow( host = request.config.host, @@ -91,7 +125,7 @@ async def handle_undo(request): request.database.save() - await request.app.push_message( + request.app.push_message( request.actor.shared_inbox, Message.new_unfollow( host = request.config.host, From 8f16cab0484a06f7be6e6df3abb979272db5e948 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Wed, 7 Dec 2022 23:15:31 -0500 Subject: [PATCH 41/50] prevent errors in post and fetch_nodeinfo --- relay/http_client.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/relay/http_client.py b/relay/http_client.py index 28153a5..8802471 100644 --- a/relay/http_client.py +++ b/relay/http_client.py @@ -159,7 +159,7 @@ class HttpClient: return logging.verbose(await resp.read()) # change this to debug except (ClientConnectorError, ServerTimeoutError): - logging.verbose(f'Failed to connect to {url.netloc}') + logging.verbose(f'Failed to connect to {url}') ## prevent workers from being brought down except Exception as e: @@ -174,6 +174,10 @@ class HttpClient: loads = WellKnownNodeinfo.new_from_json ) + if not wk_nodeinfo: + logging.verbose(f'Failed to fetch well-known nodeinfo url for domain: {domain}') + return False + for version in ['20', '21']: try: nodeinfo_url = wk_nodeinfo.get_url(version) From e281a06e7f8d6dfa1bdd7ba7f8a8697e1a5fa77e Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Wed, 7 Dec 2022 23:15:54 -0500 Subject: [PATCH 42/50] correctly call aputils.Signer.new --- relay/database.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/relay/database.py b/relay/database.py index 4f83db0..ad093cd 100644 --- a/relay/database.py +++ b/relay/database.py @@ -76,7 +76,7 @@ class RelayDatabase(dict): if not self['private-key']: logging.info("No actor keys present, generating 4096-bit RSA keypair.") - self.signer = aputils.Signer.new(self.config.keyid, 4096) + self.signer = aputils.Signer.new(self.config.keyid, size=4096) self['private-key'] = self.signer.export() else: From dc74bfb58880e8a3cd68c6b0b978672f39e48103 Mon Sep 17 00:00:00 2001 From: Izalia Mae Date: Wed, 7 Dec 2022 23:16:48 -0500 Subject: [PATCH 43/50] force certain config values in docker installs --- relay/application.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/relay/application.py b/relay/application.py index 538a9ce..de68bc2 100644 --- a/relay/application.py +++ b/relay/application.py @@ -26,6 +26,13 @@ class Application(web.Application): if not self['config'].load(): self['config'].save() + if self.config.is_docker: + 
+			self.config.update({
+				'db': '/data/relay.jsonld',
+				'listen': '0.0.0.0',
+				'port': 8080
+			})
+
 		self['workers'] = []
 		self['last_worker'] = 0

From f287b84ea32bf493d76c2a5de20a1a7f4c7792aa Mon Sep 17 00:00:00 2001
From: Izalia Mae
Date: Wed, 7 Dec 2022 23:23:13 -0500
Subject: [PATCH 44/50] update aputils

---
 setup.cfg | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.cfg b/setup.cfg
index da4201b..e54b9a9 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -28,7 +28,7 @@ install_requires =
 	click >= 8.1.2
 	pycryptodome >= 3.14.1
 	PyYAML >= 5.0.0
-	aputils @ https://git.barkshark.xyz/barkshark/aputils/archive/0.1.2.tar.gz
+	aputils @ git+https://git.barkshark.xyz/barkshark/aputils@2cd1776f12
 python_requires = >=3.6
 
 [options.extras_require]
 dev =

From aa8090eebb12c967b2e2d6d2e87eefeb982b9527 Mon Sep 17 00:00:00 2001
From: Izalia Mae
Date: Thu, 8 Dec 2022 03:31:47 -0500
Subject: [PATCH 45/50] don't prompt for ignored settings in docker instances

---
 relay/manage.py | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/relay/manage.py b/relay/manage.py
index 1bf2c03..0d7decc 100644
--- a/relay/manage.py
+++ b/relay/manage.py
@@ -43,11 +43,12 @@ def cli_setup():
 			click.echo('The domain must not be example.com')
 
-	app.config.listen = click.prompt('Which address should the relay listen on?', default=app.config.listen)
+	if not app.config.is_docker:
+		app.config.listen = click.prompt('Which address should the relay listen on?', default=app.config.listen)
 
-	while True:
-		app.config.port = click.prompt('What TCP port should the relay listen on?', default=app.config.port, type=int)
-		break
+		while True:
+			app.config.port = click.prompt('What TCP port should the relay listen on?', default=app.config.port, type=int)
+			break
 
 	app.config.save()

From 3968799d6ffbbacc0d51f7445ef5892e29a82359 Mon Sep 17 00:00:00 2001
From: Izalia Mae
Date: Thu, 8 Dec 2022 03:51:10 -0500
Subject: [PATCH 46/50] make sure exceptions don't bring down workers

---
 relay/application.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/relay/application.py b/relay/application.py
index de68bc2..dbe464f 100644
--- a/relay/application.py
+++ b/relay/application.py
@@ -4,6 +4,7 @@ import os
 import queue
 import signal
 import threading
+import traceback
 
 from aiohttp import web
 from datetime import datetime, timedelta
@@ -175,6 +176,10 @@ class PushWorker(threading.Thread):
 			except queue.Empty:
 				pass
 
+			## make sure an exception doesn't bring down the worker
+			except Exception:
+				traceback.print_exc()
+
 		await self.client.close()

From 0e45763eff2e8b9e810ce92e2bae3f882552462b Mon Sep 17 00:00:00 2001
From: Izalia Mae
Date: Thu, 8 Dec 2022 03:53:13 -0500
Subject: [PATCH 47/50] remove unnecessary config update

---
 relay/config.py | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/relay/config.py b/relay/config.py
index 77b5db4..996fa9f 100644
--- a/relay/config.py
+++ b/relay/config.py
@@ -98,12 +98,6 @@ class RelayConfig(DotDict):
 			'whitelist': []
 		})
 
-		if self.is_docker:
-			self.update({
-				'db': Path('/data/relay.jsonld'),
-				'listen': '127.0.0.1'
-			})
-
 
 	def ban_instance(self, instance):
 		if instance.startswith('http'):

From 17f3e6be559ed4fdba97bfc7fdcd06f1f8525697 Mon Sep 17 00:00:00 2001
From: Izalia Mae
Date: Thu, 8 Dec 2022 04:17:17 -0500
Subject: [PATCH 48/50] version 0.2.4

---
 docs/installation.md | 4 ++--
 relay/__init__.py    | 2 +-
 setup.cfg            | 2 +-
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/docs/installation.md b/docs/installation.md
index a852ab1..8363faa 100644
--- a/docs/installation.md
+++ b/docs/installation.md
@@ -15,7 +15,7 @@ the [official pipx docs](https://pypa.github.io/pipx/installation/) for more in-
 Now simply install ActivityRelay directly from git
 
-    pipx install git+https://git.pleroma.social/pleroma/relay@0.2.3
+    pipx install git+https://git.pleroma.social/pleroma/relay@0.2.4
 
 Or from a cloned git repo.
@@ -39,7 +39,7 @@ be installed via [pyenv](https://github.com/pyenv/pyenv).
 The instructions for installation via pip are very similar to pipx. Installation can be done from git
 
-    python3 -m pip install git+https://git.pleroma.social/pleroma/relay@0.2.3
+    python3 -m pip install git+https://git.pleroma.social/pleroma/relay@0.2.4
 
 or a cloned git repo.
diff --git a/relay/__init__.py b/relay/__init__.py
index 82aaa8a..426b03e 100644
--- a/relay/__init__.py
+++ b/relay/__init__.py
@@ -1,3 +1,3 @@
-__version__ = '0.2.3'
+__version__ = '0.2.4'
 
 from . import logger
diff --git a/setup.cfg b/setup.cfg
index e54b9a9..c6deb96 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,6 +1,6 @@
 [metadata]
 name = relay
-version = 0.2.3
+version = 0.2.4
 description = Generic LitePub relay (works with all LitePub consumers and Mastodon)
 long_description = file: README.md
 long_description_content_type = text/markdown; charset=UTF-8

From a742e7fb306b2e8b5b91068212a72a4086010256 Mon Sep 17 00:00:00 2001
From: Izalia Mae
Date: Sat, 10 Dec 2022 02:10:56 -0500
Subject: [PATCH 49/50] update setup.cfg and requirements.txt

* move deps to requirements.txt
* reference deps from requirements.txt in setup.cfg
* bump minimum python version to 3.7
* set version in setup.cfg from attribute
---
 requirements.txt |  6 +++++-
 setup.cfg        | 12 +++---------
 2 files changed, 8 insertions(+), 10 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index 9c558e3..7ffa255 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1 +1,5 @@
-.
+aiohttp>=3.8.0
+aputils@git+https://git.barkshark.xyz/barkshark/aputils@0.1.3
+cachetools>=5.2.0
+click>=8.1.2
+pyyaml>=6.0
diff --git a/setup.cfg b/setup.cfg
index c6deb96..bb97663 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,6 +1,6 @@
 [metadata]
 name = relay
-version = 0.2.4
+version = attr: relay.__version__
 description = Generic LitePub relay (works with all LitePub consumers and Mastodon)
 long_description = file: README.md
 long_description_content_type = text/markdown; charset=UTF-8
@@ -22,14 +22,8 @@ project_urls =
 [options]
 zip_safe = False
 packages = find:
-install_requires =
-	aiohttp >= 3.8.0
-	cachetools >= 5.0.0
-	click >= 8.1.2
-	pycryptodome >= 3.14.1
-	PyYAML >= 5.0.0
-	aputils @ git+https://git.barkshark.xyz/barkshark/aputils@2cd1776f12
-python_requires = >=3.6
+install_requires = file: requirements.txt
+python_requires = >=3.7
 
 [options.extras_require]
 dev =

From 04368c782d9255a4c1bcd6fa3fb58a6d4af61591 Mon Sep 17 00:00:00 2001
From: Izalia Mae
Date: Sat, 10 Dec 2022 02:44:07 -0500
Subject: [PATCH 50/50] replace aputils git url with tar.gz

---
 requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements.txt b/requirements.txt
index 7ffa255..9199741 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,5 +1,5 @@
 aiohttp>=3.8.0
-aputils@git+https://git.barkshark.xyz/barkshark/aputils@0.1.3
+aputils@https://git.barkshark.xyz/barkshark/aputils/archive/0.1.3.tar.gz
 cachetools>=5.2.0
 click>=8.1.2
 pyyaml>=6.0