Mirror of https://git.pleroma.social/pleroma/relay.git
Synced 2024-11-21 22:17:59 +00:00

Merge branch 'rework' into 'master'

Reorganize codebase

See merge request pleroma/relay!33

Commit 7eac3609a6


Dockerfile (27 lines changed)
@@ -1,11 +1,28 @@
FROM python:3-alpine
WORKDIR /workdir

# install build deps for pycryptodome and other c-based python modules
RUN apk add alpine-sdk autoconf automake libtool gcc

ADD requirements.txt /workdir/
RUN pip3 install -r requirements.txt
# add env var to let the relay know it's in a container
ENV DOCKER_RUNNING=true

ADD . /workdir/
# setup various container properties
VOLUME ["/data"]
CMD ["python", "-m", "relay"]
EXPOSE 8080/tcp
WORKDIR /opt/activityrelay

VOLUME ["/workdir/data"]
# install and update important python modules
RUN pip3 install -U setuptools wheel pip

# only copy necessary files
COPY relay ./relay
COPY LICENSE .
COPY README.md .
COPY requirements.txt .
COPY setup.cfg .
COPY setup.py .
COPY .git ./.git

# install relay deps
RUN pip3 install -r requirements.txt


README.md (64 lines changed)

@@ -10,72 +10,14 @@ Affero General Public License version 3 (AGPLv3) license. You can find a copy o
in this package as the `LICENSE` file.


## Setup

You need at least Python 3.6 (latest version of 3.x recommended) to make use of this software.
It simply will not run on older Python versions.

Download the project and install it with pip (`pip3 install .`).

Copy `relay.yaml.example` to `relay.yaml` and edit it as appropriate:

    $ cp relay.yaml.example relay.yaml
    $ $EDITOR relay.yaml

Finally, you can launch the relay:

    $ python3 -m relay

It is suggested to run this under some sort of supervisor, such as runit, daemontools,
s6, or systemd. Configuration of the supervisor is not covered here, as it differs
depending on which system you have available.

The bot runs a webserver, internally, on localhost at port 8080. This needs to be
forwarded by nginx or similar. The webserver is used to receive ActivityPub messages,
and needs to be secured with an SSL certificate inside nginx or similar. Configuration
of your webserver is not discussed here, but any guide explaining how to configure a
modern non-PHP web application should cover it.


## Getting Started

Normally, you would direct your LitePub instance software to follow the LitePub actor
found on the relay. In Pleroma this would be something like:

    $ MIX_ENV=prod mix relay_follow https://your.relay.hostname/actor

Mastodon uses an entirely different relay protocol but supports the LitePub relay protocol
as well when the Mastodon relay handshake is used. In these cases, Mastodon relay
clients should follow `http://your.relay.hostname/inbox` as they would with Mastodon's
own relay software.


## Performance

Performance is very good, with all data being stored in memory and serialized to a
JSON-LD object graph. Worker coroutines are spawned in the background to distribute
the messages in a scatter-gather pattern. Performance is comparable to, if not
superior to, the Mastodon relay software, with improved memory efficiency.


## Management

You can perform a few management tasks, such as peering or depeering other relays, by
invoking the `relay.manage` module.

This will show the available management tasks:

    $ python3 -m relay.manage

When following remote relays, you should use the `/actor` endpoint as you would in
Pleroma and other LitePub-compliant software.


## Docker

You can run ActivityRelay with Docker. Edit `relay.yaml` so that the database
location is set to `./data/relay.jsonld`, then build and run the Docker image:

    $ docker volume create activityrelay-data
    $ docker build -t activityrelay .
    $ docker run -d -p 8080:8080 -v activityrelay-data:/workdir/data activityrelay


## Documentation

To install or manage your relay, check the [documentation](docs/index.md).


docker.sh (66 lines, new executable file)

@@ -0,0 +1,66 @@
#!/usr/bin/env bash

case $1 in
    install)
        docker build -f Dockerfile -t activityrelay . && \
        docker volume create activityrelay-data && \
        docker run -it -p 8080:8080 -v activityrelay-data:/data --name activityrelay activityrelay
    ;;

    uninstall)
        docker stop activityrelay && \
        docker container rm activityrelay && \
        docker volume rm activityrelay-data && \
        docker image rm activityrelay
    ;;

    start)
        docker start activityrelay
    ;;

    stop)
        docker stop activityrelay
    ;;

    manage)
        shift
        docker exec -it activityrelay python3 -m relay "$@"
    ;;

    shell)
        docker exec -it activityrelay bash
    ;;

    rescue)
        docker run -it --rm --entrypoint bash -v activityrelay-data:/data activityrelay
    ;;

    edit)
        if [ -z "${EDITOR}" ]; then
            echo "EDITOR environment variable not set"
            exit
        fi

        CONFIG="/tmp/relay-$(date +"%T").yaml"

        docker cp activityrelay:/data/relay.yaml "$CONFIG" && \
        $EDITOR "$CONFIG" && \
        docker cp "$CONFIG" activityrelay:/data/relay.yaml && \
        rm "$CONFIG"
    ;;

    *)
        COLS="%-22s %s\n"

        echo "Valid commands:"
        printf "$COLS" "- start" "Run the relay in the background"
        printf "$COLS" "- stop" "Stop the relay"
        printf "$COLS" "- manage <cmd> [args]" "Run a relay management command"
        printf "$COLS" "- edit" "Edit the relay's config in \$EDITOR"
        printf "$COLS" "- shell" "Drop into a bash shell on the running container"
        printf "$COLS" "- rescue" "Drop into a bash shell on a temp container with the data volume mounted"
        printf "$COLS" "- install" "Build the image, create a new container and volume, and run relay setup"
        printf "$COLS" "- uninstall" "Delete the relay image, container, and volume"
    ;;
esac

docs/commands.md (157 lines, new file)

@@ -0,0 +1,157 @@
# Commands

There are a number of commands to manage your relay's database and config. You can add `--help` to
any category or command to get help on that specific option (ex. `activityrelay inbox --help`).

Note: Unless specified, it is recommended to run any commands while the relay is shut down.


## Run

Run the relay.

    activityrelay run


## Setup

Run the setup wizard to configure your relay.

    activityrelay setup


## Inbox

Manage the list of subscribed instances.


### List

List the currently subscribed instances or relays.

    activityrelay inbox list


### Add

Add an inbox to the database. If a domain is specified, it will default to `https://{domain}/inbox`.
If the added instance is not following the relay, expect errors when pushing messages.

    activityrelay inbox add <inbox or domain>
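
For example, `activityrelay inbox add example.com` stores `https://example.com/inbox`, while a
full inbox URL is stored as given.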

### Remove

Remove an inbox from the database. An inbox or domain can be specified.

    activityrelay inbox remove <inbox or domain>


### Follow

Follow an instance or relay actor and add it to the database. If a domain is specified, it will
default to `https://{domain}/actor`.

    activityrelay inbox follow <actor or domain>
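
For example, `activityrelay inbox follow example.com` sends a follow request to
`https://example.com/actor`.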

Note: The relay must be running for this command to work.


### Unfollow

Unfollow an instance or relay actor and remove it from the database. If the instance or relay does
not exist anymore, use the `inbox remove` command instead.

    activityrelay inbox unfollow <domain, actor, or inbox>

Note: The relay must be running for this command to work.


## Whitelist

Manage the whitelisted domains.


### List

List the current whitelist.

    activityrelay whitelist list


### Add

Add a domain to the whitelist.

    activityrelay whitelist add <domain>


### Remove

Remove a domain from the whitelist.

    activityrelay whitelist remove <domain>


## Instance

Manage the instance ban list.


### List

List the currently banned instances.

    activityrelay instance list


### Ban

Add an instance to the ban list. If the instance is currently subscribed, remove it from the
database.

    activityrelay instance ban <domain>


### Unban

Remove an instance from the ban list.

    activityrelay instance unban <domain>


## Software

Manage the software ban list. To get the correct name, check the software's nodeinfo endpoint.
You can find it at `nodeinfo['software']['name']`.


### List

List the currently banned software.

    activityrelay software list


### Ban

Add a software name to the ban list.

If `-f` or `--fetch-nodeinfo` is set, treat the name as a domain and try to fetch the software
name via nodeinfo.

If the name is `RELAYS` (case-sensitive), add all known relay software names to the list.

    activityrelay software ban [-f/--fetch-nodeinfo] <name, domain, or RELAYS>
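
For example, `activityrelay software ban -f example.com` fetches the nodeinfo of `example.com`
and bans whatever software name it reports.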

### Unban

Remove a software name from the ban list.

If `-f` or `--fetch-nodeinfo` is set, treat the name as a domain and try to fetch the software
name via nodeinfo.

If the name is `RELAYS` (case-sensitive), remove all known relay software names from the list.

    activityrelay software unban [-f/--fetch-nodeinfo] <name, domain, or RELAYS>

docs/configuration.md (110 lines, new file)

@@ -0,0 +1,110 @@
# Configuration

## DB

The path to the database. It contains the relay actor's private key and all subscribed
instances. If the path is not absolute, it is relative to the working directory.

    db: relay.jsonld


## Listener

The address and port the relay will listen on. If the reverse proxy (nginx, apache, caddy, etc.)
is running on the same host, it is recommended to change `listen` to `localhost`.

    listen: 0.0.0.0
    port: 8080


## Note

A small blurb to describe your relay instance. This will show up on the relay's home page.

    note: "Make a note about your instance here."


## Post Limit

The maximum number of messages to send out at once. For each incoming message, a message will be
sent out to every subscribed instance minus the instance which sent the message. This limit
is to prevent too many outgoing connections from being made, so adjust it if necessary.

    push_limit: 512
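
For example, with `push_limit: 512` and 600 subscribed instances, one incoming message produces
599 outgoing deliveries, of which at most 512 run concurrently.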

## AP

Various ActivityPub-related settings.


### Host

The domain your relay will use to identify itself.

    host: relay.example.com


### Whitelist Enabled

If set to `true`, only instances in the whitelist can follow the relay. Any subscribed instances
not in the whitelist will be removed from the inbox list on startup.

    whitelist_enabled: false


### Whitelist

A list of domains of instances which are allowed to subscribe to your relay.

    whitelist:
    - good-instance.example.com
    - another.good-instance.example.com


### Blocked Instances

A list of instances which are not allowed to follow the relay. If a subscribed instance is added to
the block list, it will be removed from the inbox list on startup.

    blocked_instances:
    - bad-instance.example.com
    - another-bad-instance.example.com


### Blocked Software

A list of ActivityPub software which cannot follow your relay. This list is empty by default, but
setting it to the list below will block all other relays and prevent relay chains.

    blocked_software:
    - activityrelay
    - aoderelay
    - social.seattle.wa.us-relay
    - unciarelay


## Cache

These are object limits for various caches. Only change them if you know what you're doing.


### Objects

The URLs of messages which have been processed by the relay.

    objects: 1024


### Actors

The ActivityPub actors of incoming messages.

    actors: 1024


### Digests

The base64-encoded hashes of messages.

    digests: 1024

docs/index.md (9 lines, new file)

@@ -0,0 +1,9 @@
# ActivityRelay Documentation

ActivityRelay is a small ActivityPub server that relays messages to subscribed instances.

[Installation](installation.md)

[Configuration](configuration.md)

[Commands](commands.md)

docs/installation.md (67 lines, new file)

@@ -0,0 +1,67 @@
# Installation

There are a few ways to install ActivityRelay. Follow one of the methods below, set up a reverse
proxy, and set up the relay to run via a supervisor. Example configs for Caddy, nginx, and systemd
can be found in `installation/`.
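
As a rough illustration (a generic sketch, not the contents of the files in `installation/`), an
nginx site config that forwards TLS traffic to the relay might look like this, assuming the relay
listens on localhost:8080 and `relay.example.com` stands in for your domain:

    server {
        listen 443 ssl;
        server_name relay.example.com;

        # certificate paths are placeholders; point these at your own certificate
        ssl_certificate /etc/ssl/relay.example.com.crt;
        ssl_certificate_key /etc/ssl/relay.example.com.key;

        location / {
            proxy_pass http://127.0.0.1:8080;
            proxy_set_header Host $host;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        }
    }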


## Pipx

Pipx uses pip and a custom venv implementation to automatically install modules into a Python
environment and is the recommended method. Install pipx if it isn't installed already. Check out
the [official pipx docs](https://pypa.github.io/pipx/installation/) for more in-depth instructions.

    python3 -m pip install pipx

Now simply install ActivityRelay directly from git:

    pipx install git+https://git.pleroma.social/pleroma/relay@0.2.0

Or from a cloned git repo:

    pipx install .

Once finished, you can set up the relay via the setup command. It will ask a few questions to fill
out config options for your relay.

    activityrelay setup

Finally, start it up with the run command.

    activityrelay run

Note: Pipx requires Python 3.7+. If your distro doesn't have a compatible version of Python, it can
be installed via


## Pip

The instructions for installation via pip are very similar to pipx. Installation can be done from
git:

    python3 -m pip install git+https://git.pleroma.social/pleroma/relay@0.2.0

or from a cloned git repo:

    python3 -m pip install .

Now run the configuration wizard:

    activityrelay setup

And start the relay when finished:

    activityrelay run


## Docker

Installation and management via Docker can be handled with the `docker.sh` script. To install
ActivityRelay, run the install command. Once the image is built and the container is created,
you will be asked to fill out some config options for your relay.

    ./docker.sh install

Finally, start it up. It will be listening on TCP port 8080.

    ./docker.sh start
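
To run management commands inside the container afterwards, you can use the `manage` wrapper,
e.g. `./docker.sh manage inbox list`.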

relay.yaml.example

@@ -9,18 +9,25 @@ port: 8080
# Note
note: "Make a note about your instance here."

# maximum number of inbox posts to do at once
post_limit: 512

# this section is for ActivityPub
ap:
    # this is used for generating activitypub messages, as well as instructions for
    # linking AP identities. it should be an SSL-enabled domain reachable by https.
    host: 'relay.example.com'

    blocked_instances:
    - 'bad-instance.example.com'
    - 'another-bad-instance.example.com'

    whitelist_enabled: false

    whitelist:
    - 'good-instance.example.com'
    - 'another.good-instance.example.com'

    # uncomment the lines below to prevent certain activitypub software from posting
    # to the relay (all known relays by default). this uses the software name in nodeinfo
    #blocked_software:

@@ -28,3 +35,9 @@ ap:
    #- 'aoderelay'
    #- 'social.seattle.wa.us-relay'
    #- 'unciarelay'

# cache limits as number of items. only change this if you know what you're doing
cache:
    objects: 1024
    actors: 1024
    digests: 1024


relay/__init__.py

@@ -1,58 +1,8 @@
from . import logging
__version__ = '0.2.0'

from aiohttp.web import Application

from . import logger


import asyncio
import aiohttp
import aiohttp.web
import yaml
import argparse

parser = argparse.ArgumentParser(
    description="A generic LitePub relay (works with all LitePub consumers and Mastodon).",
    prog="python -m relay")
parser.add_argument("-c", "--config", type=str, default="relay.yaml",
    metavar="<path>", help="the path to your config file")
args = parser.parse_args()

def load_config():
    with open(args.config) as f:
        options = {}

        ## Prevent a warning message for pyyaml 5.1+
        if getattr(yaml, 'FullLoader', None):
            options['Loader'] = yaml.FullLoader

        yaml_file = yaml.load(f, **options)

        config = {
            'db': yaml_file.get('db', 'relay.jsonld'),
            'listen': yaml_file.get('listen', '0.0.0.0'),
            'port': int(yaml_file.get('port', 8080)),
            'note': yaml_file.get('note', 'Make a note about your instance here.'),
            'ap': {
                'blocked_software': [v.lower() for v in yaml_file['ap'].get('blocked_software', [])],
                'blocked_instances': yaml_file['ap'].get('blocked_instances', []),
                'host': yaml_file['ap'].get('host', 'localhost'),
                'whitelist': yaml_file['ap'].get('whitelist', []),
                'whitelist_enabled': yaml_file['ap'].get('whitelist_enabled', False)
            }
        }
        return config


CONFIG = load_config()

from .http_signatures import http_signatures_middleware


app = aiohttp.web.Application(middlewares=[
    http_signatures_middleware
])


from . import database
from . import actor
from . import webfinger
from . import default
from . import nodeinfo
from . import http_stats
app = Application()

relay/__main__.py

@@ -1,56 +1,5 @@
import asyncio
import aiohttp.web
import logging
import platform
import sys
import Crypto
import time

from . import app, CONFIG


def crypto_check():
    vers_split = platform.python_version().split('.')
    pip_command = 'pip3 uninstall pycrypto && pip3 install pycryptodome'

    if Crypto.__version__ != '2.6.1':
        return

    if int(vers_split[1]) > 7 and Crypto.__version__ == '2.6.1':
        logging.error('PyCrypto is broken on Python 3.8+. Please replace it with pycryptodome before running again. Exiting in 10 sec...')
        logging.error(pip_command)
        time.sleep(10)
        sys.exit()

    else:
        logging.warning('PyCrypto is old and should be replaced with pycryptodome')
        logging.warning(pip_command)


async def start_webserver():
    runner = aiohttp.web.AppRunner(app)
    await runner.setup()
    try:
        listen = CONFIG['listen']
    except:
        listen = 'localhost'
    try:
        port = CONFIG['port']
    except:
        port = 8080

    logging.info('Starting webserver at {listen}:{port}'.format(listen=listen, port=port))

    site = aiohttp.web.TCPSite(runner, listen, port)
    await site.start()

def main():
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    asyncio.ensure_future(start_webserver(), loop=loop)
    loop.run_forever()

from .manage import main


if __name__ == '__main__':
    crypto_check()
    main()

relay/actor.py (347 lines removed)

@@ -1,347 +0,0 @@
import aiohttp
import aiohttp.web
import asyncio
import logging
import uuid
import re
import simplejson as json
import cgi
import datetime

from urllib.parse import urlsplit
from Crypto.PublicKey import RSA
from cachetools import LFUCache

from . import app, CONFIG
from .database import DATABASE
from .http_debug import http_debug
from .remote_actor import fetch_actor
from .http_signatures import sign_headers, generate_body_digest


# generate actor keys if not present
if "actorKeys" not in DATABASE:
    logging.info("No actor keys present, generating 4096-bit RSA keypair.")

    privkey = RSA.generate(4096)
    pubkey = privkey.publickey()

    DATABASE["actorKeys"] = {
        "publicKey": pubkey.exportKey('PEM').decode('utf-8'),
        "privateKey": privkey.exportKey('PEM').decode('utf-8')
    }


PRIVKEY = RSA.importKey(DATABASE["actorKeys"]["privateKey"])
PUBKEY = PRIVKEY.publickey()
AP_CONFIG = CONFIG['ap']
CACHE_SIZE = CONFIG.get('cache-size', 16384)
CACHE = LFUCache(CACHE_SIZE)

sem = asyncio.Semaphore(500)


async def actor(request):
    data = {
        "@context": "https://www.w3.org/ns/activitystreams",
        "endpoints": {
            "sharedInbox": "https://{}/inbox".format(request.host)
        },
        "followers": "https://{}/followers".format(request.host),
        "following": "https://{}/following".format(request.host),
        "inbox": "https://{}/inbox".format(request.host),
        "name": "ActivityRelay",
        "type": "Application",
        "id": "https://{}/actor".format(request.host),
        "publicKey": {
            "id": "https://{}/actor#main-key".format(request.host),
            "owner": "https://{}/actor".format(request.host),
            "publicKeyPem": DATABASE["actorKeys"]["publicKey"]
        },
        "summary": "ActivityRelay bot",
        "preferredUsername": "relay",
        "url": "https://{}/actor".format(request.host)
    }
    return aiohttp.web.json_response(data, content_type='application/activity+json')


app.router.add_get('/actor', actor)
get_actor_inbox = lambda actor: actor.get('endpoints', {}).get('sharedInbox', actor['inbox'])


async def push_message_to_actor(actor, message, our_key_id):
    inbox = get_actor_inbox(actor)
    url = urlsplit(inbox)

    # XXX: Digest
    data = json.dumps(message)
    headers = {
        '(request-target)': 'post {}'.format(url.path),
        'Content-Length': str(len(data)),
        'Content-Type': 'application/activity+json',
        'User-Agent': 'ActivityRelay',
        'Host': url.netloc,
        'Digest': 'SHA-256={}'.format(generate_body_digest(data)),
        'Date': datetime.datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
    }
    headers['signature'] = sign_headers(headers, PRIVKEY, our_key_id)
    headers.pop('(request-target)')
    headers.pop('Host')

    logging.debug('%r >> %r', inbox, message)

    global sem
    async with sem:
        try:
            async with aiohttp.ClientSession(trace_configs=[http_debug()]) as session:
                async with session.post(inbox, data=data, headers=headers) as resp:
                    if resp.status == 202:
                        return
                    resp_payload = await resp.text()
                    logging.debug('%r >> resp %r', inbox, resp_payload)
        except Exception as e:
            logging.info('Caught %r while pushing to %r.', e, inbox)


async def fetch_nodeinfo(domain):
    headers = {'Accept': 'application/json'}
    nodeinfo_url = None

    wk_nodeinfo = await fetch_actor(f'https://{domain}/.well-known/nodeinfo', headers=headers)

    if not wk_nodeinfo:
        return

    for link in wk_nodeinfo.get('links', ''):
        if link['rel'] == 'http://nodeinfo.diaspora.software/ns/schema/2.0':
            nodeinfo_url = link['href']
            break

    if not nodeinfo_url:
        return

    nodeinfo_data = await fetch_actor(nodeinfo_url, headers=headers)
    software = nodeinfo_data.get('software')

    return software.get('name') if software else None


async def follow_remote_actor(actor_uri):
    actor = await fetch_actor(actor_uri)

    if not actor:
        logging.info('failed to fetch actor at: %r', actor_uri)
        return

    if AP_CONFIG['whitelist_enabled'] is True and urlsplit(actor_uri).hostname not in AP_CONFIG['whitelist']:
        logging.info('refusing to follow non-whitelisted actor: %r', actor_uri)
        return

    logging.info('following: %r', actor_uri)

    message = {
        "@context": "https://www.w3.org/ns/activitystreams",
        "type": "Follow",
        "to": [actor['id']],
        "object": actor['id'],
        "id": "https://{}/activities/{}".format(AP_CONFIG['host'], uuid.uuid4()),
        "actor": "https://{}/actor".format(AP_CONFIG['host'])
    }
    await push_message_to_actor(actor, message, "https://{}/actor#main-key".format(AP_CONFIG['host']))


async def unfollow_remote_actor(actor_uri):
    actor = await fetch_actor(actor_uri)
    if not actor:
        logging.info('failed to fetch actor at: %r', actor_uri)
        return

    logging.info('unfollowing: %r', actor_uri)

    message = {
        "@context": "https://www.w3.org/ns/activitystreams",
        "type": "Undo",
        "to": [actor['id']],
        "object": {
            "type": "Follow",
            "object": actor_uri,
            "actor": actor['id'],
            "id": "https://{}/activities/{}".format(AP_CONFIG['host'], uuid.uuid4())
        },
        "id": "https://{}/activities/{}".format(AP_CONFIG['host'], uuid.uuid4()),
        "actor": "https://{}/actor".format(AP_CONFIG['host'])
    }
    await push_message_to_actor(actor, message, "https://{}/actor#main-key".format(AP_CONFIG['host']))


tag_re = re.compile(r'(<!--.*?-->|<[^>]*>)')
def strip_html(data):
    no_tags = tag_re.sub('', data)
    return cgi.escape(no_tags)


def distill_inboxes(actor, object_id):
    global DATABASE

    origin_hostname = urlsplit(object_id).hostname

    inbox = get_actor_inbox(actor)
    targets = [target for target in DATABASE.get('relay-list', []) if target != inbox]
    targets = [target for target in targets if urlsplit(target).hostname != origin_hostname]
    hostnames = [urlsplit(target).hostname for target in targets]

    assert inbox not in targets
    assert origin_hostname not in hostnames

    return targets


def distill_object_id(activity):
    logging.debug('>> determining object ID for %r', activity['object'])
    obj = activity['object']

    if isinstance(obj, str):
        return obj

    return obj['id']


async def handle_relay(actor, data, request):
    global CACHE

    object_id = distill_object_id(data)

    if object_id in CACHE:
        logging.debug('>> already relayed %r as %r', object_id, CACHE[object_id])
        return

    activity_id = "https://{}/activities/{}".format(request.host, uuid.uuid4())

    message = {
        "@context": "https://www.w3.org/ns/activitystreams",
        "type": "Announce",
        "to": ["https://{}/followers".format(request.host)],
        "actor": "https://{}/actor".format(request.host),
        "object": object_id,
        "id": activity_id
    }

    logging.debug('>> relay: %r', message)

    inboxes = distill_inboxes(actor, object_id)

    futures = [push_message_to_actor({'inbox': inbox}, message, 'https://{}/actor#main-key'.format(request.host)) for inbox in inboxes]
    asyncio.ensure_future(asyncio.gather(*futures))

    CACHE[object_id] = activity_id


async def handle_forward(actor, data, request):
    object_id = distill_object_id(data)

    logging.debug('>> Relay %r', data)

    inboxes = distill_inboxes(actor, object_id)

    futures = [
        push_message_to_actor(
            {'inbox': inbox},
            data,
            'https://{}/actor#main-key'.format(request.host))
        for inbox in inboxes]
    asyncio.ensure_future(asyncio.gather(*futures))


async def handle_follow(actor, data, request):
    global DATABASE

    following = DATABASE.get('relay-list', [])
    inbox = get_actor_inbox(actor)

    if urlsplit(inbox).hostname in AP_CONFIG['blocked_instances']:
        return

    if inbox not in following:
        following += [inbox]
        DATABASE['relay-list'] = following

        asyncio.ensure_future(follow_remote_actor(actor['id']))

    message = {
        "@context": "https://www.w3.org/ns/activitystreams",
        "type": "Accept",
        "to": [actor["id"]],
        "actor": "https://{}/actor".format(request.host),

        # this is wrong per litepub, but mastodon < 2.4 is not compliant with that profile.
        "object": {
            "type": "Follow",
            "id": data["id"],
            "object": "https://{}/actor".format(request.host),
            "actor": actor["id"]
        },

        "id": "https://{}/activities/{}".format(request.host, uuid.uuid4()),
    }

    asyncio.ensure_future(push_message_to_actor(actor, message, 'https://{}/actor#main-key'.format(request.host)))


async def handle_undo(actor, data, request):
    global DATABASE

    child = data['object']
    if child['type'] == 'Follow':
        following = DATABASE.get('relay-list', [])

        inbox = get_actor_inbox(actor)

        if inbox in following:
            following.remove(inbox)
            DATABASE['relay-list'] = following

        await unfollow_remote_actor(actor['id'])


processors = {
    'Announce': handle_relay,
    'Create': handle_relay,
    'Delete': handle_forward,
    'Follow': handle_follow,
    'Undo': handle_undo,
    'Update': handle_forward,
}


async def inbox(request):
    data = await request.json()
    instance = urlsplit(data['actor']).hostname

    if AP_CONFIG['blocked_software']:
        software = await fetch_nodeinfo(instance)

        if software and software.lower() in AP_CONFIG['blocked_software']:
            raise aiohttp.web.HTTPUnauthorized(body='relays have been blocked', content_type='text/plain')

    if 'actor' not in data or not request['validated']:
        raise aiohttp.web.HTTPUnauthorized(body='access denied', content_type='text/plain')

    elif data['type'] != 'Follow' and 'https://{}/inbox'.format(instance) not in DATABASE['relay-list']:
        raise aiohttp.web.HTTPUnauthorized(body='access denied', content_type='text/plain')

    elif AP_CONFIG['whitelist_enabled'] is True and instance not in AP_CONFIG['whitelist']:
        raise aiohttp.web.HTTPUnauthorized(body='access denied', content_type='text/plain')

    actor = await fetch_actor(data["actor"])
    actor_uri = 'https://{}/actor'.format(request.host)

    logging.debug(">> payload %r", data)

    processor = processors.get(data['type'], None)
    if processor:
        await processor(actor, data, request)

    return aiohttp.web.Response(body=b'{}', content_type='application/activity+json')

app.router.add_post('/inbox', inbox)
relay/config.py (272 lines, new file)

@@ -0,0 +1,272 @@
import json
import yaml

from pathlib import Path
from urllib.parse import urlparse


relay_software_names = [
    'activityrelay',
    'aoderelay',
    'social.seattle.wa.us-relay',
    'unciarelay'
]


class DotDict(dict):
    def __getattr__(self, k):
        try:
            return self[k]

        except KeyError:
            raise AttributeError(f'{self.__class__.__name__} object has no attribute {k}') from None

    def __setattr__(self, k, v):
        try:
            if k in self._ignore_keys:
                super().__setattr__(k, v)

        except AttributeError:
            pass

        if k.startswith('_'):
            super().__setattr__(k, v)

        else:
            self[k] = v

    def __setitem__(self, k, v):
        if type(v) == dict:
            v = DotDict(v)

        super().__setitem__(k, v)

    def __delattr__(self, k):
        try:
            dict.__delitem__(self, k)

        except KeyError:
            raise AttributeError(f'{self.__class__.__name__} object has no attribute {k}') from None
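
# Example usage (illustrative): DotDict exposes dict keys as attributes and
# converts nested dicts on assignment, e.g.
#
#   d = DotDict({'ap': {'host': 'relay.example.com'}})
#   d.ap.host        # -> 'relay.example.com'
#   d.port = 8080    # equivalent to d['port'] = 8080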

class RelayConfig(DotDict):
    apkeys = {
        'host',
        'whitelist_enabled',
        'blocked_software',
        'blocked_instances',
        'whitelist'
    }

    cachekeys = {
        'json',
        'objects',
        'digests'
    }

    def __init__(self, path, is_docker):
        if is_docker:
            path = '/data/relay.yaml'

        self._isdocker = is_docker
        self._path = Path(path).expanduser()

        super().__init__({
            'db': str(self._path.parent.joinpath(f'{self._path.stem}.jsonld')),
            'listen': '0.0.0.0',
            'port': 8080,
            'note': 'Make a note about your instance here.',
            'push_limit': 512,
            'host': 'relay.example.com',
            'blocked_software': [],
            'blocked_instances': [],
            'whitelist': [],
            'whitelist_enabled': False,
            'json': 1024,
            'objects': 1024,
            'digests': 1024
        })

    def __setitem__(self, key, value):
        if self._isdocker and key in ['db', 'listen', 'port']:
            return

        if key in ['blocked_instances', 'blocked_software', 'whitelist']:
            assert isinstance(value, (list, set, tuple))

        elif key in ['port', 'json', 'objects', 'digests']:
            assert isinstance(value, (int))

        elif key == 'whitelist_enabled':
            assert isinstance(value, bool)

        super().__setitem__(key, value)

    @property
    def db(self):
        return Path(self['db']).expanduser().resolve()

    @property
    def path(self):
        return self._path

    @property
    def actor(self):
        return f'https://{self.host}/actor'

    @property
    def inbox(self):
        return f'https://{self.host}/inbox'

    @property
    def keyid(self):
        return f'{self.actor}#main-key'

    def ban_instance(self, instance):
        if instance.startswith('http'):
            instance = urlparse(instance).hostname

        if self.is_banned(instance):
            return False

        self.blocked_instances.append(instance)
        return True

    def unban_instance(self, instance):
        if instance.startswith('http'):
            instance = urlparse(instance).hostname

        try:
            self.blocked_instances.remove(instance)
            return True

        except:
            return False

    def ban_software(self, software):
        if self.is_banned_software(software):
            return False

        self.blocked_software.append(software)
        return True

    def unban_software(self, software):
        try:
            self.blocked_software.remove(software)
            return True

        except:
            return False

    def add_whitelist(self, instance):
        if instance.startswith('http'):
            instance = urlparse(instance).hostname

        if self.is_whitelisted(instance):
            return False

        self.whitelist.append(instance)
        return True

    def del_whitelist(self, instance):
        if instance.startswith('http'):
            instance = urlparse(instance).hostname

        try:
            self.whitelist.remove(instance)
            return True

        except:
            return False

    def is_banned(self, instance):
        if instance.startswith('http'):
            instance = urlparse(instance).hostname

        return instance in self.blocked_instances

    def is_banned_software(self, software):
        if not software:
            return False

        return software.lower() in self.blocked_software

    def is_whitelisted(self, instance):
        if instance.startswith('http'):
            instance = urlparse(instance).hostname

        return instance in self.whitelist

    def load(self):
        options = {}

        try:
            options['Loader'] = yaml.FullLoader

        except AttributeError:
            pass

        try:
            with open(self.path) as fd:
                config = yaml.load(fd, **options)

        except FileNotFoundError:
            return False

        if not config:
            return False

        for key, value in config.items():
            if key in ['ap', 'cache']:
                for k, v in value.items():
                    if k not in self:
                        continue

                    self[k] = v

            elif key not in self:
                continue

            self[key] = value

        if self.host.endswith('example.com'):
            return False

        return True

    def save(self):
        config = {
            'db': self['db'],
            'listen': self.listen,
            'port': self.port,
            'note': self.note,
            'push_limit': self.push_limit,
            'ap': {key: self[key] for key in self.apkeys},
            'cache': {key: self[key] for key in self.cachekeys}
        }

        with open(self._path, 'w') as fd:
            yaml.dump(config, fd, sort_keys=False)

        return config

relay/database.py

@@ -1,43 +1,121 @@
import asyncio
import json
import logging
import urllib.parse
import simplejson as json
from sys import exit
import traceback

from Crypto.PublicKey import RSA
from urllib.parse import urlparse


from . import CONFIG
AP_CONFIG = CONFIG['ap']

try:
    with open(CONFIG['db']) as f:
        DATABASE = json.load(f)

except FileNotFoundError:
    logging.info('No database was found, making a new one.')
    DATABASE = {}

except json.decoder.JSONDecodeError:
    logging.info('Invalid JSON in db. Exiting...')
    exit(1)

following = DATABASE.get('relay-list', [])
for inbox in following:
    if urllib.parse.urlsplit(inbox).hostname in AP_CONFIG['blocked_instances']:
        following.remove(inbox)

    elif AP_CONFIG['whitelist_enabled'] is True and urllib.parse.urlsplit(inbox).hostname not in AP_CONFIG['whitelist']:
        following.remove(inbox)

DATABASE['relay-list'] = following

if 'actors' in DATABASE:
    DATABASE.pop('actors')

async def database_save():
    while True:
        with open(CONFIG['db'], 'w') as f:
            json.dump(DATABASE, f)
        await asyncio.sleep(30)

asyncio.ensure_future(database_save())


class RelayDatabase:
    def __init__(self, config):
        self.config = config
        self.data = None
        self.PRIVKEY = None

    @property
    def PUBKEY(self):
        return self.PRIVKEY.publickey()

    @property
    def pubkey(self):
        return self.PUBKEY.exportKey('PEM').decode('utf-8')

    @property
    def privkey(self):
        try:
            return self.data['private-key']

        except KeyError:
            return False

    @property
    def hostnames(self):
        return [urlparse(inbox).hostname for inbox in self.inboxes]

    @property
    def inboxes(self):
        return self.data.get('relay-list', [])

    def generate_key(self):
        self.PRIVKEY = RSA.generate(4096)
        self.data['private-key'] = self.PRIVKEY.exportKey('PEM').decode('utf-8')

    def load(self):
        new_db = True

        try:
            with self.config.db.open() as fd:
                self.data = json.load(fd)

            key = self.data.pop('actorKeys', None)

            if key:
                self.data['private-key'] = key.get('privateKey')

            self.data.pop('actors', None)
            new_db = False

        except FileNotFoundError:
            pass

        except json.decoder.JSONDecodeError as e:
            if self.config.db.stat().st_size > 0:
                raise e from None

        if not self.data:
            logging.info('No database was found. Making a new one.')
            self.data = {}

        for inbox in self.inboxes:
            if self.config.is_banned(inbox) or (self.config.whitelist_enabled and not self.config.is_whitelisted(inbox)):
                self.del_inbox(inbox)

        if not self.privkey:
            logging.info("No actor keys present, generating 4096-bit RSA keypair.")
            self.generate_key()

        else:
            self.PRIVKEY = RSA.importKey(self.privkey)

        self.save()
        return not new_db

    def save(self):
        with self.config.db.open('w') as fd:
            data = {
                'relay-list': self.inboxes,
                'private-key': self.privkey
            }

            json.dump(data, fd, indent=4)

    def get_inbox(self, domain):
        if domain.startswith('http'):
            domain = urlparse(domain).hostname

        for inbox in self.inboxes:
            if domain == urlparse(inbox).hostname:
                return inbox

    def add_inbox(self, inbox):
        assert inbox.startswith('https')
        assert inbox not in self.inboxes

        self.data['relay-list'].append(inbox)

    def del_inbox(self, inbox):
        if inbox not in self.inboxes:
            raise KeyError(inbox)

        self.data['relay-list'].remove(inbox)

relay/default.py

@@ -1,36 +0,0 @@
import aiohttp.web
import urllib.parse
from . import app, CONFIG
from .database import DATABASE

host = CONFIG['ap']['host']
note = CONFIG['note']

inboxes = DATABASE.get('relay-list', [])

async def default(request):
    targets = '<br>'.join([urllib.parse.urlsplit(target).hostname for target in inboxes])
    return aiohttp.web.Response(
        status=200,
        content_type="text/html",
        charset="utf-8",
        text="""
<html><head>
<title>ActivityPub Relay at {host}</title>
<style>
p {{ color: #FFFFFF; font-family: monospace, arial; font-size: 100%; }}
body {{ background-color: #000000; }}
</style>
</head>
<body>
<p>This is an Activity Relay for fediverse instances.</p>
<p>{note}</p>
<p>For Mastodon and Misskey instances, you may subscribe to this relay with the address: <a href="https://{host}/inbox">https://{host}/inbox</a></p>
<p>For Pleroma and other instances, you may subscribe to this relay with the address: <a href="https://{host}/actor">https://{host}/actor</a></p>
<p>To host your own relay, you may download the code at this address: <a href="https://git.pleroma.social/pleroma/relay">https://git.pleroma.social/pleroma/relay</a></p>
<br><p>List of {count} registered instances:<br>{targets}</p>
</body></html>

""".format(host=host, note=note, targets=targets, count=len(inboxes)))

app.router.add_get('/', default)

relay/http_debug.py

@@ -1,6 +1,5 @@
import logging
import aiohttp
import aiohttp.web

from collections import defaultdict


@@ -59,8 +58,11 @@ async def on_request_exception(session, trace_config_ctx, params):

def http_debug():
    if logging.DEBUG >= logging.root.level:
        return

    trace_config = aiohttp.TraceConfig()
    trace_config.on_request_start.append(on_request_start)
    trace_config.on_request_end.append(on_request_end)
    trace_config.on_request_exception.append(on_request_exception)
    return trace_config
    return [trace_config]

relay/http_signatures.py

@@ -1,148 +0,0 @@
import aiohttp
import aiohttp.web
import base64
import logging

from Crypto.PublicKey import RSA
from Crypto.Hash import SHA, SHA256, SHA512
from Crypto.Signature import PKCS1_v1_5

from cachetools import LFUCache
from async_lru import alru_cache

from .remote_actor import fetch_actor


HASHES = {
    'sha1': SHA,
    'sha256': SHA256,
    'sha512': SHA512
}


def split_signature(sig):
    default = {"headers": "date"}

    sig = sig.strip().split(',')

    for chunk in sig:
        k, _, v = chunk.partition('=')
        v = v.strip('\"')
        default[k] = v

    default['headers'] = default['headers'].split()
    return default


def build_signing_string(headers, used_headers):
    return '\n'.join(map(lambda x: ': '.join([x.lower(), headers[x]]), used_headers))


SIGSTRING_CACHE = LFUCache(1024)

def sign_signing_string(sigstring, key):
    if sigstring in SIGSTRING_CACHE:
        return SIGSTRING_CACHE[sigstring]

    pkcs = PKCS1_v1_5.new(key)
    h = SHA256.new()
    h.update(sigstring.encode('ascii'))
    sigdata = pkcs.sign(h)

    sigdata = base64.b64encode(sigdata)
    SIGSTRING_CACHE[sigstring] = sigdata.decode('ascii')

    return SIGSTRING_CACHE[sigstring]


def generate_body_digest(body):
    bodyhash = SIGSTRING_CACHE.get(body)

    if not bodyhash:
        h = SHA256.new(body.encode('utf-8'))
        bodyhash = base64.b64encode(h.digest()).decode('utf-8')
        SIGSTRING_CACHE[body] = bodyhash

    return bodyhash


def sign_headers(headers, key, key_id):
    headers = {x.lower(): y for x, y in headers.items()}
    used_headers = headers.keys()
    sig = {
        'keyId': key_id,
        'algorithm': 'rsa-sha256',
        'headers': ' '.join(used_headers)
    }
    sigstring = build_signing_string(headers, used_headers)
    sig['signature'] = sign_signing_string(sigstring, key)

    chunks = ['{}="{}"'.format(k, v) for k, v in sig.items()]
    return ','.join(chunks)


@alru_cache(maxsize=16384)
async def fetch_actor_key(actor):
    actor_data = await fetch_actor(actor)

    if not actor_data:
        return None

    try:
        return RSA.importKey(actor_data['publicKey']['publicKeyPem'])

    except Exception as e:
        logging.debug(f'Exception occurred while fetching actor key: {e}')


async def validate(actor, request):
    pubkey = await fetch_actor_key(actor)
    if not pubkey:
        return False

    logging.debug('actor key: %r', pubkey)

    headers = request.headers.copy()
    headers['(request-target)'] = ' '.join([request.method.lower(), request.path])

    sig = split_signature(headers['signature'])
    logging.debug('sigdata: %r', sig)

    sigstring = build_signing_string(headers, sig['headers'])
    logging.debug('sigstring: %r', sigstring)

    sign_alg, _, hash_alg = sig['algorithm'].partition('-')
    logging.debug('sign alg: %r, hash alg: %r', sign_alg, hash_alg)

    sigdata = base64.b64decode(sig['signature'])

    pkcs = PKCS1_v1_5.new(pubkey)
    h = HASHES[hash_alg].new()
    h.update(sigstring.encode('ascii'))
    result = pkcs.verify(h, sigdata)

    request['validated'] = result

    logging.debug('validates? %r', result)
    return result


async def http_signatures_middleware(app, handler):
    async def http_signatures_handler(request):
        request['validated'] = False

        if 'signature' in request.headers and request.method == 'POST':
            data = await request.json()
            if 'actor' not in data:
                raise aiohttp.web.HTTPUnauthorized(body='signature check failed, no actor in message')

            actor = data["actor"]
            if not (await validate(actor, request)):
                logging.info('Signature validation failed for: %r', actor)
                raise aiohttp.web.HTTPUnauthorized(body='signature check failed, signature did not match key')

            return (await handler(request))

        return (await handler(request))

    return http_signatures_handler

relay/http_stats.py

@@ -1,11 +0,0 @@
import aiohttp.web

from . import app
from .http_debug import STATS


async def stats(request):
    return aiohttp.web.json_response(STATS)


app.router.add_get('/stats', stats)

relay/logger.py (34 lines, new file)

@@ -0,0 +1,34 @@
import logging
import os


## Add the verbose logging level
def verbose(message, *args, **kwargs):
    if not logging.root.isEnabledFor(logging.VERBOSE):
        return

    logging.log(logging.VERBOSE, message, *args, **kwargs)

setattr(logging, 'verbose', verbose)
setattr(logging, 'VERBOSE', 15)
logging.addLevelName(15, 'VERBOSE')
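
## Example (illustrative): logging.verbose('fetched %s', url) now emits at
## level 15, which sits between DEBUG (10) and INFO (20)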


## Get log level from environment if possible
env_log_level = os.environ.get('LOG_LEVEL', 'INFO').upper()


## Make sure the level from the environment is valid
try:
    log_level = getattr(logging, env_log_level)

except AttributeError:
    log_level = logging.INFO


## Set logging config
logging.basicConfig(
    level = log_level,
    format = "[%(asctime)s] %(levelname)s: %(message)s",
    handlers = [logging.StreamHandler()]
)

relay/logging.py

@@ -1,8 +0,0 @@
import logging


logging.basicConfig(
    level=logging.INFO,
    format="[%(asctime)s] %(levelname)s: %(message)s",
    handlers=[logging.StreamHandler()]
)

464
relay/manage.py
464
relay/manage.py
|
@ -1,83 +1,421 @@
|
|||
import Crypto
|
||||
import asyncio
|
||||
import sys
|
||||
import simplejson as json
|
||||
import click
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import platform
|
||||
|
||||
from .actor import follow_remote_actor, unfollow_remote_actor
|
||||
from . import CONFIG
|
||||
from .database import DATABASE
|
||||
from aiohttp.web import AppRunner, TCPSite
|
||||
from cachetools import LRUCache
|
||||
|
||||
from . import app, views, __version__
|
||||
from .config import DotDict, RelayConfig, relay_software_names
|
||||
from .database import RelayDatabase
|
||||
from .misc import check_open_port, follow_remote_actor, unfollow_remote_actor
|
||||
|
||||
|
||||
def relay_list():
|
||||
print('Connected to the following instances or relays:')
|
||||
[print('-', relay) for relay in DATABASE['relay-list']]
|
||||
@click.group('cli', context_settings={'show_default': True}, invoke_without_command=True)
|
||||
@click.option('--config', '-c', default='relay.yaml', help='path to the relay\'s config')
|
||||
@click.version_option(version=__version__, prog_name='ActivityRelay')
|
||||
@click.pass_context
|
||||
def cli(ctx, config):
|
||||
app['is_docker'] = bool(os.environ.get('DOCKER_RUNNING'))
|
||||
app['config'] = RelayConfig(config, app['is_docker'])
|
||||
|
||||
if not app['config'].load():
|
||||
app['config'].save()
|
||||
|
||||
app['database'] = RelayDatabase(app['config'])
|
||||
app['database'].load()
|
||||
|
||||
app['cache'] = DotDict()
|
||||
app['semaphore'] = asyncio.Semaphore(app['config']['push_limit'])
|
||||
|
||||
for key in app['config'].cachekeys:
|
||||
app['cache'][key] = LRUCache(app['config'][key])
|
||||
|
||||
if not ctx.invoked_subcommand:
|
||||
if app['config'].host.endswith('example.com'):
|
||||
relay_setup.callback()
|
||||
|
||||
else:
|
||||
relay_run.callback()
|
||||
|
||||
|
||||
def relay_follow():
|
||||
if len(sys.argv) < 3:
|
||||
print('usage: python3 -m relay.manage follow <target>')
|
||||
exit()
|
||||
|
||||
target = sys.argv[2]
|
||||
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.run_until_complete(follow_remote_actor(target))
|
||||
|
||||
print('Sent follow message to:', target)
|
||||
@cli.group('inbox')
|
||||
@click.pass_context
|
||||
def cli_inbox(ctx):
|
||||
'Manage the inboxes in the database'
|
||||
pass
|
||||
|
||||
|
||||
def relay_unfollow():
|
||||
if len(sys.argv) < 3:
|
||||
print('usage: python3 -m relay.manage unfollow <target>')
|
||||
exit()
|
||||
@cli_inbox.command('list')
|
||||
def cli_inbox_list():
|
||||
'List the connected instances or relays'
|
||||
|
||||
target = sys.argv[2]
|
||||
click.echo('Connected to the following instances or relays:')
|
||||
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.run_until_complete(unfollow_remote_actor(target))
|
||||
|
||||
print('Sent unfollow message to:', target)
|
||||
|
||||
def relay_forceremove():
|
||||
if len(sys.argv) < 3:
|
||||
print('usage: python3 -m relay.manage force-remove <target>')
|
||||
exit()
|
||||
|
||||
target = sys.argv[2]
|
||||
|
||||
following = DATABASE.get('relay-list', [])
|
||||
|
||||
if target in following:
|
||||
following.remove(target)
|
||||
DATABASE['relay-list'] = following
|
||||
with open('relay.jsonld', 'w') as f:
|
||||
json.dump(DATABASE, f)
|
||||
print('Removed target from DB:', target)
|
||||
for inbox in app['database'].inboxes:
|
||||
click.echo(f'- {inbox}')
|
||||
|
||||
|
||||
TASKS = {
|
||||
'list': relay_list,
|
||||
'follow': relay_follow,
|
||||
'unfollow': relay_unfollow,
|
||||
'force-remove': relay_forceremove
|
||||
}
|
||||
@cli_inbox.command('follow')
|
||||
@click.argument('actor')
|
||||
def cli_inbox_follow(actor):
|
||||
'Follow an actor (Relay must be running)'
|
||||
|
||||
run_in_loop(handle_follow_actor, actor)
|
||||
|
||||
|
||||
def usage():
|
||||
print('usage: python3 -m relay.manage <task> [...]')
|
||||
print('tasks:')
|
||||
[print('-', task) for task in TASKS.keys()]
|
||||
exit()
|
||||
@cli_inbox.command('unfollow')
|
||||
@click.argument('actor')
|
||||
def cli_inbox_unfollow(actor):
|
||||
'Unfollow an actor (Relay must be running)'
|
||||
|
||||
run_in_loop(handle_unfollow_actor(actor))
|
||||
|
||||
|
||||
@cli_inbox.command('add')
@click.argument('inbox')
def cli_inbox_add(inbox):
    'Add an inbox to the database'

    database = app['database']
    config = app['config']

    if not inbox.startswith('http'):
        inbox = f'https://{inbox}/inbox'

    if database.get_inbox(inbox):
        click.echo(f'Error: Inbox already in database: {inbox}')
        return

    if config.is_banned(inbox):
        click.echo(f'Error: Refusing to add banned inbox: {inbox}')
        return

    database.add_inbox(inbox)
    database.save()
    click.echo(f'Added inbox to the database: {inbox}')


@cli_inbox.command('remove')
@click.argument('inbox')
def cli_inbox_remove(inbox):
    'Remove an inbox from the database'

    database = app['database']
    dbinbox = database.get_inbox(inbox)

    if not dbinbox:
        click.echo(f'Error: Inbox does not exist: {inbox}')
        return

    database.del_inbox(dbinbox)
    database.save()
    click.echo(f'Removed inbox from the database: {inbox}')
@cli.group('instance')
def cli_instance():
    'Manage instance bans'
    pass


@cli_instance.command('list')
def cli_instance_list():
    'List all banned instances'

    click.echo('Banned instances or relays:')

    for domain in app['config'].blocked_instances:
        click.echo(f'- {domain}')


@cli_instance.command('ban')
@click.argument('target')
def cli_instance_ban(target):
    'Ban an instance and remove the associated inbox if it exists'

    config = app['config']
    database = app['database']
    inbox = database.get_inbox(target)

    if config.ban_instance(target):
        config.save()

        if inbox:
            database.del_inbox(inbox)
            database.save()

        click.echo(f'Banned instance: {target}')
        return

    click.echo(f'Instance already banned: {target}')


@cli_instance.command('unban')
@click.argument('target')
def cli_instance_unban(target):
    'Unban an instance'

    config = app['config']

    if config.unban_instance(target):
        config.save()

        click.echo(f'Unbanned instance: {target}')
        return

    click.echo(f'Instance wasn\'t banned: {target}')
@cli.group('software')
def cli_software():
    'Manage banned software'
    pass


@cli_software.command('list')
def cli_software_list():
    'List all banned software'

    click.echo('Banned software:')

    for software in app['config'].blocked_software:
        click.echo(f'- {software}')


@cli_software.command('ban')
@click.option('--fetch-nodeinfo/--ignore-nodeinfo', '-f', 'use_nodeinfo', default=False,
    help='Treat NAME like a domain and try to fetch the software name from nodeinfo'
)
@click.argument('name')
def cli_software_ban(name, use_nodeinfo):
    'Ban software. Use RELAYS for NAME to ban relays'

    config = app['config']

    if name == 'RELAYS':
        for name in relay_software_names:
            config.ban_software(name)

        config.save()
        return click.echo('Banned all relay software')

    # the flag destination is renamed so it doesn't shadow the
    # fetch_nodeinfo() coroutine called below
    if use_nodeinfo:
        software = run_in_loop(fetch_nodeinfo, name)

        if not software:
            return click.echo(f'Failed to fetch software name from domain: {name}')

        name = software

    if config.ban_software(name):
        config.save()
        return click.echo(f'Banned software: {name}')

    click.echo(f'Software already banned: {name}')


@cli_software.command('unban')
@click.option('--fetch-nodeinfo/--ignore-nodeinfo', '-f', 'use_nodeinfo', default=False,
    help='Treat NAME like a domain and try to fetch the software name from nodeinfo'
)
@click.argument('name')
def cli_software_unban(name, use_nodeinfo):
    'Unban software. Use RELAYS for NAME to unban relays'

    config = app['config']

    if name == 'RELAYS':
        for name in relay_software_names:
            config.unban_software(name)

        config.save()
        return click.echo('Unbanned all relay software')

    if use_nodeinfo:
        software = run_in_loop(fetch_nodeinfo, name)

        if not software:
            return click.echo(f'Failed to fetch software name from domain: {name}')

        name = software

    if config.unban_software(name):
        config.save()
        return click.echo(f'Unbanned software: {name}')

    click.echo(f'Software wasn\'t banned: {name}')
@cli.group('whitelist')
def cli_whitelist():
    'Manage the instance whitelist'
    pass


@cli_whitelist.command('list')
def cli_whitelist_list():
    'List all whitelisted domains'

    click.echo('Current whitelisted domains:')

    for domain in app['config'].whitelist:
        click.echo(f'- {domain}')


@cli_whitelist.command('add')
@click.argument('instance')
def cli_whitelist_add(instance):
    'Add an instance to the whitelist'

    config = app['config']

    if not config.add_whitelist(instance):
        return click.echo(f'Instance already in the whitelist: {instance}')

    config.save()
    click.echo(f'Instance added to the whitelist: {instance}')


@cli_whitelist.command('remove')
@click.argument('instance')
def cli_whitelist_remove(instance):
    'Remove an instance from the whitelist'

    config = app['config']
    database = app['database']
    inbox = database.get_inbox(instance)

    if not config.del_whitelist(instance):
        return click.echo(f'Instance not in the whitelist: {instance}')

    config.save()

    if inbox and config.whitelist_enabled:
        database.del_inbox(inbox)
        database.save()

    click.echo(f'Removed instance from the whitelist: {instance}')
@cli.command('setup')
def relay_setup():
    'Generate a new config'

    config = app['config']

    while True:
        config.host = click.prompt('What domain will the relay be hosted on?', default=config.host)

        if not config.host.endswith('example.com'):
            break

        click.echo('The domain must not be example.com')

    config.listen = click.prompt('Which address should the relay listen on?', default=config.listen)

    while True:
        config.port = click.prompt('What TCP port should the relay listen on?', default=config.port, type=int)
        break

    config.save()

    if not app['is_docker'] and click.confirm('Relay is all set up! Would you like to run it now?'):
        relay_run.callback()
@cli.command('run')
def relay_run():
    'Run the relay'

    config = app['config']

    if config.host.endswith('example.com'):
        return click.echo('Relay is not set up. Please edit your relay config or run "activityrelay setup".')

    vers_split = platform.python_version().split('.')
    pip_command = 'pip3 uninstall pycrypto && pip3 install pycryptodome'

    if Crypto.__version__ == '2.6.1':
        if int(vers_split[1]) > 7:
            click.echo('Error: PyCrypto is broken on Python 3.8+. Please replace it with pycryptodome before running again. Exiting...')
            return click.echo(pip_command)

        else:
            click.echo('Warning: PyCrypto is old and should be replaced with pycryptodome')
            return click.echo(pip_command)

    if not check_open_port(config.listen, config.port):
        return click.echo(f'Error: A server is already running on port {config.port}')

    # web pages
    app.router.add_get('/', views.home)

    # endpoints
    app.router.add_post('/actor', views.inbox)
    app.router.add_post('/inbox', views.inbox)
    app.router.add_get('/actor', views.actor)
    app.router.add_get('/nodeinfo/2.0.json', views.nodeinfo_2_0)
    app.router.add_get('/.well-known/nodeinfo', views.nodeinfo_wellknown)
    app.router.add_get('/.well-known/webfinger', views.webfinger)

    if logging.DEBUG >= logging.root.level:
        app.router.add_get('/stats', views.stats)

    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    asyncio.ensure_future(handle_start_webserver(), loop=loop)
    loop.run_forever()
def run_in_loop(func, *args, **kwargs):
    loop = asyncio.new_event_loop()
    return loop.run_until_complete(func(*args, **kwargs))


async def handle_follow_actor(app, target):
    config = app['config']

    if not target.startswith('http'):
        target = f'https://{target}/actor'

    if config.is_banned(target):
        return click.echo(f'Error: Refusing to follow banned actor: {target}')

    await follow_remote_actor(target)
    click.echo(f'Sent follow message to: {target}')


async def handle_unfollow_actor(app, target):
    database = app['database']

    if not target.startswith('http'):
        target = f'https://{target}/actor'

    if not database.get_inbox(target):
        return click.echo(f'Error: Not following actor: {target}')

    await unfollow_remote_actor(target)
    click.echo(f'Sent unfollow message to: {target}')
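# Usage sketch: run_in_loop() spins up a fresh event loop per call, letting
# the synchronous click commands above drive a coroutine to completion, e.g.
#   run_in_loop(handle_follow_actor, app, 'example.social')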
async def handle_start_webserver():
    config = app['config']
    runner = AppRunner(app, access_log_format='%{X-Forwarded-For}i "%r" %s %b "%{Referer}i" "%{User-Agent}i"')

    logging.info(f'Starting webserver at {config.host} ({config.listen}:{config.port})')
    await runner.setup()

    site = TCPSite(runner, config.listen, config.port)
    await site.start()
# Old entry point (removed):

def main():
    if len(sys.argv) < 2:
        usage()

    if sys.argv[1] in TASKS:
        TASKS[sys.argv[1]]()
    else:
        usage()


if __name__ == '__main__':
    main()


# New entry point (added):

def main():
    cli(prog_name='relay')


if __name__ == '__main__':
    click.echo('Running relay.manage is deprecated. Run `activityrelay [command]` instead.')
320  relay/misc.py  Normal file

@@ -0,0 +1,320 @@
import asyncio
import base64
import json
import logging
import socket
import traceback

from Crypto.Hash import SHA, SHA256, SHA512
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5
from aiohttp import ClientSession
from datetime import datetime
from json.decoder import JSONDecodeError
from urllib.parse import urlparse
from uuid import uuid4

from . import app
from .http_debug import http_debug


HASHES = {
    'sha1': SHA,
    'sha256': SHA256,
    'sha512': SHA512
}


def build_signing_string(headers, used_headers):
    return '\n'.join(map(lambda x: ': '.join([x.lower(), headers[x]]), used_headers))
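# Example (illustrative values): for headers
#   {'date': 'Tue, 01 Jan 2022 00:00:00 GMT', 'host': 'relay.example'}
# and used_headers ['date', 'host'], the function returns
#   'date: Tue, 01 Jan 2022 00:00:00 GMT\nhost: relay.example'
# which is the string that actually gets signed for HTTP signatures.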
def check_open_port(host, port):
    if host == '0.0.0.0':
        host = '127.0.0.1'

    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        try:
            # connect_ex returns 0 when something is already listening
            return s.connect_ex((host, port)) != 0

        except socket.error:
            return False
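# Usage sketch: the function returns True when the port looks free, which is
# how relay_run() uses it above:
#   if not check_open_port('0.0.0.0', 8080):
#       ...bail out, something is already bound to 8080...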
def create_signature_header(headers):
    headers = {k.lower(): v for k, v in headers.items()}
    used_headers = headers.keys()
    sigstring = build_signing_string(headers, used_headers)

    sig = {
        'keyId': app['config'].keyid,
        'algorithm': 'rsa-sha256',
        'headers': ' '.join(used_headers),
        'signature': sign_signing_string(sigstring, app['database'].PRIVKEY)
    }

    chunks = ['{}="{}"'.format(k, v) for k, v in sig.items()]
    return ','.join(chunks)
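# The resulting header value looks roughly like this (host and signature are
# illustrative, abbreviated):
#   keyId="https://relay.example/actor#main-key",algorithm="rsa-sha256",
#   headers="(request-target) date host digest content-length",signature="MIIB..."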
def distill_object_id(activity):
    logging.debug(f'>> determining object ID for {activity["object"]}')

    try:
        return activity['object']['id']

    except TypeError:
        return activity['object']


def distill_inboxes(actor, object_id):
    database = app['database']
    origin_hostname = urlparse(object_id).hostname
    actor_inbox = get_actor_inbox(actor)
    targets = []

    for inbox in database.inboxes:
        # skip both the sender's inbox and any inbox on the object's origin host
        if inbox != actor_inbox and urlparse(inbox).hostname != origin_hostname:
            targets.append(inbox)

    return targets
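# Sketch (assumed data): with inboxes ['https://a.example/inbox',
# 'https://b.example/inbox'] and an object ID hosted on a.example, only
# 'https://b.example/inbox' is returned, so posts are never echoed back to
# their origin or to the actor that delivered them.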
def generate_body_digest(body):
    bodyhash = app['cache'].digests.get(body)

    if bodyhash:
        return bodyhash

    h = SHA256.new(body.encode('utf-8'))
    bodyhash = base64.b64encode(h.digest()).decode('utf-8')
    app['cache'].digests[body] = bodyhash

    return bodyhash
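# Example: generate_body_digest('') yields the base64 SHA-256 of the empty
# string, '47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=', which request()
# below sends as
#   Digest: SHA-256=47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=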
def get_actor_inbox(actor):
    return actor.get('endpoints', {}).get('sharedInbox', actor['inbox'])
def sign_signing_string(sigstring, key):
    pkcs = PKCS1_v1_5.new(key)
    h = SHA256.new()
    h.update(sigstring.encode('ascii'))
    sigdata = pkcs.sign(h)

    return base64.b64encode(sigdata).decode('utf-8')
def split_signature(sig):
    default = {"headers": "date"}

    sig = sig.strip().split(',')

    for chunk in sig:
        k, _, v = chunk.partition('=')
        v = v.strip('\"')
        default[k] = v

    default['headers'] = default['headers'].split()
    return default
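# Example (same header shape create_signature_header produces; values assumed):
#   split_signature('keyId="https://relay.example/actor#main-key",algorithm="rsa-sha256",headers="date host"')
# returns
#   {'keyId': 'https://relay.example/actor#main-key',
#    'algorithm': 'rsa-sha256', 'headers': ['date', 'host']}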
async def fetch_actor_key(actor):
    actor_data = await request(actor)

    if not actor_data:
        return None

    try:
        return RSA.importKey(actor_data['publicKey']['publicKeyPem'])

    except Exception as e:
        logging.debug(f'Exception occurred while fetching actor key: {e}')


async def fetch_nodeinfo(domain):
    nodeinfo_url = None

    wk_nodeinfo = await request(f'https://{domain}/.well-known/nodeinfo', sign_headers=False)

    if not wk_nodeinfo:
        return

    for link in wk_nodeinfo.get('links', []):
        if link['rel'] == 'http://nodeinfo.diaspora.software/ns/schema/2.0':
            nodeinfo_url = link['href']
            break

    if not nodeinfo_url:
        return

    nodeinfo_data = await request(nodeinfo_url, sign_headers=False)

    try:
        return nodeinfo_data['software']['name']

    except KeyError:
        return False
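# The well-known document being walked above has this shape (see
# nodeinfo_wellknown in views.py below; hostname assumed):
#   {"links": [{"rel": "http://nodeinfo.diaspora.software/ns/schema/2.0",
#               "href": "https://relay.example/nodeinfo/2.0.json"}]}
# and the linked 2.0 document carries {"software": {"name": ...}}.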
async def follow_remote_actor(actor_uri):
    config = app['config']
    database = app['database']

    actor = await request(actor_uri)

    if not actor:
        logging.error(f'failed to fetch actor at: {actor_uri}')
        return

    # only resolve the inbox once the actor fetch is known to have succeeded
    inbox = get_actor_inbox(actor)

    logging.verbose(f'sending follow request: {actor_uri}')

    message = {
        "@context": "https://www.w3.org/ns/activitystreams",
        "type": "Follow",
        "to": [actor['id']],
        "object": actor['id'],
        "id": f"https://{config.host}/activities/{uuid4()}",
        "actor": f"https://{config.host}/actor"
    }

    await request(inbox, message)


async def unfollow_remote_actor(actor_uri):
    config = app['config']
    database = app['database']

    actor = await request(actor_uri)

    if not actor:
        logging.error(f'failed to fetch actor: {actor_uri}')
        return

    inbox = get_actor_inbox(actor)
    logging.verbose(f'sending unfollow request to inbox: {inbox}')

    message = {
        "@context": "https://www.w3.org/ns/activitystreams",
        "type": "Undo",
        "to": [actor_uri],
        "object": {
            "type": "Follow",
            "object": actor_uri,
            "actor": actor_uri,
            "id": f"https://{config.host}/activities/{uuid4()}"
        },
        "id": f"https://{config.host}/activities/{uuid4()}",
        "actor": f"https://{config.host}/actor"
    }

    await request(inbox, message)
async def request(uri, data=None, force=False, sign_headers=True):
    ## If it's a GET request and force is not set, try the cache first
    if not data and not force:
        try:
            return app['cache'].json[uri]

        except KeyError:
            pass

    url = urlparse(uri)
    method = 'POST' if data else 'GET'
    headers = {'User-Agent': 'ActivityRelay'}

    ## Set the content type for a POST
    if data and 'Content-Type' not in headers:
        headers['Content-Type'] = 'application/activity+json'

    ## Set the accepted content type for a GET
    elif not data and 'Accept' not in headers:
        headers['Accept'] = 'application/activity+json'

    if sign_headers:
        signing_headers = {
            '(request-target)': f'{method.lower()} {url.path}',
            'Date': datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT'),
            'Host': url.netloc
        }

        if data:
            assert isinstance(data, dict)

            action = data.get('type')
            data = json.dumps(data)
            signing_headers.update({
                'Digest': f'SHA-256={generate_body_digest(data)}',
                'Content-Length': str(len(data.encode('utf-8')))
            })

        signing_headers['Signature'] = create_signature_header(signing_headers)

        ## (request-target) and Host are only part of the signing string,
        ## not headers to be sent over the wire
        del signing_headers['(request-target)']
        del signing_headers['Host']

        headers.update(signing_headers)

    try:
        # json_serializer=DotDict maybe?
        async with ClientSession(trace_configs=http_debug()) as session, app['semaphore']:
            async with session.request(method, uri, headers=headers, data=data) as resp:
                ## aiohttp has been known to leak if the response hasn't been read,
                ## so read the response no matter what
                resp_data = await resp.read()
                resp_payload = json.loads(resp_data.decode('utf-8'))

                if resp.status not in [200, 202]:
                    if not data:
                        logging.verbose(f'Received error when requesting {uri}: {resp.status} {resp_payload}')
                        return

                    logging.verbose(f'Received error when sending {action} to {uri}: {resp.status} {resp_payload}')
                    return

                logging.debug(f'{uri} >> resp {resp_payload}')

                app['cache'].json[uri] = resp_payload
                return resp_payload

    except JSONDecodeError:
        return

    except Exception:
        traceback.print_exc()
async def validate_signature(actor, http_request):
    pubkey = await fetch_actor_key(actor)

    if not pubkey:
        return False

    logging.debug(f'actor key: {pubkey}')

    headers = {key.lower(): value for key, value in http_request.headers.items()}
    headers['(request-target)'] = ' '.join([http_request.method.lower(), http_request.path])

    sig = split_signature(headers['signature'])
    logging.debug(f'sigdata: {sig}')

    sigstring = build_signing_string(headers, sig['headers'])
    logging.debug(f'sigstring: {sigstring}')

    sign_alg, _, hash_alg = sig['algorithm'].partition('-')
    logging.debug(f'sign alg: {sign_alg}, hash alg: {hash_alg}')

    sigdata = base64.b64decode(sig['signature'])

    pkcs = PKCS1_v1_5.new(pubkey)
    h = HASHES[hash_alg].new()
    h.update(sigstring.encode('ascii'))
    result = pkcs.verify(h, sigdata)

    http_request['validated'] = result

    logging.debug(f'validates? {result}')
    return result
@@ -1,67 +0,0 @@
import subprocess
import urllib.parse

import aiohttp.web

from . import app
from .database import DATABASE


try:
    commit_label = subprocess.check_output(["git", "rev-parse", "HEAD"]).strip().decode('ascii')
except:
    commit_label = '???'


nodeinfo_template = {
    # XXX - is this valid for a relay?
    'openRegistrations': True,
    'protocols': ['activitypub'],
    'services': {
        'inbound': [],
        'outbound': []
    },
    'software': {
        'name': 'activityrelay',
        'version': '0.1 {}'.format(commit_label)
    },
    'usage': {
        'localPosts': 0,
        'users': {
            'total': 1
        }
    },
    'version': '2.0'
}


def get_peers():
    global DATABASE

    return [urllib.parse.urlsplit(inbox).hostname for inbox in DATABASE.get('relay-list', [])]


async def nodeinfo_2_0(request):
    data = nodeinfo_template.copy()
    data['metadata'] = {
        'peers': get_peers()
    }
    return aiohttp.web.json_response(data)


app.router.add_get('/nodeinfo/2.0.json', nodeinfo_2_0)


async def nodeinfo_wellknown(request):
    data = {
        'links': [
            {
                'rel': 'http://nodeinfo.diaspora.software/ns/schema/2.0',
                'href': 'https://{}/nodeinfo/2.0.json'.format(request.host)
            }
        ]
    }
    return aiohttp.web.json_response(data)


app.router.add_get('/.well-known/nodeinfo', nodeinfo_wellknown)
122  relay/processors.py  Normal file

@@ -0,0 +1,122 @@
import asyncio
import logging

from uuid import uuid4

from . import app, misc


async def handle_relay(actor, data, request):
    cache = app['cache'].objects
    object_id = misc.distill_object_id(data)

    if object_id in cache:
        logging.verbose(f'already relayed {object_id} as {cache[object_id]}')
        return

    logging.verbose(f'Relaying post from {actor["id"]}')

    activity_id = f"https://{request.host}/activities/{uuid4()}"

    message = {
        "@context": "https://www.w3.org/ns/activitystreams",
        "type": "Announce",
        "to": [f"https://{request.host}/followers"],
        "actor": f"https://{request.host}/actor",
        "object": object_id,
        "id": activity_id
    }

    logging.debug(f'>> relay: {message}')

    inboxes = misc.distill_inboxes(actor, object_id)
    futures = [misc.request(inbox, data=message) for inbox in inboxes]

    asyncio.ensure_future(asyncio.gather(*futures))
    cache[object_id] = activity_id
async def handle_forward(actor, data, request):
    cache = app['cache'].objects
    object_id = misc.distill_object_id(data)

    if object_id in cache:
        logging.verbose(f'already forwarded {object_id}')
        return

    logging.verbose(f'Forwarding post from {actor["id"]}')
    logging.debug(f'>> Relay {data}')

    # distill_inboxes expects the actor object, as in handle_relay above
    inboxes = misc.distill_inboxes(actor, object_id)

    futures = [misc.request(inbox, data=data) for inbox in inboxes]
    asyncio.ensure_future(asyncio.gather(*futures))

    cache[object_id] = object_id
async def handle_follow(actor, data, request):
    config = app['config']
    database = app['database']

    inbox = misc.get_actor_inbox(actor)

    if inbox not in database.inboxes:
        database.add_inbox(inbox)
        database.save()
        asyncio.ensure_future(misc.follow_remote_actor(actor['id']))

    message = {
        "@context": "https://www.w3.org/ns/activitystreams",
        "type": "Accept",
        "to": [actor["id"]],
        "actor": config.actor,

        # this is wrong per litepub, but mastodon < 2.4 is not compliant with that profile.
        "object": {
            "type": "Follow",
            "id": data["id"],
            "object": config.actor,
            "actor": actor["id"]
        },

        "id": f"https://{request.host}/activities/{uuid4()}",
    }

    asyncio.ensure_future(misc.request(inbox, message))
async def handle_undo(actor, data, request):
    ## If the activity being undone is an Announce, forward it instead
    if data['object']['type'] == 'Announce':
        await handle_forward(actor, data, request)
        return

    elif data['object']['type'] != 'Follow':
        return

    database = app['database']
    inbox = database.get_inbox(actor['id'])

    if not inbox:
        return

    database.del_inbox(inbox)
    database.save()

    await misc.unfollow_remote_actor(actor['id'])
processors = {
    'Announce': handle_relay,
    'Create': handle_relay,
    'Delete': handle_forward,
    'Follow': handle_follow,
    'Undo': handle_undo,
    'Update': handle_forward,
}


async def run_processor(request, data, actor):
    logging.verbose(f'New activity from actor: {actor["id"]} {data["type"]}')
    return await processors[data['type']](actor, data, request)
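# Note: an activity type with no entry in `processors` raises KeyError here.
# A guard along these lines (sketch, not in the original) would drop unknown
# types instead:
#   if data['type'] not in processors:
#       return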
@@ -1,56 +0,0 @@
import logging
import aiohttp

from cachetools import TTLCache
from datetime import datetime
from urllib.parse import urlsplit

from . import CONFIG
from .http_debug import http_debug


CACHE_SIZE = CONFIG.get('cache-size', 16384)
CACHE_TTL = CONFIG.get('cache-ttl', 3600)

ACTORS = TTLCache(CACHE_SIZE, CACHE_TTL)


async def fetch_actor(uri, headers={}, force=False, sign_headers=True):
    if uri in ACTORS and not force:
        return ACTORS[uri]

    from .actor import PRIVKEY
    from .http_signatures import sign_headers

    url = urlsplit(uri)
    key_id = 'https://{}/actor#main-key'.format(CONFIG['ap']['host'])

    headers.update({
        'Accept': 'application/activity+json',
        'User-Agent': 'ActivityRelay'
    })

    if sign_headers:
        headers.update({
            '(request-target)': 'get {}'.format(url.path),
            'Date': datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT'),
            'Host': url.netloc
        })

        headers['signature'] = sign_headers(headers, PRIVKEY, key_id)
        headers.pop('(request-target)')
        headers.pop('Host')

    try:
        async with aiohttp.ClientSession(trace_configs=[http_debug()]) as session:
            async with session.get(uri, headers=headers) as resp:

                if resp.status != 200:
                    return None

                ACTORS[uri] = (await resp.json(encoding='utf-8', content_type=None))
                return ACTORS[uri]

    except Exception as e:
        logging.info('Caught %r while fetching actor %r.', e, uri)
        return None
202  relay/views.py  Normal file

@@ -0,0 +1,202 @@
import logging
import subprocess
import traceback

from aiohttp.web import HTTPForbidden, HTTPUnauthorized, Response, json_response
from urllib.parse import urlparse

from . import __version__, app, misc
from .http_debug import STATS
from .processors import run_processor


try:
    commit_label = subprocess.check_output(["git", "rev-parse", "HEAD"]).strip().decode('ascii')

except:
    commit_label = '???'
async def home(request):
    targets = '<br>'.join(app['database'].hostnames)
    text = """
<html><head>
<title>ActivityPub Relay at {host}</title>
<style>
p {{ color: #FFFFFF; font-family: monospace, arial; font-size: 100%; }}
body {{ background-color: #000000; }}
a {{ color: #26F; }}
a:visited {{ color: #46C; }}
a:hover {{ color: #8AF; }}
</style>
</head>
<body>
<p>This is an Activity Relay for fediverse instances.</p>
<p>{note}</p>
<p>You may subscribe to this relay with the address: <a href="https://{host}/actor">https://{host}/actor</a></p>
<p>To host your own relay, you may download the code at this address: <a href="https://git.pleroma.social/pleroma/relay">https://git.pleroma.social/pleroma/relay</a></p>
<br><p>List of {count} registered instances:<br>{targets}</p>
</body></html>""".format(host=request.host, note=app['config'].note, targets=targets, count=len(app['database'].inboxes))

    return Response(
        status = 200,
        content_type = 'text/html',
        charset = 'utf-8',
        text = text
    )
async def actor(request):
    database = app['database']

    data = {
        "@context": "https://www.w3.org/ns/activitystreams",
        "endpoints": {
            "sharedInbox": f"https://{request.host}/inbox"
        },
        "followers": f"https://{request.host}/followers",
        "following": f"https://{request.host}/following",
        "inbox": f"https://{request.host}/inbox",
        "name": "ActivityRelay",
        "type": "Application",
        "id": f"https://{request.host}/actor",
        "publicKey": {
            "id": f"https://{request.host}/actor#main-key",
            "owner": f"https://{request.host}/actor",
            "publicKeyPem": database.pubkey
        },
        "summary": "ActivityRelay bot",
        "preferredUsername": "relay",
        "url": f"https://{request.host}/actor"
    }

    return json_response(data, content_type='application/activity+json')
async def inbox(request):
    config = app['config']
    database = app['database']

    ## reject if missing signature header
    if 'signature' not in request.headers:
        logging.verbose('Actor missing signature header')
        raise HTTPUnauthorized(body='missing signature')

    ## read message and get actor id and domain
    try:
        data = await request.json()
        actor_id = data['actor']
        actor_domain = urlparse(actor_id).hostname

    ## reject if there is no actor in the message
    except KeyError:
        logging.verbose('actor not in data')
        raise HTTPUnauthorized(body='no actor in message')

    except:
        traceback.print_exc()
        logging.verbose('Failed to parse inbox message')
        raise HTTPUnauthorized(body='failed to parse message')

    actor = await misc.request(actor_id)

    ## reject if actor is empty
    if not actor:
        logging.verbose(f'Failed to fetch actor: {actor_id}')
        raise HTTPUnauthorized('failed to fetch actor')

    ## reject if the actor isn't whitelisted while the whitelist is enabled
    elif config.whitelist_enabled and not config.is_whitelisted(actor_id):
        logging.verbose(f'Rejected actor for not being in the whitelist: {actor_id}')
        raise HTTPForbidden(body='access denied')

    ## reject if actor is banned
    if app['config'].is_banned(actor_id):
        logging.verbose(f'Ignored request from banned actor: {actor_id}')
        raise HTTPForbidden(body='access denied')

    ## reject if software used by actor is banned
    if len(config.blocked_software):
        software = await misc.fetch_nodeinfo(actor_domain)

        if config.is_banned_software(software):
            logging.verbose(f'Rejected actor for using specific software: {software}')
            raise HTTPForbidden(body='access denied')

    ## reject if the signature is invalid
    if not (await misc.validate_signature(actor_id, request)):
        logging.verbose(f'signature validation failed for: {actor_id}')
        raise HTTPUnauthorized(body='signature check failed, signature did not match key')

    ## reject if activity type isn't 'Follow' and the actor isn't following
    if data['type'] != 'Follow' and not database.get_inbox(actor_domain):
        logging.verbose(f'Rejected actor for trying to post while not following: {actor_id}')
        raise HTTPUnauthorized(body='access denied')

    logging.debug(f">> payload {data}")

    await run_processor(request, data, actor)
    return Response(body=b'{}', content_type='application/activity+json')
async def webfinger(request):
    config = app['config']
    subject = request.query['resource']

    if subject != f'acct:relay@{request.host}':
        return json_response({'error': 'user not found'}, status=404)

    data = {
        'subject': subject,
        'aliases': [config.actor],
        'links': [
            {'href': config.actor, 'rel': 'self', 'type': 'application/activity+json'},
            {'href': config.actor, 'rel': 'self', 'type': 'application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\"'}
        ]
    }

    return json_response(data)
async def nodeinfo_2_0(request):
    data = {
        # XXX - is this valid for a relay?
        'openRegistrations': True,
        'protocols': ['activitypub'],
        'services': {
            'inbound': [],
            'outbound': []
        },
        'software': {
            'name': 'activityrelay',
            'version': f'{__version__} {commit_label}'
        },
        'usage': {
            'localPosts': 0,
            'users': {
                'total': 1
            }
        },
        'metadata': {
            'peers': app['database'].hostnames
        },
        'version': '2.0'
    }

    return json_response(data)


async def nodeinfo_wellknown(request):
    data = {
        'links': [
            {
                'rel': 'http://nodeinfo.diaspora.software/ns/schema/2.0',
                'href': f'https://{request.host}/nodeinfo/2.0.json'
            }
        ]
    }
    return json_response(data)


async def stats(request):
    return json_response(STATS)
@@ -1,24 +0,0 @@
import aiohttp.web
from . import app


async def webfinger(request):
    subject = request.query['resource']

    if subject != 'acct:relay@{}'.format(request.host):
        return aiohttp.web.json_response({'error': 'user not found'}, status=404)

    actor_uri = "https://{}/actor".format(request.host)
    data = {
        "aliases": [actor_uri],
        "links": [
            {"href": actor_uri, "rel": "self", "type": "application/activity+json"},
            {"href": actor_uri, "rel": "self", "type": "application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\""}
        ],
        "subject": subject
    }

    return aiohttp.web.json_response(data)


app.router.add_get('/.well-known/webfinger', webfinger)
1  requirements.txt  Normal file

@@ -0,0 +1 @@
.
24  setup.cfg

@@ -1,5 +1,6 @@
[metadata]
name = relay
version = 0.2.0
description = Generic LitePub relay (works with all LitePub consumers and Mastodon)
long_description = file: README.md
long_description_content_type = text/markdown; charset=UTF-8

@@ -22,17 +23,14 @@ project_urls =
zip_safe = False
packages = find:
install_requires =
    # old pins (removed):
    aiohttp>=3.5.4
    async-timeout>=3.0.0
    attrs>=18.1.0
    chardet>=3.0.4
    idna>=2.7
    idna-ssl>=1.1.0; python_version < "3.7"
    multidict>=4.3.1
    pycryptodome>=3.9.4
    PyYAML>=5.1
    simplejson>=3.16.0
    yarl>=1.2.6
    cachetools
    async_lru
    # new pins (added):
    aiohttp >= 3.8.0
    cachetools >= 5.0.0
    click >= 8.1.2
    pycryptodome >= 3.14.1
    PyYAML >= 5.0.0
python_requires = >=3.6

[options.entry_points]
console_scripts =
    activityrelay = relay.manage:main