Mirror of https://git.pleroma.social/pleroma/relay.git
Synced 2024-11-22 06:27:59 +00:00
Merge branch 'dev' into 'master'

v0.2.2

See merge request pleroma/relay!37

commit 729477820f
@@ -1,4 +1,4 @@
-__version__ = '0.2.1'
+__version__ = '0.2.2'

 from aiohttp.web import Application

@@ -1,6 +1,8 @@
 import logging
 import os

+from pathlib import Path
+

 ## Add the verbose logging level
 def verbose(message, *args, **kwargs):
@@ -14,9 +16,15 @@ setattr(logging, 'VERBOSE', 15)
 logging.addLevelName(15, 'VERBOSE')


-## Get log level from environment if possible
+## Get log level and file from environment if possible
 env_log_level = os.environ.get('LOG_LEVEL', 'INFO').upper()

+try:
+    env_log_file = Path(os.environ.get('LOG_FILE')).expanduser().resolve()
+
+except TypeError:
+    env_log_file = None
+

 ## Make sure the level from the environment is valid
 try:
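The try/except above leans on the fact that os.environ.get('LOG_FILE') returns None when the variable is unset, and Path(None) raises TypeError, which is what routes the unset case to env_log_file = None. A minimal standalone check of that behaviour:

    from pathlib import Path

    try:
        Path(None)  # what Path() receives when LOG_FILE is unset
    except TypeError:
        print('LOG_FILE unset, falling back to None')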
@@ -27,8 +35,13 @@ except AttributeError:


 ## Set logging config
+handlers = [logging.StreamHandler()]
+
+if env_log_file:
+    handlers.append(logging.FileHandler(env_log_file))
+
 logging.basicConfig(
     level = log_level,
     format = "[%(asctime)s] %(levelname)s: %(message)s",
-    handlers = [logging.StreamHandler()]
+    handlers = handlers
 )
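Taken together with the previous hunk, stderr logging is unchanged and a file handler is only attached when LOG_FILE was set. A self-contained sketch of the same pattern, with a hypothetical path standing in for the resolved env_log_file:

    import logging

    log_file = '/tmp/relay.log'  # hypothetical; stands in for env_log_file

    # always log to stderr; additionally log to a file when one is configured
    handlers = [logging.StreamHandler()]

    if log_file:
        handlers.append(logging.FileHandler(log_file))

    logging.basicConfig(
        level = logging.INFO,
        format = "[%(asctime)s] %(levelname)s: %(message)s",
        handlers = handlers,
    )

    logging.info('written to both stderr and the log file')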
@@ -58,7 +58,7 @@ def create_signature_header(headers):


 def distill_object_id(activity):
-    logging.debug('>> determining object ID for', activity['object'])
+    logging.debug(f'>> determining object ID for {activity["object"]}')

     try:
         return activity['object']['id']
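The old call passed activity['object'] as an extra positional argument, which the logging module treats as %-format data rather than part of the message; with no %s placeholder in the string, the object never appeared, and a non-mapping value trips a formatting error in the handler. The f-string interpolates it before logging sees it. A small illustration:

    import logging

    logging.basicConfig(level=logging.DEBUG, format='%(message)s')
    obj = 'https://example.com/note/1'  # hypothetical activity object

    logging.debug('>> determining object ID for', obj)    # old form: obj is %-format data; triggers a logging format error
    logging.debug(f'>> determining object ID for {obj}')  # new form: obj appears in the message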
@@ -136,7 +136,7 @@ async def fetch_actor_key(actor):
 async def fetch_nodeinfo(domain):
     nodeinfo_url = None

-    wk_nodeinfo = await request(f'https://{domain}/.well-known/nodeinfo', sign_headers=False)
+    wk_nodeinfo = await request(f'https://{domain}/.well-known/nodeinfo', sign_headers=False, activity=False)

     if not wk_nodeinfo:
         return
@@ -149,7 +149,7 @@ async def fetch_nodeinfo(domain):
     if not nodeinfo_url:
         return

-    nodeinfo_data = await request(nodeinfo_url, sign_headers=False)
+    nodeinfo_data = await request(nodeinfo_url, sign_headers=False, activity=False)

     try:
         return nodeinfo_data['software']['name']
@@ -211,7 +211,7 @@ async def unfollow_remote_actor(actor_uri):
     await request(inbox, message)


-async def request(uri, data=None, force=False, sign_headers=True):
+async def request(uri, data=None, force=False, sign_headers=True, activity=True):
     ## If a get request and not force, try to use the cache first
     if not data and not force:
         try:
@@ -223,14 +223,15 @@ async def request(uri, data=None, force=False, sign_headers=True):
     url = urlparse(uri)
     method = 'POST' if data else 'GET'
     headers = {'User-Agent': 'ActivityRelay'}
+    mimetype = 'application/activity+json' if activity else 'application/json'

     ## Set the content type for a POST
     if data and 'Content-Type' not in headers:
-        headers['Content-Type'] = 'application/activity+json'
+        headers['Content-Type'] = mimetype

     ## Set the accepted content type for a GET
     elif not data and 'Accept' not in headers:
-        headers['Accept'] = 'application/activity+json'
+        headers['Accept'] = mimetype

     if sign_headers:
         signing_headers = {
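request() keeps defaulting to the ActivityPub mimetype, so existing call sites are unaffected; only the nodeinfo fetches above opt into plain JSON, which is what nodeinfo endpoints actually serve. A hypothetical call-site sketch, assuming request is imported from the patched module and the URLs are placeholders:

    async def example():
        # default activity=True: Accept/Content-Type is 'application/activity+json'
        actor = await request('https://example.com/actor')

        # activity=False: plain 'application/json', as used for nodeinfo
        nodeinfo = await request('https://example.com/nodeinfo/2.0',
                                 sign_headers=False, activity=False)
        return actor, nodeinfo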
@@ -118,5 +118,8 @@ processors = {


 async def run_processor(request, data, actor):
+    if data['type'] not in processors:
+        return
+
     logging.verbose(f'New activity from actor: {actor["id"]} {data["type"]}')
     return await processors[data['type']](actor, data, request)
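Before this guard, an activity type without an entry in the processors table raised KeyError on the dictionary lookup; now it is dropped quietly. A runnable sketch of the dispatch pattern with an empty stand-in table:

    import asyncio

    processors = {}  # stand-in for the real dispatch table

    async def run_processor(data):
        # the new guard: unknown activity types are ignored instead of raising KeyError
        if data['type'] not in processors:
            return
        return await processors[data['type']](data)

    # 'Move' is a hypothetical, unregistered type; this now returns None quietly
    print(asyncio.run(run_processor({'type': 'Move'})))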
@@ -1,6 +1,6 @@
 [metadata]
 name = relay
-version = 0.2.1
+version = 0.2.2
 description = Generic LitePub relay (works with all LitePub consumers and Mastodon)
 long_description = file: README.md
 long_description_content_type = text/markdown; charset=UTF-8