Merge branch 'masto_actor_fetch' into 'master'

Sign headers when fetching actor

See merge request pleroma/relay!28

commit e4de3081c0
@@ -88,13 +88,11 @@ async def fetch_actor_key(actor):
     if not actor_data:
         return None

-    if 'publicKey' not in actor_data:
-        return None
-
-    if 'publicKeyPem' not in actor_data['publicKey']:
-        return None
-
-    return RSA.importKey(actor_data['publicKey']['publicKeyPem'])
+    try:
+        return RSA.importKey(actor_data['publicKey']['publicKeyPem'])
+
+    except Exception as e:
+        logging.debug(f'Exception occured while fetching actor key: {e}')


 async def validate(actor, request):
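The two membership checks dropped above are now covered by the try/except: if 'publicKey' or 'publicKeyPem' is missing from the fetched document, the lookup raises KeyError, the except branch logs it, and the function falls through to return None. For reference, a minimal sketch of the kind of actor document this parses; the URLs and the PEM string are placeholders, not data from the relay:

```python
# Hypothetical ActivityPub actor document (placeholder values).
# fetch_actor_key() only reads the nested 'publicKey' -> 'publicKeyPem'
# field and hands the PEM string to RSA.importKey().
actor_data = {
    'id': 'https://example.com/actor',
    'type': 'Application',
    'publicKey': {
        'id': 'https://example.com/actor#main-key',
        'owner': 'https://example.com/actor',
        'publicKeyPem': '-----BEGIN PUBLIC KEY-----\n...\n-----END PUBLIC KEY-----\n',
    },
}

# A document missing either key raises KeyError on the lookup, which the
# new except branch logs before the function returns None.
```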
@@ -1,9 +1,12 @@
 import logging
 import aiohttp
-from . import CONFIG
-from .http_debug import http_debug

 from cachetools import TTLCache
+from datetime import datetime
+from urllib.parse import urlsplit
+
+from . import CONFIG
+from .http_debug import http_debug


 CACHE_SIZE = CONFIG.get('cache-size', 16384)
@@ -12,22 +15,42 @@ CACHE_TTL = CONFIG.get('cache-ttl', 3600)
 ACTORS = TTLCache(CACHE_SIZE, CACHE_TTL)


-async def fetch_actor(uri, headers={}, force=False):
+async def fetch_actor(uri, headers={}, force=False, sign_headers=True):
     if uri in ACTORS and not force:
         return ACTORS[uri]

-    new_headers = {'Accept': 'application/activity+json'}
+    from .actor import PRIVKEY
+    from .http_signatures import sign_headers

-    for k,v in headers.items():
-        new_headers[k.capitalize()] = v
+    url = urlsplit(uri)
+    key_id = 'https://{}/actor#main-key'.format(CONFIG['ap']['host'])
+
+    headers.update({
+        'Accept': 'application/activity+json',
+        'User-Agent': 'ActivityRelay'
+    })
+
+    if sign_headers:
+        headers.update({
+            '(request-target)': 'get {}'.format(url.path),
+            'Date': datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT'),
+            'Host': url.netloc
+        })
+
+        headers['signature'] = sign_headers(headers, PRIVKEY, key_id)
+        headers.pop('(request-target)')
+        headers.pop('Host')

     try:
         async with aiohttp.ClientSession(trace_configs=[http_debug()]) as session:
-            async with session.get(uri, headers=new_headers) as resp:
+            async with session.get(uri, headers=headers) as resp:

                 if resp.status != 200:
                     return None

                 ACTORS[uri] = (await resp.json(encoding='utf-8', content_type=None))
                 return ACTORS[uri]

     except Exception as e:
         logging.info('Caught %r while fetching actor %r.', e, uri)
         return None
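The signed GET relies on the sign_headers() helper imported from .http_signatures, which is not shown in this diff. As a rough illustration of what a helper with that call shape typically does under the draft-cavage HTTP Signatures scheme, here is a minimal sketch; it assumes PRIVKEY is a PyCryptodome RSA key and is not the relay's actual implementation:

```python
import base64

from Crypto.Hash import SHA256
from Crypto.Signature import PKCS1_v1_5


def sign_headers(headers, privkey, key_id):
    """Build a draft-cavage style Signature header value over the given headers."""
    # One "name: value" line per header, lowercased names, in the order
    # they appear in the headers dict.
    used = [k.lower() for k in headers]
    sigstring = '\n'.join('{}: {}'.format(k.lower(), v) for k, v in headers.items())

    # RSA-SHA256 over the signing string, base64-encoded.
    digest = SHA256.new(sigstring.encode('utf-8'))
    signature = base64.b64encode(PKCS1_v1_5.new(privkey).sign(digest)).decode('utf-8')

    # Assemble the Signature header value that receiving servers verify.
    return ','.join([
        'keyId="{}"'.format(key_id),
        'algorithm="rsa-sha256"',
        'headers="{}"'.format(' '.join(used)),
        'signature="{}"'.format(signature),
    ])
```

In the diff above, '(request-target)' and 'Host' are added only so they can be covered by the signature and are popped again before the actual GET: '(request-target)' is a pseudo-header of the signature scheme rather than a real HTTP header, and aiohttp sets the Host header itself when sending the request.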