move app to __init__.py
This commit is contained in:
parent
a5dd6f1abd
commit
d322d41f07
6 changed files with 19 additions and 36 deletions
|
@@ -1,14 +1,8 @@
|
|||
__version__ = '0.2.0'
|
||||
|
||||
from aiohttp.web import Application
|
||||
|
||||
from . import logger
|
||||
|
||||
|
||||
APP = None
|
||||
|
||||
def get_app():
|
||||
return APP
|
||||
|
||||
def set_app(app):
|
||||
global APP
|
||||
|
||||
APP = app
|
||||
app = Application()
|
||||
|
|
|
@@ -1,8 +0,0 @@
|
|||
from aiohttp.web import Application
|
||||
|
||||
from . import set_app
|
||||
|
||||
|
||||
app = Application()
|
||||
|
||||
set_app(app)
|
|
@@ -8,8 +8,7 @@ import platform
|
|||
from aiohttp.web import AppRunner, TCPSite
|
||||
from cachetools import LRUCache
|
||||
|
||||
from . import misc, views
|
||||
from .application import app
|
||||
from . import app, misc, views
|
||||
from .config import DotDict, RelayConfig
|
||||
from .database import RelayDatabase
|
||||
from .misc import follow_remote_actor, unfollow_remote_actor
|
||||
|
|
|
@@ -13,7 +13,7 @@ from json.decoder import JSONDecodeError
|
|||
from urllib.parse import urlparse
|
||||
from uuid import uuid4
|
||||
|
||||
from . import get_app
|
||||
from . import app
|
||||
from .http_debug import http_debug
|
||||
|
||||
|
||||
|
@@ -34,10 +34,10 @@ def create_signature_header(headers):
|
|||
sigstring = build_signing_string(headers, used_headers)
|
||||
|
||||
sig = {
|
||||
'keyId': get_app()['config'].keyid,
|
||||
'keyId': app['config'].keyid,
|
||||
'algorithm': 'rsa-sha256',
|
||||
'headers': ' '.join(used_headers),
|
||||
'signature': sign_signing_string(sigstring, get_app()['database'].PRIVKEY)
|
||||
'signature': sign_signing_string(sigstring, app['database'].PRIVKEY)
|
||||
}
|
||||
|
||||
chunks = ['{}="{}"'.format(k, v) for k, v in sig.items()]
|
||||
|
@@ -55,7 +55,7 @@ def distill_object_id(activity):
|
|||
|
||||
|
||||
def distill_inboxes(actor, object_id):
|
||||
database = get_app()['database']
|
||||
database = app['database']
|
||||
origin_hostname = urlparse(object_id).hostname
|
||||
actor_inbox = get_actor_inbox(actor)
|
||||
targets = []
|
||||
|
@@ -68,14 +68,14 @@ def distill_inboxes(actor, object_id):
|
|||
|
||||
|
||||
def generate_body_digest(body):
|
||||
bodyhash = get_app()['cache'].digests.get(body)
|
||||
bodyhash = app['cache'].digests.get(body)
|
||||
|
||||
if bodyhash:
|
||||
return bodyhash
|
||||
|
||||
h = SHA256.new(body.encode('utf-8'))
|
||||
bodyhash = base64.b64encode(h.digest()).decode('utf-8')
|
||||
get_app()['cache'].digests[body] = bodyhash
|
||||
app['cache'].digests[body] = bodyhash
|
||||
|
||||
return bodyhash
|
||||
|
||||
|
@@ -146,8 +146,8 @@ async def fetch_nodeinfo(domain):
|
|||
|
||||
|
||||
async def follow_remote_actor(actor_uri):
|
||||
config = get_app()['config']
|
||||
database = get_app()['database']
|
||||
config = app['config']
|
||||
database = app['database']
|
||||
|
||||
actor = await request(actor_uri)
|
||||
inbox = get_actor_inbox(actor)
|
||||
|
@@ -171,8 +171,8 @@ async def follow_remote_actor(actor_uri):
|
|||
|
||||
|
||||
async def unfollow_remote_actor(actor_uri):
|
||||
config = get_app()['config']
|
||||
database = get_app()['database']
|
||||
config = app['config']
|
||||
database = app['database']
|
||||
|
||||
actor = await request(actor_uri)
|
||||
|
||||
|
@@ -204,7 +204,7 @@ async def request(uri, data=None, force=False, sign_headers=True):
|
|||
## If a get request and not force, try to use the cache first
|
||||
if not data and not force:
|
||||
try:
|
||||
return get_app()['cache'].json[uri]
|
||||
return app['cache'].json[uri]
|
||||
|
||||
except KeyError:
|
||||
pass
|
||||
|
@@ -247,7 +247,7 @@ async def request(uri, data=None, force=False, sign_headers=True):
|
|||
|
||||
try:
|
||||
# json_serializer=DotDict maybe?
|
||||
async with ClientSession(trace_configs=http_debug()) as session, get_app()['semaphore']:
|
||||
async with ClientSession(trace_configs=http_debug()) as session, app['semaphore']:
|
||||
async with session.request(method, uri, headers=headers, data=data) as resp:
|
||||
## aiohttp has been known to leak if the response hasn't been read,
|
||||
## so we're just gonna read the request no matter what
|
||||
|
@@ -264,7 +264,7 @@ async def request(uri, data=None, force=False, sign_headers=True):
|
|||
|
||||
logging.debug(f'{uri} >> resp {resp_payload}')
|
||||
|
||||
get_app()['cache'].json[uri] = resp_payload
|
||||
app['cache'].json[uri] = resp_payload
|
||||
return resp_payload
|
||||
|
||||
except JSONDecodeError:
|
||||
|
|
|
@@ -3,8 +3,7 @@ import logging
|
|||
|
||||
from uuid import uuid4
|
||||
|
||||
from . import misc
|
||||
from .application import app
|
||||
from . import app, misc
|
||||
from .misc import distill_inboxes, distill_object_id, request
|
||||
|
||||
|
||||
|
|
|
@@ -5,8 +5,7 @@ import traceback
|
|||
from aiohttp.web import HTTPForbidden, HTTPUnauthorized, Response, json_response
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from . import __version__, misc
|
||||
from .application import app
|
||||
from . import __version__, app, misc
|
||||
from .http_debug import STATS
|
||||
from .processors import run_processor
|
||||
|
||||
|
|
Loading…
Reference in a new issue