basic auth
This commit is contained in:
parent
7b5090abf8
commit
4838761dc7
4
.gitignore
vendored
4
.gitignore
vendored
|
@ -1,3 +1,7 @@
|
|||
hecc-data
|
||||
hecc.sh
|
||||
build*
|
||||
|
||||
# ---> Python
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
|
|
20
README.md
20
README.md
|
@ -1,3 +1,19 @@
|
|||
# hecc
|
||||
# Hide Every Collection, Comrad! (HECC)
|
||||
|
||||
Web proxy to require authentication for the public profile pages on Mastodon
|
||||
Web proxy for Mastodon that puts public profiles behind an auth layer.
|
||||
|
||||
## How it works
|
||||
|
||||
HECC sits between Mastodon and your front-facing web proxy to intercept incoming requests. If a profile, a toot, or any related json is requested, it will be blocked unless authenticated.
|
||||
|
||||
Note: Still very much a WIP. Currently it's just simple HTTP auth, but I plan on adding the ability to auth via a local Mastodon account or OAuth.
|
||||
|
||||
## Installation
|
||||
|
||||
Python 3.6.0+ (3.8.0 recommended)
|
||||
|
||||
`python3 setup.py install`
|
||||
|
||||
## Configuration
|
||||
|
||||
(eventually)
|
||||
|
|
1
hecc/__init__.py
Normal file
1
hecc/__init__.py
Normal file
|
@ -0,0 +1 @@
|
|||
'''heck'''
|
29
hecc/__main__.py
Normal file
29
hecc/__main__.py
Normal file
|
@ -0,0 +1,29 @@
|
|||
#!/usr/bin/env python3
'''Entry point for HECC.

Run as ``python -m hecc`` to start the proxy, or ``python -m hecc install``
to write a startup shell script into the Mastodon directory.
'''
import sys
import os
import stat

from os import environ as env
from .routes import main


if 'install' in sys.argv:
    from .config import mastodir, logging

    script = f'{mastodir}/hecc.sh'
    # bug fix: the subshell must expand the variable ("$MASTODIR"),
    # not cd into a literal directory named MASTODIR
    start_script = f'''#!/bin/sh
export MASTODIR={mastodir}

(cd "$MASTODIR" && python -m hecc)'''

    with open(script, 'w') as sh:
        sh.write(start_script)

    if os.path.isfile(script):
        os.chmod(script, 0o754)  # rwxr-xr-- (was the magic number 492)
        logging.info(f'Startup script saved as {script}')

    else:
        # a failed write is an error, not an info-level event
        logging.error(f'Failed to write script as {script}')

else:
    main()
|
79
hecc/config.py
Normal file
79
hecc/config.py
Normal file
|
@ -0,0 +1,79 @@
|
|||
'''Runtime configuration for HECC.

Reads the Mastodon and HECC environment files, configures the shared
logger, and exposes the MASTOCONFIG / HECCCONFIG dictionaries.
'''
import sys
import os

import logging as logger

from os import environ as env
from os.path import isdir, isfile, abspath, dirname, basename
from envbash import load_envbash

from .functions import bool_check


VERSION = '0.1'

# Mastodon installation directory; defaults to the current working directory.
mastodir = env.get('MASTODIR', os.getcwd())
stor_path = abspath(f'{mastodir}/hecc-data')


if not isdir(stor_path):
    os.makedirs(stor_path, exist_ok=True)


# LOGDATE toggles timestamps in log lines (default: on).
if not bool_check(env.get('LOGDATE', 'yes').lower()):
    log_date = ''

else:
    log_date = '[%(asctime)s] '


# Root logger shared by the whole package (imported as ``logging``).
logging = logger.getLogger()
logging.setLevel(logger.DEBUG)
# bug fix: log_date already carries its trailing space -- the old
# f'{log_date} ...' produced a double space (or a stray leading space
# when timestamps were disabled).
log_format = f'{log_date}%(levelname)s: %(message)s'

logger.addLevelName(5, 'VERBOSE')
logger.addLevelName(30, 'WARN')
logger.addLevelName(50, 'CRIT')

console = logger.StreamHandler()
console.name = 'Console Log'
console.level = logger.INFO
console.formatter = logger.Formatter(log_format)

logging.addHandler(console)


# Paths of the running script (PyInstaller-aware via sys.frozen/_MEIPASS).
full_path = abspath(sys.executable) if getattr(sys, 'frozen', False) else abspath(__file__)
script_path = getattr(sys, '_MEIPASS', dirname(abspath(__file__)))
script_name = basename(full_path)


if not isfile(f'{mastodir}/.env.production'):
    logging.error(f'Mastodon environment file doesn\'t exist: {mastodir}/.env.production')

else:
    load_envbash(f'{mastodir}/.env.production')

if not isfile(f'{stor_path}/production.env'):
    logging.error(f'HECC environment file doesn\'t exist: {stor_path}/production.env')

else:
    load_envbash(f'{stor_path}/production.env')

# Values read from Mastodon's .env.production (with its documented defaults).
MASTOCONFIG = {
    'domain': env.get('WEB_DOMAIN', env.get('LOCAL_DOMAIN', 'localhost:3000')),
    'dbhost': env.get('DB_HOST', '/var/run/postgresql'),
    'dbport': env.get('DB_PORT', 5432),
    'dbname': env.get('DB_NAME', 'mastodon_production'),
    'dbuser': env.get('DB_USER', env.get('USER')),
    'dbpass': env.get('DB_PASS')
}

# HECC's own settings fall back to the Mastodon DB settings above.
HECCCONFIG = {
    'host': env.get('HECC_HOST', '127.0.0.1'),
    'port': env.get('HECC_PORT', 3001),
    'dbhost': env.get('HECC_DBHOST', MASTOCONFIG['dbhost']),
    'dbport': env.get('HECC_DBPORT', MASTOCONFIG['dbport']),
    'dbname': env.get('HECC_DBNAME', 'hecc'),
    'dbuser': env.get('HECC_DBUSER', MASTOCONFIG['dbuser']),
    'dbpass': env.get('HECC_DBPASS', MASTOCONFIG['dbpass'])
}
|
||||
|
0
hecc/database.py
Normal file
0
hecc/database.py
Normal file
109
hecc/functions.py
Normal file
109
hecc/functions.py
Normal file
|
@ -0,0 +1,109 @@
|
|||
import json
import logging
import re

import aiohttp
import aiohttp.web

from colour import Color
|
||||
|
||||
|
||||
# HTTP status codes mapped to aiohttp.web exception name suffixes
# (``HTTP<name>``); used by json_error below.
# bug fix: 404 was listed twice (the duplicate key silently overwrote).
error_codes = {
    400: 'BadRequest',
    401: 'Unauthorized',
    403: 'Forbidden',
    404: 'NotFound',
    500: 'InternalServerError'
}
|
||||
|
||||
|
||||
def bool_check(value):
    '''Interpret a string as a boolean flag.

    Returns True for yes/true/enable, False for no/false/disable or the
    empty string (case-insensitive), and the original value unchanged for
    anything else.
    '''
    # bug fix: the old lists contained True/None/False, which a lowered
    # string can never equal -- those entries were dead weight.
    lowered = value.lower()

    if lowered in ('yes', 'true', 'enable'):
        return True

    if lowered in ('no', 'false', 'disable', ''):
        return False

    return value
|
||||
|
||||
|
||||
def json_error(code, error):
    '''Raise the aiohttp HTTP exception matching ``code`` with a JSON body.

    code: HTTP status code -- one of ``error_codes`` or 418.
    error: human-readable message placed in the JSON body.

    Always raises; never returns. Unknown codes become a 500 DevError.
    '''
    error_body = json.dumps({'error': error})
    cont_type = 'application/json'

    if code == 418:
        # aiohttp ships no teapot response, so build one on the fly
        # (the old code referenced HTTPTeapot, which is not defined here)
        teapot = type('HTTPTeapot', (aiohttp.web.HTTPError,), {'status_code': 418})
        raise teapot(body=error_body, content_type=cont_type)

    if code not in error_codes:
        logging.error(f'Hey! You specified a wrong error code: {code} {error}')
        error_body = json.dumps({'error': 'DevError'})

        raise aiohttp.web.HTTPInternalServerError(body=error_body, content_type=cont_type)

    # getattr instead of eval: same attribute lookup, no code execution
    # built from strings
    raise getattr(aiohttp.web, f'HTTP{error_codes[code]}')(body=error_body, content_type=cont_type)
|
||||
|
||||
|
||||
def user_check(path):
    '''Return True when ``path`` points at a user profile page.

    Profile pages look like '/users/<name>' or '/@<name>' (optionally with
    a '.json' suffix); numeric final segments (toot ids) are not profiles.
    '''
    parsed = path.replace('.json', '').split('/')

    # bug fix: guard against paths with fewer than two segments, which
    # previously raised IndexError on parsed[1]
    if len(parsed) < 2:
        return False

    if (parsed[1] == 'users' and len(parsed) < 4) or (parsed[1].startswith('@') and len(parsed) < 3):
        try:
            # a numeric tail is an id, not a username
            int(parsed[-1])
            return False

        except ValueError:
            return True

    return False
|
||||
|
||||
|
||||
|
||||
class color:
    '''Colour-manipulation helpers built on top of the ``colour`` package.

    Each method accepts a colour name or a bare 3/6-digit hex string and
    returns a hex string (``rgba`` returns a CSS rgba() string).
    '''

    def __init__(self):
        # Prefix bare hex values with '#'; anything else (named colours,
        # '#'-prefixed values) is handed to Color untouched.
        self.check = lambda color: Color(f'#{str(color)}' if re.search(r'^(?:[0-9a-fA-F]{3}){1,2}$', color) else color)

    def multi(self, multiplier):
        # clamp the multiplier into [0, 1]
        return min(1, max(0, multiplier))

    def lighten(self, color, multiplier):
        '''Move luminance towards 1 by the clamped multiplier.'''
        shade = self.check(color)
        shade.luminance += (1 - shade.luminance) * self.multi(multiplier)

        return shade.hex_l

    def darken(self, color, multiplier):
        '''Move luminance towards 0 by the clamped multiplier.'''
        shade = self.check(color)
        shade.luminance -= shade.luminance * self.multi(multiplier)

        return shade.hex_l

    def saturate(self, color, multiplier):
        '''Move saturation towards 1 by the clamped multiplier.'''
        shade = self.check(color)
        shade.saturation += (1 - shade.saturation) * self.multi(multiplier)

        return shade.hex_l

    def desaturate(self, color, multiplier):
        '''Move saturation towards 0 by the clamped multiplier.'''
        shade = self.check(color)
        shade.saturation -= shade.saturation * self.multi(multiplier)

        return shade.hex_l

    def rgba(self, color, transparency):
        '''Render the colour as a CSS rgba() string with clamped alpha.'''
        shade = self.check(color)

        red = shade.red * 255
        green = shade.green * 255
        blue = shade.blue * 255
        trans = self.multi(transparency)

        return f'rgba({red:0.2f}, {green:0.2f}, {blue:0.2f}, {trans:0.2f})'
|
168
hecc/middleware.py
Normal file
168
hecc/middleware.py
Normal file
|
@ -0,0 +1,168 @@
|
|||
import asyncio
|
||||
import aiohttp
|
||||
import json
|
||||
import logging
|
||||
import binascii
|
||||
import base64
|
||||
|
||||
from urllib.parse import urlparse
|
||||
from aiohttp.http_exceptions import *
|
||||
|
||||
from .signature import validate, pass_hash
|
||||
from .functions import json_error, user_check
|
||||
from .config import MASTOCONFIG, script_path
|
||||
|
||||
|
||||
# I'm a little teapot :3
# Raised by http_filter for requests from blocklisted user agents.
class HTTPTeapot(aiohttp.web.HTTPError):
    # RFC 2324 "I'm a teapot"
    status_code = 418
|
||||
|
||||
|
||||
# Substrings matched case-insensitively against the User-Agent header;
# any hit makes http_filter answer with a 418.
blocked_agents = [
    'gabsocial',
    'kiwifarms',
    'fedichive',
    'liveview',
    'freespeech',
    'shitposter.club',
    'baraag',
    'gameliberty',
    'neckbeard'
]

# Path prefixes that http_signatures puts behind authentication.
auth_paths = [
    '/@',
    '/users'
]
|
||||
|
||||
|
||||
async def raise_auth_error(request, auth_realm):
    '''Abort the request with 401 + a basic-auth challenge.

    Sends the WWW-Authenticate header for ``auth_realm`` and the
    templates/unauthorized.html page as the body. Always raises.
    '''
    raise aiohttp.web.HTTPUnauthorized(
        headers={aiohttp.hdrs.WWW_AUTHENTICATE: f'Basic realm={auth_realm}'},
        body=open(f'{script_path}/templates/unauthorized.html').read(),
        content_type='text/html'
    )
|
||||
|
||||
|
||||
async def passthrough(path, headers, post=None, query=None):
    '''Forward a request to the local Mastodon instance and re-raise its response.

    path: request path (expected to start with '/').
    headers: headers to forward.
    post: optional request body; its presence switches the method to POST.
    query: optional raw query string including the leading '?'.

    Always raises aiohttp.web.HTTPOk carrying Mastodon's response body.
    '''
    reqtype = 'POST' if post else 'GET'
    # bug fix: querydata was computed but the raw ``query`` (possibly None,
    # which interpolated as the literal string "None") was used in the URL
    querydata = query if query else ''

    # bug fix: ``path`` already starts with '/', so don't insert another one
    async with aiohttp.request(reqtype, f'https://{MASTOCONFIG["domain"]}{path}{querydata}', headers=headers, data=post) as resp:
        if resp.status not in [200, 202]:
            logging.warning(f'Received error {resp.status} from Mastodon')

        data = await resp.read()

        raise aiohttp.web.HTTPOk(body=data, content_type=resp.content_type)
|
||||
|
||||
|
||||
async def http_redirect(app, handler):
    '''Middleware factory: mirror every request to the Mastodon backend.

    Rebuilds the original query string, forwards the request via
    passthrough(), which raises the backend's response -- so the wrapped
    handler is effectively never reached.
    '''
    async def redirect_handler(request):
        headers = {'Host': MASTOCONFIG["domain"]}
        json_req = request.headers.get('Accept') == 'application/json'
        querydata = request.query

        # rebuild the raw query string from the parsed parameters
        rawquery = '?'

        for var in querydata:
            if rawquery == '?':
                rawquery += f'{var}={querydata[var]}'

            else:
                rawquery += f'&{var}={querydata[var]}'

        # bug fix: an empty query previously forwarded a bare '?';
        # send an empty string instead
        query = rawquery if rawquery != '?' else ''

        if json_req:
            headers.update({'Accept': 'application/json'})

        try:
            data = await request.json()

        except Exception:
            # no JSON body (plain GETs etc.)
            data = None

        await passthrough(request.path, headers, post=data, query=query)

        # unreachable in practice: passthrough always raises
        return (await handler(request))
    return redirect_handler
|
||||
|
||||
|
||||
async def http_signatures(app, handler):
    '''Middleware factory: gate profile endpoints behind authentication.

    JSON/ActivityPub requests must carry a valid HTTP signature; browser
    requests are challenged with HTTP basic auth.
    '''
    async def http_signatures_handler(request):
        request['validated'] = False
        json_req = request.headers.get('Accept') == 'application/json'

        if any(map(request.path.startswith, auth_paths)) and not user_check(request.path):
            if json_req or request.path.endswith('.json'):
                if 'signature' in request.headers:
                    data = await request.json()
                    print(json.dumps(data, indent=' '))

                    if 'actor' not in data:
                        raise json_error(401, 'signature check failed, no actor in message')

                    actor = data["actor"]
                    if not (await validate(actor, request)):
                        logging.info(f'Signature validation failed for: {actor}')
                        raise json_error(401, 'signature check failed, signature did not match key')

                else:
                    raise json_error(401, 'Missing signature')

            else:
                # FIXME: hard-coded credentials -- replace with a real
                # user store before deploying anywhere serious
                auth_username = 'admin'
                auth_password = 'heck'
                auth_realm = 'Nope'

                auth_header = request.headers.get(aiohttp.hdrs.AUTHORIZATION)

                if auth_header is None or not auth_header.startswith('Basic '):
                    return await raise_auth_error(request, auth_realm)

                try:
                    secret = auth_header[6:].encode('utf-8')
                    auth_decoded = base64.decodebytes(secret).decode('utf-8')

                except (UnicodeDecodeError, UnicodeEncodeError, binascii.Error):
                    # bug fix: this call was missing the auth_realm argument
                    # and raised TypeError instead of challenging the client
                    await raise_auth_error(request, auth_realm)

                # bug fix: split on the first ':' only, so passwords may
                # themselves contain colons (RFC 7617)
                credentials = auth_decoded.split(':', 1)

                if len(credentials) != 2:
                    await raise_auth_error(request, auth_realm)

                username, password = credentials

                if username != auth_username or password != auth_password:
                    await raise_auth_error(request, auth_realm)

        return (await handler(request))
    return http_signatures_handler
|
||||
|
||||
|
||||
async def http_filter(app, handler):
    '''Middleware factory: reject requests from blocklisted user agents.'''
    async def http_filter_handler(request):
        agent = request.headers.get('User-Agent', '').lower()

        if any(blocked in agent for blocked in blocked_agents):
            raise HTTPTeapot(body='418 This teapot kills fascists', content_type='text/plain')

        return (await handler(request))
    return http_filter_handler
|
||||
|
||||
|
||||
# Fucking trailing slashes
async def http_trailing_slash(app, handler):
    '''Middleware factory: redirect "/foo/" to "/foo" (the root "/" is untouched).'''
    async def http_trailing_slash_handler(request):
        path = request.path

        if path.endswith('/') and path != '/':
            return aiohttp.web.HTTPFound(path[:-1])

        return (await handler(request))
    return http_trailing_slash_handler
|
||||
|
||||
|
||||
# bug fix: the old list exported *_middleware names that do not exist in
# this module; list the middleware factories actually defined above.
__all__ = ['http_redirect', 'http_signatures', 'http_filter', 'http_trailing_slash']
|
113
hecc/routes.py
Normal file
113
hecc/routes.py
Normal file
|
@ -0,0 +1,113 @@
|
|||
import os
|
||||
import sys
|
||||
import asyncio
|
||||
import aiohttp
|
||||
import aiohttp_jinja2
|
||||
import jinja2_markdown
|
||||
|
||||
from jinja2 import select_autoescape, FileSystemLoader
|
||||
from ipaddress import ip_address as address
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from .config import HECCCONFIG, VERSION, script_path, logging
|
||||
from .functions import color
|
||||
from . import middleware
|
||||
|
||||
|
||||
def webserver():
    '''Build the aiohttp Application: middleware chain, jinja2 setup, routes.'''
    from . import views

    # order matters: filter bots first, then auth, then proxy to Mastodon
    web = aiohttp.web.Application(middlewares=[
        middleware.http_filter,
        middleware.http_signatures,
        middleware.http_redirect
    ])

    async def global_vars(request):
        # values and helpers made available to every jinja2 template
        return {
            'VERSION': VERSION,
            'len': len,
            'urlparse': urlparse,
            'lighten': col.lighten,
            'darken': col.darken,
            'saturate': col.saturate,
            'desaturate': col.desaturate,
            'rgba': col.rgba
        }

    # one shared helper instance instead of five separate color() objects
    col = color()

    aiohttp_jinja2.setup(web,
        loader=FileSystemLoader(script_path),
        autoescape=select_autoescape(['html', 'css']),
        extensions=[jinja2_markdown.MarkdownExtension],
        context_processors=[global_vars],
        lstrip_blocks=True,
        trim_blocks=True
    )

    web.add_routes([
        aiohttp.web.route('*', '/', views.heck),
        aiohttp.web.route('*', '/@{user}', views.heck),
        aiohttp.web.route('*', '/@{user}/{post}', views.heck),
        aiohttp.web.route('*', '/users/{user}', views.heck),
        aiohttp.web.route('*', '/users/{user}/{post}', views.heck)
    ])

    return web
|
||||
|
||||
|
||||
async def start_webserver():
    '''Create the app runner and bind it to the configured socket or address.'''
    app = webserver()
    runner = aiohttp.web.AppRunner(app, access_log_format='%{X-Real-Ip}i "%r" %s %b "%{User-Agent}i"')
    await runner.setup()

    listen = HECCCONFIG['host']
    port = HECCCONFIG['port']

    if listen.startswith('unix:'):
        if sys.platform != 'win32':
            sock_listen = listen.replace('unix:', '')

            logging.info(f'Starting webserver at socket: {sock_listen}')
            site = aiohttp.web.UnixSite(runner, sock_listen)

        else:
            logging.error('Windows cannot use unix sockets. Use an IP address instead. Exiting...')
            sys.exit()

    else:
        try:
            address(listen)

        except ValueError:
            # bug fix: the message claimed "0.0.0.0" but the code falls
            # back to 127.0.0.1 -- say what actually happens
            logging.warning('Invalid IP address. Listening on "127.0.0.1" instead.')
            listen = '127.0.0.1'

        try:
            int(port)

        except ValueError:
            logging.warning('Invalid port. Using 3621 instead.')
            port = 3621

        logging.info(f'Starting webserver at address: {listen}:{port}')
        site = aiohttp.web.TCPSite(runner, listen, port)

    await site.start()
|
||||
|
||||
|
||||
def main():
    '''Schedule the webserver on the event loop and run until Ctrl-C.'''
    try:
        event_loop = asyncio.get_event_loop()
        asyncio.ensure_future(start_webserver())
        event_loop.run_forever()

    except KeyboardInterrupt:
        logging.info('Bye!')
|
147
hecc/signature.py
Normal file
147
hecc/signature.py
Normal file
|
@ -0,0 +1,147 @@
|
|||
import aiohttp
|
||||
import aiohttp.web
|
||||
import binascii
|
||||
import base64
|
||||
import json
|
||||
import logging
|
||||
|
||||
from aiohttp.http_exceptions import *
|
||||
from Crypto.PublicKey import RSA
|
||||
from Crypto.Hash import SHA, SHA256, SHA512
|
||||
from Crypto.Signature import PKCS1_v1_5
|
||||
|
||||
from .config import MASTOCONFIG
|
||||
|
||||
|
||||
def pass_hash():
    # Return the SHA-512 hex digest of the configured API password.
    # NOTE(review): ``NET`` is not defined or imported anywhere in this
    # module -- calling this raises NameError. Confirm where this config
    # dict was meant to come from.
    password_hash = SHA512.new(NET['api_pass'].encode('UTF-8'))
    return password_hash.hexdigest()
|
||||
|
||||
|
||||
# Signature hash-algorithm names mapped to their PyCrypto hash modules,
# used by validate() to pick the digest from the 'algorithm' field.
HASHES = {
    'sha1': SHA,
    'sha256': SHA256,
    'sha512': SHA512
}
|
||||
|
||||
|
||||
def split_signature(sig):
    '''Parse an HTTP Signature header value into a dict.

    Input looks like ``keyId="...",algorithm="...",headers="date host"``;
    the 'headers' entry is split into a list (defaulting to ['date']).
    '''
    parsed = {"headers": "date"}

    for piece in sig.strip().split(','):
        key, _, value = piece.partition('=')
        parsed[key] = value.strip('\"')

    parsed['headers'] = parsed['headers'].split()
    return parsed
|
||||
|
||||
|
||||
def build_signing_string(headers, used_headers):
    '''Join "name: value" lines for the listed headers, lowercasing each name.'''
    return '\n'.join(f'{name.lower()}: {headers[name]}' for name in used_headers)
|
||||
|
||||
|
||||
def sign_signing_string(sigstring, key):
    # Sign ``sigstring`` with the RSA ``key`` (PKCS#1 v1.5 over SHA-256)
    # and return the base64 signature, memoised per signing string.
    # NOTE(review): ``cache`` is not defined or imported in this module --
    # calling this raises NameError. Confirm where the cache object lives.
    if sigstring not in cache.sigstrings.items:
        pkcs = PKCS1_v1_5.new(key)
        h = SHA256.new()
        h.update(sigstring.encode('ascii'))
        sigdata = pkcs.sign(h)

        sigdata = base64.b64encode(sigdata)
        cache.sigstrings.store(sigstring, sigdata.decode('ascii'))

    return cache.sigstrings.fetch(sigstring)
|
||||
|
||||
|
||||
def sign_headers(headers, key, key_id):
    '''Build a complete HTTP Signature header value for ``headers``.

    Lowercases all header names, signs the resulting signing string with
    ``key``, and returns the comma-joined ``k="v"`` parameter list.
    '''
    lowered = {name.lower(): value for name, value in headers.items()}
    header_names = lowered.keys()

    signature = {
        'keyId': key_id,
        'algorithm': 'rsa-sha256',
        'headers': ' '.join(header_names)
    }

    signing_string = build_signing_string(lowered, header_names)
    signature['signature'] = sign_signing_string(signing_string, key)

    return ','.join(f'{param}="{value}"' for param, value in signature.items())
|
||||
|
||||
|
||||
async def fetch_actor(uri, force=False):
    # Fetch an ActivityPub actor document from ``uri``; returns the parsed
    # JSON dict, or None on any error or non-200 response.
    # NOTE(review): ``VERSION``, ``PRIVKEY`` and ``http_debug`` are not
    # defined or imported in this module -- confirm before relying on this.
    # NOTE(review): the ``force`` parameter is currently unused.
    domain = MASTOCONFIG["domain"]

    try:
        headers = {
            '(request-target)': uri,
            'Accept': 'application/activity+json',
            'User-Agent': f'MAW/{VERSION}; https://{domain}'
        }
        headers['signature'] = sign_headers(headers, PRIVKEY, f'https://{domain}/actor#main-key')
        # the pseudo-header only belongs in the signing string, not on the wire
        headers.pop('(request-target)')

        async with aiohttp.ClientSession(trace_configs=[http_debug()]) as session:
            async with session.get(uri, headers=headers) as resp:
                if resp.status != 200:
                    return

                data = await resp.json(encoding='utf-8')

                return data

    except Exception as e:
        logging.info('Caught %r while fetching actor %r.', e, uri)
        return None
|
||||
|
||||
|
||||
async def fetch_actor_key(actor):
    # Return the actor's RSA public key (cached), or None when the actor
    # document cannot be fetched or carries no usable publicKeyPem.
    # NOTE(review): ``cache`` is not defined or imported in this module --
    # calling this raises NameError. Confirm where the cache object lives.
    if actor not in cache.keys.items:
        actor_data = await fetch_actor(actor)

        if not actor_data:
            return None

        if 'publicKey' not in actor_data:
            return None

        if 'publicKeyPem' not in actor_data['publicKey']:
            return None

        cache.keys.store(actor, actor_data['publicKey']['publicKeyPem'])

    return RSA.importKey(cache.keys.fetch(actor))
|
||||
|
||||
|
||||
async def validate(actor, request):
    '''Verify the HTTP signature on ``request`` against ``actor``'s public key.

    Returns True when the signature matches; the outcome is also stored in
    request['validated'].
    '''
    pubkey = await fetch_actor_key(actor)
    if not pubkey:
        return False

    logging.debug(f'actor key: {pubkey}')

    headers = request.headers.copy()
    # synthesise the pseudo-header the HTTP Signature spec signs over
    headers['(request-target)'] = ' '.join([request.method.lower(), request.path])

    sig = split_signature(headers['signature'])
    logging.debug(f'sigdata: {sig}')

    sigstring = build_signing_string(headers, sig['headers'])
    logging.debug(f'sigstring: {sigstring}')

    sign_alg, _, hash_alg = sig['algorithm'].partition('-')
    logging.debug(f'sign alg: {sign_alg}, hash alg: {hash_alg}')

    sigdata = base64.b64decode(sig['signature'])

    pkcs = PKCS1_v1_5.new(pubkey)
    h = HASHES[hash_alg].new()
    h.update(sigstring.encode('ascii'))
    result = pkcs.verify(h, sigdata)

    request['validated'] = result

    # bug fix: this debug line was missing its f-string prefix and logged
    # the literal text '{result}'
    logging.debug(f'validates? {result}')
    return result
|
24
hecc/templates/unauthorized.html
Normal file
24
hecc/templates/unauthorized.html
Normal file
|
@ -0,0 +1,24 @@
|
|||
<!-- Served as the body of the 401 basic-auth challenge
     (see middleware.raise_auth_error) -->
<html>
<head>
	<title>Nope.mov</title>
	<style>
		body {
			background-color: #111;
			color: #ddd;
		}

		.center {
			width: 300px;
			height: 300px;
			position: absolute;
			left: 50%;
			top: 50%;
			margin-left: -150px;
			margin-top: -150px;
		}
	</style>
</head>
<body>
	<img src="https://static.barkshark.xyz/main/img/YouDidntSayTheMagicWord.gif" class="center" />
</body>
</html>
|
9
hecc/views.py
Normal file
9
hecc/views.py
Normal file
|
@ -0,0 +1,9 @@
|
|||
import aiohttp


async def heck(request):
    # Placeholder view returning a small JSON blob for every matched route.
    # NOTE(review): the routes in routes.py declare {user}/{post} variables,
    # never {path}, so this match_info lookup always yields None -- confirm
    # whether request.path was intended instead.
    data = {
        'path': request.match_info.get('path'),
        'msg': 'UvU'
    }

    return aiohttp.web.json_response(data)
|
5
reload.cfg
Normal file
5
reload.cfg
Normal file
|
@ -0,0 +1,5 @@
|
|||
exec = python3 -m hecc
|
||||
watch_ext = py, env
|
||||
ignore_dirs = build, data
|
||||
ignore_files = reload.py, test.py
|
||||
log_level = INFO
|
Loading…
Reference in a new issue