Merge pull request 'rework' (#3) from rework into main

Reviewed-on: #3
This commit is contained in:
Izalia Mae 2021-09-17 15:30:08 -04:00
commit 864849feb8
90 changed files with 5229 additions and 2037 deletions

2
.gitignore vendored
View file

@ -114,3 +114,5 @@ dmypy.json
# Pyre type checker
.pyre/
test*.py
reload.cfg

View file

@ -1,12 +0,0 @@
'''
IzzyLib by Zoey Mae
Licensed under the CNPL: https://git.pixie.town/thufie/CNPL
https://git.barkshark.xyz/izaliamae/izzylib
'''
import sys
assert sys.version_info >= (3, 6)
__version_tpl__ = (0, 5, 0)
__version__ = '.'.join([str(v) for v in __version_tpl__])

View file

@ -1,56 +0,0 @@
'''functions to alter colors in hex format'''
import re
from colour import Color
check = lambda color: Color(f'#{str(color)}' if re.search(r'^(?:[0-9a-fA-F]{3}){1,2}$', color) else color)
def _multi(multiplier):
if multiplier >= 1:
return 1
elif multiplier <= 0:
return 0
return multiplier
def lighten(color, multiplier):
col = check(color)
col.luminance += ((1 - col.luminance) * _multi(multiplier))
return col.hex_l
def darken(color, multiplier):
col = check(color)
col.luminance -= (col.luminance * _multi(multiplier))
return col.hex_l
def saturate(color, multiplier):
col = check(color)
col.saturation += ((1 - col.saturation) * _multi(multiplier))
return col.hex_l
def desaturate(color, multiplier):
col = check(color)
col.saturation -= (col.saturation * _multi(multiplier))
return col.hex_l
def rgba(color, transparency):
col = check(color)
red = col.red*255
green = col.green*255
blue = col.blue*255
trans = _multi(transparency)
return f'rgba({red:0.2f}, {green:0.2f}, {blue:0.2f}, {trans:0.2f})'
__all__ = ['lighten', 'darken', 'saturate', 'desaturate', 'rgba']

View file

@ -1,386 +0,0 @@
import sys
from contextlib import contextmanager
from datetime import datetime
from sqlalchemy import create_engine, ForeignKey, MetaData, Table
from sqlalchemy import Column as SqlColumn, types as Types
#from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.exc import OperationalError, ProgrammingError
from sqlalchemy.orm import sessionmaker
from . import logging
from .cache import LRUCache
from .misc import DotDict, RandomGen, NfsCheck, PrintMethods
SqlTypes = DotDict({t.lower(): getattr(Types, t) for t in dir(Types) if not t.startswith('_')})
class DataBase():
def __init__(self, dbtype='postgresql+psycopg2', tables={}, **kwargs):
self.engine_string = self.__engine_string(dbtype, kwargs)
self.db = create_engine(self.engine_string)
self.table = Tables(self, tables)
self.table_names = tables.keys()
self.classes = kwargs.get('row_classes', CustomRows())
self.cache = None
session_class = kwargs.get('session_class', Session)
self.session = lambda trans=True: session_class(self, trans)
self.SetupCache()
def __engine_string(self, dbtype, kwargs):
if not kwargs.get('database'):
raise MissingDatabaseError('Database not set')
engine_string = dbtype + '://'
if dbtype == 'sqlite':
if NfsCheck(kwargs.get('database')):
logging.error('Database file is on an NFS share which does not support locking. Any writes to the database will fail')
engine_string += '/' + kwargs.get('database')
else:
user = kwargs.get('user')
password = kwargs.get('pass')
host = kwargs.get('host', '/var/run/postgresql')
port = kwargs.get('port', 5432)
name = kwargs.get('name', 'postgres')
maxconn = kwargs.get('maxconnections', 25)
if user:
if password:
engine_string += f'{user}:{password}@'
else:
engine_string += user + '@'
if host == '/var/run/postgresql':
engine_string += '/' + name
else:
engine_string += f'{host}:{port}/{name}'
return engine_string
def close(self):
self.SetupCache()
def SetupCache(self):
self.cache = DotDict({table: LRUCache() for table in self.table_names})
def CreateTables(self, *tables):
new_tables = [self.table[table] for table in tables]
self.table.meta.create_all(bind=self.db, tables=new_tables)
def CreateDatabase(self):
if self.engine_string.startswith('postgresql'):
predb = create_engine(self.engine_string.replace(config.db.name, 'postgres', -1))
conn = predb.connect()
conn.execute('commit')
try:
conn.execute(f'CREATE DATABASE {config.db.name}')
except ProgrammingError:
'The database already exists, so just move along'
except Exception as e:
conn.close()
raise e from None
conn.close()
self.table.meta.create_all(self.db)
def execute(self, *args, **kwargs):
with self.session() as s:
return s.execute(*args, **kwargs)
class Session(object):
def __init__(self, db, trans=True):
self.db = db
self.classes = self.db.classes
self.session = sessionmaker(bind=db.db)()
self.table = self.db.table
self.cache = self.db.cache
self.trans = trans
# session aliases
self.s = self.session
self.begin = self.s.begin
self.commit = self.s.commit
self.rollback = self.s.rollback
self.query = self.s.query
self.execute = self.s.execute
self._setup()
if not self.trans:
self.commit()
def __enter__(self):
self.sessionid = RandomGen(10)
return self
def __exit__(self, exctype, value, tb):
if tb:
self.rollback()
else:
self.commit()
def _setup(self):
pass
def count(self, table_name, **kwargs):
return self.query(self.table[table_name]).filter_by(**kwargs).count()
def fetch(self, table_name, single=True, orderby=None, orderdir='asc', **kwargs):
table = self.table[table_name]
RowClass = self.classes.get(table_name.capitalize())
query = self.query(table).filter_by(**kwargs)
if not orderby:
rows = query.all()
else:
if orderdir == 'asc':
rows = query.order_by(getattr(table.c, orderby).asc()).all()
elif orderdir == 'desc':
rows = query.order_by(getattr(table.c, orderby).desc()).all()
else:
raise ValueError(f'Unsupported order direction: {orderdir}')
if single:
return RowClass(table_name, rows[0], self) if len(rows) > 0 else None
return [RowClass(table_name, row, self) for row in rows]
def insert(self, table_name, **kwargs):
row = self.fetch(table_name, **kwargs)
if row:
row.update_session(self, **kwargs)
return
table = self.table[table_name]
if getattr(table, 'timestamp', None) and not kwargs.get('timestamp'):
kwargs['timestamp'] = datetime.now()
res = self.execute(table.insert().values(**kwargs))
#return self.fetch(table_name, **kwargs)
def update(self, table=None, rowid=None, row=None, **data):
if row:
rowid = row.id
table = row._table_name
if not rowid or not table:
raise ValueError('Missing row ID or table')
tclass = self.table[table]
self.execute(tclass.update().where(tclass.c.id == rowid).values(**data))
def remove(self, table=None, rowid=None, row=None):
if row:
rowid = row.id
table = row._table_name
if not rowid or not table:
raise ValueError('Missing row ID or table')
row = self.execute(f'DELETE FROM {table} WHERE id={rowid}')
def DropTables(self):
tables = self.GetTables()
for table in tables:
self.execute(f'DROP TABLE {table}')
def GetTables(self):
rows = self.execute("SELECT name FROM sqlite_master WHERE type IN ('table','view') and name NOT LIKE 'sqlite_%'")
return [row[0] for row in rows]
def AppendColumn(self, tbl, col):
table = self.table[tbl]
try:
column = getattr(table.c, col)
except AttributeError:
logging.error(f'Table "{tbl}" does not have column "{col}"')
return
columns = [row[1] for row in self.execute(f'PRAGMA table_info({tbl})')]
if col in columns:
logging.info(f'Column "{col}" already exists')
return
sql = f'ALTER TABLE {tbl} ADD COLUMN {col} {column.type}'
if not column.nullable:
sql += ' NOT NULL'
if column.primary_key:
sql += ' PRIMARY KEY'
if column.unique:
sql += ' UNIQUE'
self.execute(sql)
def RemoveColumn(self, tbl, col):
table = self.table[tbl]
column = getattr(table, col, None)
columns = [row[1] for row in self.execute(f'PRAGMA table_info({tbl})')]
if col not in columns:
logging.info(f'Column "{col}" does not exist')
return
columns.remove(col)
coltext = ', '.join(columns)
self.execute(f'CREATE TABLE {tbl}_temp AS SELECT {coltext} FROM {tbl}')
self.execute(f'DROP TABLE {tbl}')
self.execute(f'ALTER TABLE {tbl}_temp RENAME TO {tbl}')
class CustomRows(object):
def get(self, name):
return getattr(self, name, self.Row)
class Row(DotDict):
#_filter_columns = lambda self, row: [attr for attr in dir(row) if not attr.startswith('_') and attr != 'metadata']
def __init__(self, table, row, session):
if not row:
return
super().__init__()
self._update(row._asdict())
self._db = session.db
self._table_name = table
self._columns = self.keys()
#self._columns = self._filter_columns(row)
self.__run__(session)
## Subclass Row and redefine this function
def __run__(self, s):
pass
def _filter_data(self):
data = {k: v for k,v in self.items() if k in self._columns}
for k,v in self.items():
if v.__class__ == DotDict:
data[k] = v.asDict()
return data
def asDict(self):
return self._filter_data()
def _update(self, new_data={}, **kwargs):
kwargs.update(new_data)
for k,v in kwargs.items():
if type(v) == dict:
self[k] = DotDict(v)
else:
self[k] = v
def delete(self):
with self._db.session() as s:
return self.delete_session(s)
def delete_session(self, s):
return s.remove(row=self)
def update(self, dict_data={}, **data):
dict_data.update(data)
self._update(dict_data)
with self._db.session() as s:
s.update(row=self, **self._filter_data())
def update_session(self, s, dict_data={}, **data):
return s.update(row=self, **dict_data, **data)
class Tables(DotDict):
def __init__(self, db, tables={}):
'"tables" should be a dict with the table names for keys and a list of Columns for values'
super().__init__()
self.db = db
self.meta = MetaData()
for name, table in tables.items():
self.__setup_table(name, table)
def __setup_table(self, name, table):
self[name] = Table(name, self.meta, *table)
def Column(name, stype=None, fkey=None, **kwargs):
if not stype and not kwargs:
if name == 'id':
return Column('id', 'integer', primary_key=True, autoincrement=True)
elif name == 'timestamp':
return Column('timestamp', 'datetime')
raise ValueError('Missing column type and options')
else:
options = [name, SqlTypes.get(stype.lower(), SqlTypes['string'])]
if fkey:
options.append(ForeignKey(fkey))
return SqlColumn(*options, **kwargs)
class MissingDatabaseError(Exception):
'''raise when the "database" kwargs is not set'''

View file

@ -1,23 +0,0 @@
class MissingHeadersError(Exception):
def __init__(self, headers: list):
self.headers = ', '.join(headers)
self.message = f'Missing required headers for verification: {self.headers}'
super().__init__(self.message)
def __str__(self):
return self.message
class VerificationError(Exception):
def __init__(self, string=None):
self.message = f'Failed to verify hash'
if string:
self.message += ' for ' + string
super().__init__(self.message)
def __str__(self):
return self.message

View file

@ -1,433 +0,0 @@
import functools, json, sys
from IzzyLib import logging
from IzzyLib.misc import DefaultDict, DotDict, Path
from base64 import b64decode, b64encode
from datetime import datetime
from io import BytesIO
from ssl import SSLCertVerificationError
from urllib.error import HTTPError
from urllib.parse import urlparse
from urllib.request import Request, urlopen
from . import error, __version__
try:
from Crypto.Hash import SHA256
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5
crypto_enabled = True
except ImportError:
logging.verbose('Pycryptodome module not found. HTTP header signing and verifying is disabled')
crypto_enabled = False
try:
from sanic.request import Request as SanicRequest
except ImportError:
logging.verbose('Sanic module not found. Request verification is disabled')
SanicRequest = False
try:
from PIL import Image
except ImportError:
logging.verbose('Pillow module not found. Image downloading is disabled')
Image = False
Client = None
class HttpClient(object):
def __init__(self, headers={}, useragent=f'IzzyLib/{__version__}', appagent=None, proxy_type='https', proxy_host=None, proxy_port=None):
proxy_ports = {
'http': 80,
'https': 443
}
if proxy_type not in ['http', 'https']:
raise ValueError(f'Not a valid proxy type: {proxy_type}')
self.headers=headers
self.agent = f'{useragent} ({appagent})' if appagent else useragent
self.proxy = DotDict({
'enabled': True if proxy_host else False,
'ptype': proxy_type,
'host': proxy_host,
'port': proxy_ports[proxy_type] if not proxy_port else proxy_port
})
self.SetGlobal = SetClient
def __sign_request(self, request, privkey, keyid):
if not crypto_enabled:
logging.error('Crypto functions disabled')
return
request.add_header('(request-target)', f'{request.method.lower()} {request.path}')
request.add_header('host', request.host)
request.add_header('date', datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT'))
if request.body:
body_hash = b64encode(SHA256.new(request.body).digest()).decode("UTF-8")
request.add_header('digest', f'SHA-256={body_hash}')
request.add_header('content-length', len(request.body))
sig = {
'keyId': keyid,
'algorithm': 'rsa-sha256',
'headers': ' '.join([k.lower() for k in request.headers.keys()]),
'signature': b64encode(PkcsHeaders(privkey, request.headers)).decode('UTF-8')
}
sig_items = [f'{k}="{v}"' for k,v in sig.items()]
sig_string = ','.join(sig_items)
request.add_header('signature', sig_string)
request.remove_header('(request-target)')
request.remove_header('host')
def __build_request(self, url, data=None, headers={}, method='GET'):
new_headers = self.headers.copy()
new_headers.update(headers)
parsed_headers = {k.lower(): v for k,v in new_headers.items()}
if not parsed_headers.get('user-agent'):
parsed_headers['user-agent'] = self.agent
if isinstance(data, dict):
data = json.dumps(data)
if isinstance(data, str):
data = data.encode('UTF-8')
request = Request(url, data=data, headers=parsed_headers, method=method)
if self.proxy.enabled:
request.set_proxy(f'{self.proxy.host}:{self.proxy.port}', self.proxy.ptype)
return request
def request(self, *args, **kwargs):
request = self.__build_request(*args, **kwargs)
try:
response = urlopen(request)
except HTTPError as e:
response = e.fp
except SSLCertVerificationError as e:
logging.error('HttpClient.request: Certificate error:', e)
return
return HttpResponse(response)
def file(self, url, filepath, *args, filename=None, **kwargs):
resp = self.request(url, *args, **kwargs)
if resp.status != 200:
logging.error(f'Failed to download {url}:', resp.status, resp.body)
return False
path = Path(filepath)
if not path.exists():
logging.error('Path does not exist:', path)
return False
with path.join(filename).open('wb') as fd:
fd.write(resp.body)
return True
def image(self, url, filepath, *args, filename=None, ext='png', dimensions=(50, 50), **kwargs):
if not Image:
logging.error('Pillow module is not installed')
return
resp = self.request(url, *args, **kwargs)
if resp.status != 200:
logging.error(f'Failed to download {url}:', resp.status, resp.body)
return False
if not filename:
filename = Path(url).stem()
path = Path(filepath)
if not path.exists():
logging.error('Path does not exist:', path)
return False
byte = BytesIO()
image = Image.open(BytesIO(resp.body))
image.thumbnail(dimensions)
image.save(byte, format=ext.upper())
with path.join(filename).open('wb') as fd:
fd.write(byte.getvalue())
def json(self, *args, headers={}, activity=True, **kwargs):
json_type = 'activity+json' if activity else 'json'
headers.update({
'accept': f'application/{json_type}'
})
return self.request(*args, headers=headers, **kwargs)
def signed_request(self, privkey, keyid, *args, **kwargs):
request = self.__build_request(*args, **kwargs)
self.__sign_request(request, privkey, keyid)
try:
response = urlopen(request)
except HTTPError as e:
response = e
return HttpResponse(response)
class HttpResponse(object):
def __init__(self, response):
self.body = response.read()
self.headers = DefaultDict({k.lower(): v.lower() for k,v in response.headers.items()})
self.status = response.status
self.url = response.url
def text(self):
return self.body.decode('UTF-8')
def json(self, fail=False):
try:
return DotDict(self.text())
except Exception as e:
if fail:
raise e from None
else:
return DotDict()
def json_pretty(self, indent=4):
return json.dumps(self.json().asDict(), indent=indent)
def VerifyRequest(request: SanicRequest, actor: dict):
'''Verify a header signature from a sanic request
request: The request with the headers to verify
actor: A dictionary containing the activitypub actor and the link to the pubkey used for verification
'''
if not SanicRequest:
logging.error('Sanic request verification disabled')
return
body = request.body if request.body else None
return VerifyHeaders(request.headers, request.method, request.path, actor=actor, body=body)
def VerifyHeaders(headers: dict, method: str, path: str, actor: dict=None, body=None):
'''Verify a header signature
headers: A dictionary containing all the headers from a request
method: The HTTP method of the request
path: The path of the HTTP request
actor (optional): A dictionary containing the activitypub actor and the link to the pubkey used for verification
body (optional): The body of the request. Only needed if the signature includes the digest header
'''
if not crypto_enabled:
logging.error('Crypto functions disabled')
return
headers = {k.lower(): v for k,v in headers.items()}
headers['(request-target)'] = f'{method.lower()} {path}'
signature = ParseSig(headers.get('signature'))
digest = ParseBodyDigest(headers.get('digest'))
missing_headers = [k for k in ['date', 'host'] if headers.get(k) is None]
if not signature:
logging.verbose('Missing signature')
return False
if not actor:
actor = FetchActor(signature.keyid)
## Add digest header to missing headers list if it doesn't exist
if method.lower() == 'post' and not digest:
missing_headers.append('digest')
## Fail if missing date, host or digest (if POST) headers
if missing_headers:
logging.verbose('Missing headers:', missing_headers)
return False
## Fail if body verification fails
if digest and not VerifyString(body, digest.sig, digest.alg):
logging.verbose('Failed body digest verification')
return False
pubkey = actor.publicKey['publicKeyPem']
if PkcsHeaders(pubkey, {k:v for k,v in headers.items() if k in signature.headers}, sig=signature):
return True
logging.verbose('Failed header verification')
return False
def ParseBodyDigest(digest):
if not digest:
return
parsed = DotDict()
parts = digest.split('=', 1)
if len(parts) != 2:
return
parsed.sig = parts[1]
parsed.alg = parts[0].replace('-', '')
return parsed
def VerifyString(string, enc_string, alg='SHA256', fail=False):
if not crypto_enabled:
logging.error('Crypto functions disabled')
return
if type(string) != bytes:
string = string.encode('UTF-8')
body_hash = b64encode(SHA256.new(string).digest()).decode('UTF-8')
if body_hash == enc_string:
return True
if fail:
raise error.VerificationError()
else:
return False
def PkcsHeaders(key: str, headers: dict, sig=None):
if not crypto_enabled:
logging.error('Crypto functions disabled')
return
if sig:
head_items = [f'{item}: {headers[item]}' for item in sig.headers]
else:
head_items = [f'{k.lower()}: {v}' for k,v in headers.items()]
head_string = '\n'.join(head_items)
head_bytes = head_string.encode('UTF-8')
KEY = RSA.importKey(key)
pkcs = PKCS1_v1_5.new(KEY)
h = SHA256.new(head_bytes)
if sig:
return pkcs.verify(h, b64decode(sig.signature))
else:
return pkcs.sign(h)
def ParseSig(signature: str):
if not signature:
logging.verbose('Missing signature header')
return
split_sig = signature.split(',')
sig = DefaultDict({})
for part in split_sig:
key, value = part.split('=', 1)
sig[key.lower()] = value.replace('"', '')
if not sig.headers:
logging.verbose('Missing headers section in signature')
return
sig.headers = sig.headers.split()
return sig
def FetchActor(url):
if not Client:
logging.error('IzzyLib.http: Please set global client with "SetClient(client)"')
return {}
url = url.split('#')[0]
headers = {'Accept': 'application/activity+json'}
resp = Client.request(url, headers=headers)
if not resp.json():
logging.verbose('functions.FetchActor: Failed to fetch actor:', url)
logging.debug(f'Error {resp.status}: {resp.body}')
return {}
actor = resp.json()
actor.web_domain = urlparse(url).netloc
actor.shared_inbox = actor.inbox
actor.pubkey = None
actor.handle = actor.preferredUsername
if actor.get('endpoints'):
actor.shared_inbox = actor.endpoints.get('sharedInbox', actor.inbox)
if actor.get('publicKey'):
actor.pubkey = actor.publicKey.get('publicKeyPem')
return actor
@functools.lru_cache(maxsize=512)
def FetchWebfingerAcct(handle, domain):
if not Client:
logging.error('IzzyLib.http: Please set global client with "SetClient(client)"')
return {}
data = DefaultDict()
webfinger = Client.request(f'https://{domain}/.well-known/webfinger?resource=acct:{handle}@{domain}')
if not webfinger.body:
return
data.handle, data.domain = webfinger.json().subject.replace('acct:', '').split('@')
for link in webfinger.json().links:
if link['rel'] == 'self' and link['type'] == 'application/activity+json':
data.actor = link['href']
return data
def SetClient(client=None):
global Client
Client = client or HttpClient()
def GenRsaKey():
privkey = RSA.generate(2048)
key = DotDict({'PRIVKEY': privkey, 'PUBKEY': privkey.publickey()})
key.update({'privkey': key.PRIVKEY.export_key().decode(), 'pubkey': key.PUBKEY.export_key().decode()})
return key

View file

@ -1,326 +0,0 @@
import multiprocessing, sanic, signal, traceback
import logging as pylog
from jinja2.exceptions import TemplateNotFound
from multidict import CIMultiDict
from multiprocessing import cpu_count, current_process
from urllib.parse import parse_qsl, urlparse
from . import http, logging
from .misc import DotDict, DefaultDict, LowerDotDict
from .template import Template
log_path_ignore = [
'/media',
'/static'
]
log_ext_ignore = [
'js', 'ttf', 'woff2',
'ac3', 'aiff', 'flac', 'm4a', 'mp3', 'ogg', 'wav', 'wma',
'apng', 'ico', 'jpeg', 'jpg', 'png', 'svg',
'divx', 'mov', 'mp4', 'webm', 'wmv'
]
class HttpServer(sanic.Sanic):
def __init__(self, name='sanic', host='0.0.0.0', port='4080', **kwargs):
self.host = host
self.port = int(port)
self.workers = int(kwargs.get('workers', cpu_count()))
self.sig_handler = kwargs.get('sig_handler')
self.ctx = DotDict()
super().__init__(name, request_class=kwargs.get('request_class', HttpRequest))
#for log in ['sanic.root', 'sanic.access']:
#pylog.getLogger(log).setLevel(pylog.CRITICAL)
self.template = Template(
kwargs.get('tpl_search', []),
kwargs.get('tpl_globals', {}),
kwargs.get('tpl_context'),
kwargs.get('tpl_autoescape', True)
)
self.template.addEnv('app', self)
self.error_handler.add(TemplateNotFound, NoTemplateError)
self.error_handler.add(Exception, kwargs.get('error_handler', GenericError))
self.register_middleware(MiddlewareAccessLog, attach_to='response')
signal.signal(signal.SIGHUP, self.finish)
signal.signal(signal.SIGINT, self.finish)
signal.signal(signal.SIGQUIT, self.finish)
signal.signal(signal.SIGTERM, self.finish)
def add_method_route(self, method, *routes):
for route in routes:
self.add_route(method.as_view(), route)
def add_method_routes(self, routes: list):
for route in routes:
self.add_method_route(*route)
def start(self):
options = {
'host': self.host,
'port': self.port,
'workers': self.workers,
'access_log': False,
'debug': False
}
msg = f'Starting {self.name} at {self.host}:{self.port}'
if self.workers > 1:
msg += f' with {self.workers} workers'
logging.info(msg)
self.run(**options)
def finish(self):
if self.sig_handler:
self.sig_handler()
self.stop()
logging.info('Bye! :3')
class HttpRequest(sanic.request.Request):
def __init__(self, url_bytes, headers, version, method, transport, app):
super().__init__(url_bytes, headers, version, method, transport, app)
self.Headers = Headers(headers)
self.Data = Data(self)
self.template = self.app.template
self.__setup_defaults()
self.__parse_path()
#if self.paths.media:
#return
self.__parse_signature()
self.Run()
def Run(self):
pass
def response(self, tpl, *args, **kwargs):
return self.template.response(self, tpl, *args, **kwargs)
def alldata(self):
return self.__combine_dicts(self.content.json, self.data.query, self.data.form)
def verify(self, actor=None):
self.ap.valid = http.VerifyHeaders(self.headers, self.method, self.path, actor, self.body)
return self.ap.valid
def __combine_dicts(self, *dicts):
data = DotDict()
for item in dicts:
data.update(item)
return data
def __setup_defaults(self):
self.paths = DotDict({'media': False, 'json': False, 'ap': False, 'cookie': False})
self.ap = DotDict({'valid': False, 'signature': {}, 'actor': None, 'inbox': None, 'domain': None})
def __parse_path(self):
self.paths.media = any(map(self.path.startswith, log_path_ignore)) or any(map(self.path.endswith, log_ext_ignore))
self.paths.json = self.__json_check()
def __parse_signature(self):
sig = self.headers.getone('signature', None)
if sig:
self.ap.signature = http.ParseSig(sig)
if self.ap.signature:
self.ap.actor = self.ap.signature.get('keyid', '').split('#', 1)[0]
self.ap.domain = urlparse(self.ap.actor).netloc
def __json_check(self):
if self.path.endswith('.json'):
return True
accept = self.headers.getone('Accept', None)
if accept:
mimes = [v.strip() for v in accept.split(',')]
if any(mime in ['application/json', 'application/activity+json'] for mime in mimes):
return True
return False
class Headers(LowerDotDict):
def __init__(self, headers):
super().__init__()
for k,v in headers.items():
if not self.get(k):
self[k] = []
self[k].append(v)
def getone(self, key, default=None):
value = self.get(key)
if not value:
return default
return value[0]
def getall(self, key, default=[]):
return self.get(key.lower(), default)
class Data(object):
def __init__(self, request):
self.request = request
@property
def combined(self):
return DotDict(**self.form.asDict(), **self.query.asDict(), **self.json.asDict())
@property
def query(self):
data = {k: v for k,v in parse_qsl(self.request.query_string)}
return DotDict(data)
@property
def form(self):
data = {k: v[0] for k,v in self.request.form.items()}
return DotDict(data)
@property
def files(self):
return DotDict({k:v[0] for k,v in self.request.files.items()})
### body functions
@property
def raw(self):
try:
return self.request.body
except Exception as e:
logging.verbose('IzzyLib.http_server.Data.raw: failed to get body')
logging.debug(f'{e.__class__.__name__}: {e}')
return b''
@property
def text(self):
try:
return self.raw.decode()
except Exception as e:
logging.verbose('IzzyLib.http_server.Data.text: failed to get body')
logging.debug(f'{e.__class__.__name__}: {e}')
return ''
@property
def json(self):
try:
return DotDict(self.text)
except Exception as e:
logging.verbose('IzzyLib.http_server.Data.json: failed to get body')
logging.debug(f'{e.__class__.__name__}: {e}')
return DotDict()
async def MiddlewareAccessLog(request, response):
if request.paths.media:
return
uagent = request.headers.get('user-agent')
address = request.headers.get('x-real-ip', request.forwarded.get('for', request.remote_addr))
logging.info(f'({multiprocessing.current_process().name}) {address} {request.method} {request.path} {response.status} "{uagent}"')
def GenericError(request, exception):
try:
status = exception.status_code
except:
status = 500
if status not in range(200, 499):
traceback.print_exc()
msg = f'{exception.__class__.__name__}: {str(exception)}'
if request.paths.json:
return sanic.response.json({'error': {'status': status, 'message': msg}})
try:
return request.response('server_error.haml', status=status, context={'status': str(status), 'error': msg})
except TemplateNotFound:
return sanic.response.text(f'Error {status}: {msg}')
def NoTemplateError(request, exception):
logging.error('TEMPLATE_ERROR:', f'{exception.__class__.__name__}: {str(exception)}')
return sanic.response.html('I\'m a dumbass and forgot to create a template for this page', 500)
def ReplaceHeader(headers, key, value):
for k in list(headers):
if k.lower() == key.lower():
del headers[k]
headers[key] = value
class Response:
Text = sanic.response.text
Html = sanic.response.html
Json = sanic.response.json
Redir = sanic.response.redirect
def Css(*args, headers={}, **kwargs):
ReplaceHeader(headers, 'content-type', 'text/css')
return sanic.response.text(*args, headers=headers, **kwargs)
def Js(*args, headers={}, **kwargs):
ReplaceHeader(headers, 'content-type', 'application/javascript')
return sanic.response.text(*args, headers=headers, **kwargs)
def Ap(*args, headers={}, **kwargs):
ReplaceHeader(headers, 'content-type', 'application/activity+json')
return sanic.response.json(*args, headers=headers, **kwargs)
def Jrd(*args, headers={}, **kwargs):
ReplaceHeader(headers, 'content-type', 'application/jrd+json')
return sanic.response.json(*args, headers=headers, **kwargs)
Resp = Response

View file

@ -1,578 +0,0 @@
'''Miscellaneous functions'''
import hashlib, random, string, sys, os, json, socket, time
from os import environ as env
from datetime import datetime
from getpass import getpass
from pathlib import Path as Pathlib
from shutil import copyfile
from . import logging
try:
from passlib.hash import argon2
except ImportError:
argon2 = None
def Boolean(v, return_value=False):
if type(v) not in [str, bool, int, type(None)]:
raise ValueError(f'Value is not a string, boolean, int, or nonetype: {v}')
'''make the value lowercase if it's a string'''
value = v.lower() if isinstance(v, str) else v
if value in [1, True, 'on', 'y', 'yes', 'true', 'enable']:
'''convert string to True'''
return True
if value in [0, False, None, 'off', 'n', 'no', 'false', 'disable', '']:
'''convert string to False'''
return False
if return_value:
'''just return the value'''
return v
return True
def RandomGen(length=20, chars=None):
if not isinstance(length, int):
raise TypeError(f'Character length must be an integer, not {type(length)}')
characters = string.ascii_letters + string.digits
if chars:
characters += chars
return ''.join(random.choices(characters, k=length))
def HashString(string, alg='blake2s'):
if alg not in hashlib.__always_supported:
logging.error('Unsupported hash algorithm:', alg)
logging.error('Supported algs:', ', '.join(hashlib.__always_supported))
return
string = string.encode('UTF-8') if type(string) != bytes else string
newhash = hashlib.new(alg)
newhash.update(string)
return newhash.hexdigest()
def Timestamp(dtobj=None, utc=False):
dtime = dtobj if dtobj else datetime
date = dtime.utcnow() if utc else dtime.now()
return date.timestamp()
def GetVarName(single=True, **kwargs):
keys = list(kwargs.keys())
return keys[0] if single else keys
def ApDate(date=None, alt=False):
if not date:
date = datetime.utcnow()
elif type(date) == int:
date = datetime.fromtimestamp(date)
elif type(date) != datetime:
raise TypeError(f'Unsupported object type for ApDate: {type(date)}')
return date.strftime('%a, %d %b %Y %H:%M:%S GMT' if alt else '%Y-%m-%dT%H:%M:%SZ')
def GetIp():
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
s.connect(('10.255.255.255', 1))
data = s.getsockname()
ip = data[0]
except Exception:
ip = '127.0.0.1'
finally:
s.close()
return ip
def Input(prompt, default=None, valtype=str, options=[], password=False):
input_func = getpass if password else input
if default != None:
prompt += ' [-redacted-]' if password else f' [{default}]'
prompt += '\n'
if options:
opt = '/'.join(options)
prompt += f'[{opt}]'
prompt += ': '
value = input_func(prompt)
while value and len(options) > 0 and value not in options:
print('Invalid value:', value)
value = input_func(prompt)
if not value or value == '':
return default
ret = valtype(value)
while valtype == Path and not ret.parent().exists():
print('Parent directory doesn\'t exist')
ret = Path(input_func(prompt))
return ret
def NfsCheck(path):
proc = Path('/proc/mounts')
path = Path(path).resolve()
if not proc.exists():
return True
with proc.open() as fd:
for line in fd:
line = line.split()
if line[2] == 'nfs' and line[1] in path.str():
return True
return False
def PrintMethods(object, include_underscore=False):
for line in dir(object):
if line.startswith('_'):
if include_underscore:
print(line)
else:
print(line)
class DotDict(dict):
def __init__(self, value=None, **kwargs):
'''Python dictionary, but variables can be set/get via attributes
value [str, bytes, dict]: JSON or dict of values to init with
case_insensitive [bool]: Wether keys should be case sensitive or not
kwargs: key/value pairs to set on init. Overrides identical keys set by 'value'
'''
super().__init__()
data = {}
if isinstance(value, (str, bytes)):
self.fromJson(value)
elif isinstance(value, dict):
self.update(value)
elif value:
raise TypeError('The value must be a JSON string, dict, or another DotDict object, not', value.__class__)
if kwargs:
self.update(kwargs)
def __getattr__(self, key):
try:
val = super().__getattribute__(key)
except AttributeError:
val = self.get(key, KeyError)
try:
if val == KeyError:
raise KeyError(f'Invalid key: {key}')
except AttributeError:
'PyCryptodome.PublicKey.RSA.RsaKey.__eq__ does not seem to play nicely'
return DotDict(val) if type(val) == dict else val
def __delattr__(self, key):
if self.get(key):
del self[key]
super().__delattr__(key)
def __setattr__(self, key, value):
if key.startswith('_'):
super().__setattr__(key, value)
else:
super().__setitem__(key, value)
def __str__(self):
return self.toJson()
def __parse_item__(self, k, v):
if type(v) == dict:
v = DotDict(v)
if not k.startswith('_'):
return (k, v)
def get(self, key, default=None):
value = dict.get(self, key, default)
return DotDict(value) if type(value) == dict else value
def items(self):
data = []
for k, v in super().items():
new = self.__parse_item__(k, v)
if new:
data.append(new)
return data
def values(self):
return list(super().values())
def keys(self):
return list(super().keys())
def asDict(self):
return dict(self)
def toJson(self, indent=None, **kwargs):
if 'cls' not in kwargs:
kwargs['cls'] = DotDictEncoder
return json.dumps(dict(self), indent=indent, **kwargs)
def fromJson(self, string):
data = json.loads(string)
self.update(data)
def load_json(self, path):
path = Path(path)
with path.open('r') as fd:
self.update(json.load(fd))
def save_json(self, path, indent=None):
path = Path(path)
with path.open('w') as fd:
json.dump(self, fd, indent=indent)
class DefaultDict(DotDict):
def __getattr__(self, key):
try:
val = super().__getattribute__(key)
except AttributeError:
val = self.get(key, DefaultDict())
return DotDict(val) if type(val) == dict else val
class LowerDotDict(DotDict):
def __getattr__(self, key):
key = key.lower()
try:
val = super().__getattribute__(key)
except AttributeError:
val = self.get(key, KeyError)
if val == KeyError:
raise KeyError(f'Invalid key: {key}')
return DotDict(val) if type(val) == dict else val
def __setattr__(self, key, value):
key = key.lower()
if key.startswith('_'):
super().__setattr__(key, value)
else:
super().__setitem__(key, value)
def update(self, data):
data = {k.lower(): v for k,v in data.items()}
super().update(data)
class Path(object):
def __init__(self, path, exist=True, missing=True, parents=True):
self.__path = Pathlib(str(path))
if str(path).startswith('~'):
self.__path = self.__path.expanduser()
self.json = DotDict({})
self.exist = exist
self.missing = missing
self.parents = parents
self.name = self.__path.name
self.stem = self.__path.stem
def __str__(self):
return str(self.__path)
def __repr__(self):
return f'Path({str(self.__path)})'
def str(self):
return self.__str__()
def __check_dir(self, path=None):
target = self if not path else Path(path)
if not self.parents and not target.parent().exists():
raise FileNotFoundError('Parent directories do not exist:', target.str())
if not self.exist and target.exists():
raise FileExistsError('File or directory already exists:', target.str())
def size(self):
return self.__path.stat().st_size
def mtime(self):
return self.__path.stat().st_mtime
def mkdir(self, mode=0o755):
self.__path.mkdir(mode, self.parents, self.exist)
return True if self.__path.exists() else False
def new(self):
return Path(self.__path)
def parent(self, new=True):
path = Pathlib(self.__path).parent
if new:
return Path(path)
self.__path = path
return self
def copy(self, path, overwrite=False):
target = Path(path)
self.__check_dir(path)
if target.exists() and overwrite:
target.delete()
copyfile(self.str(), target.str())
def move(self, path, overwrite=False):
self.copy(path, overwrite=overwrite)
self.delete()
def join(self, path, new=True):
new_path = self.__path.joinpath(path)
if new:
return Path(new_path)
self.__path = new_path
return self
def home(self, path=None, new=True):
new_path = Pathlib.home()
if path:
new_path = new_path.joinpath(path)
if new:
return Path(new_path)
self.__path = new_path
return self
def isdir(self):
return self.__path.is_dir()
def isfile(self):
return self.__path.is_file()
def islink(self):
return self.__path.is_symlink()
def listdir(self):
return [Path(path) for path in self.__path.iterdir()]
def exists(self):
return self.__path.exists()
def mtime(self):
return os.path.getmtime(self.str())
def link(self, path):
target = Path(path)
self.__check_dir(path)
if target.exists():
target.delete()
self.__path.symlink_to(path, target.isdir())
def resolve(self, new=True):
path = self.__path.resolve()
if new:
return Path(path)
self.__path = path
return self
def touch(self, mode=0o666):
self.__path.touch(mode, self.exist)
return True if self.__path.exists() else False
def loadJson(self):
self.json = DotDict(self.read())
return self.json
def updateJson(self, data={}):
if type(data) == str:
data = json.loads(data)
self.json.update(data)
def storeJson(self, indent=None):
with self.__path.open('w') as fp:
fp.write(json.dumps(self.json.asDict(), indent=indent))
# This needs to be extended to handle dirs with files/sub-dirs
def delete(self):
if self.isdir():
self.__path.rmdir()
else:
self.__path.unlink()
return not self.exists()
def open(self, *args):
return self.__path.open(*args)
def read(self, *args):
return self.open().read(*args)
def readlines(self):
return self.open().readlines()
class DotDictEncoder(json.JSONEncoder):
def default(self, obj):
if type(obj) not in [str, int, float, dict]:
return str(obj)
return json.JSONEncoder.default(self, obj)
class PasswordHash(object):
def __init__(self, salt=None, rounds=8, bsize=50, threads=os.cpu_count(), length=64):
if type(salt) == Path:
if salt.exists():
with salt.open() as fd:
self.salt = fd.read()
else:
newsalt = RandomGen(40)
with salt.open('w') as fd:
fd.write(newsalt)
self.salt = newsalt
else:
self.salt = salt or RandomGen(40)
self.rounds = rounds
self.bsize = bsize * 1024
self.threads = threads
self.length = length
def hash(self, password):
return argon2.using(
salt = self.salt.encode('UTF-8'),
rounds = self.rounds,
memory_cost = self.bsize,
max_threads = self.threads,
digest_size = self.length
).hash(password)
def verify(self, password, passhash):
return argon2.using(
salt = self.salt.encode('UTF-8'),
rounds = self.rounds,
memory_cost = self.bsize,
max_threads = self.threads,
digest_size = self.length
).verify(password, passhash)

View file

@ -1,137 +0,0 @@
'''functions for web template management and rendering'''
import codecs, traceback, os, json, xml
from os import listdir, makedirs
from os.path import isfile, isdir, getmtime, abspath
from jinja2 import Environment, FileSystemLoader, ChoiceLoader, select_autoescape, Markup
from hamlish_jinja import HamlishExtension
from xml.dom import minidom
try:
from sanic import response as Response
except ModuleNotFoundError:
Response = None
from . import logging
from .color import *
from .misc import Path, DotDict
class Template(Environment):
def __init__(self, search=[], global_vars={}, context=None, autoescape=True):
self.autoescape = autoescape
self.search = []
self.func_context = context
for path in search:
self.__add_search_path(path)
super().__init__(
loader=ChoiceLoader([FileSystemLoader(path) for path in self.search]),
extensions=[HamlishExtension],
autoescape=self.autoescape,
lstrip_blocks=True,
trim_blocks=True
)
self.hamlish_file_extensions=('.haml',)
self.hamlish_enable_div_shortcut=True
self.hamlish_mode = 'indented'
self.globals.update({
'markup': Markup,
'cleanhtml': lambda text: ''.join(xml.etree.ElementTree.fromstring(text).itertext()),
'lighten': lighten,
'darken': darken,
'saturate': saturate,
'desaturate': desaturate,
'rgba': rgba
})
self.globals.update(global_vars)
def __add_search_path(self, path):
tpl_path = Path(path)
if not tpl_path.exists():
raise FileNotFoundError('Cannot find search path:', tpl_path.str())
if tpl_path.str() not in self.search:
self.search.append(tpl_path.str())
def setContext(self, context):
if not hasattr(context, '__call__'):
logging.error('Context is not callable')
return
if not isinstance(context({}, {}), dict):
logging.error('Context does not return a dict or dict-like object')
return
self.func_context = context
def addEnv(self, k, v):
self.globals[k] = v
def delEnv(self, var):
if not self.globals.get(var):
raise ValueError(f'"{var}" not in global variables')
del self.globals[var]
def updateEnv(self, data):
if not isinstance(data, dict):
raise ValueError(f'Environment data not a dict')
self.globals.update(data)
def addFilter(self, funct, name=None):
name = funct.__name__ if not name else name
self.filters[name] = funct
def delFilter(self, name):
if not self.filters.get(name):
raise ValueError(f'"{name}" not in global filters')
del self.filters[name]
def updateFilter(self, data):
if not isinstance(data, dict):
raise ValueError(f'Filter data not a dict')
self.filters.update(data)
def render(self, tplfile, context={}, headers={}, cookies={}, request=None, pprint=False, **kwargs):
if not isinstance(context, dict):
raise TypeError(f'context for {tplfile} not a dict: {type(context)} {context}')
context['request'] = request if request else {'headers': headers, 'cookies': cookies}
if self.func_context:
context.update(self.func_context(DotDict(context), DotDict(self.globals)))
result = self.get_template(tplfile).render(context)
if pprint and any(map(tplfile.endswith, ['haml', 'html', 'xml'])):
return minidom.parseString(result).toprettyxml(indent=" ")
else:
return result
def response(self, request, tpl, ctype='text/html', status=200, **kwargs):
if not Response:
raise ModuleNotFoundError('Sanic is not installed')
html = self.render(tpl, request=request, **kwargs)
return Response.HTTPResponse(body=html, status=status, content_type=ctype, headers=kwargs.get('headers', {}))

52
LICENSE
View file

@ -1,7 +1,7 @@
IzzyLib
Copyright Zoey Mae 2020
Copyright Zoey Mae 2021
COOPERATIVE NON-VIOLENT PUBLIC LICENSE v4
COOPERATIVE NON-VIOLENT PUBLIC LICENSE v6
Preamble
@ -19,7 +19,7 @@ Official Webpage: https://thufie.lain.haus/NPL.html
Terms and Conditions
THE WORK (AS DEFINED BELOW) IS PROVIDED UNDER THE TERMS OF THIS
COOPERATIVE NON-VIOLENT PUBLIC LICENSE v4 ("LICENSE"). THE WORK IS
COOPERATIVE NON-VIOLENT PUBLIC LICENSE v5 ("LICENSE"). THE WORK IS
PROTECTED BY COPYRIGHT AND ALL OTHER APPLICABLE LAWS. ANY USE OF THE
WORK OTHER THAN AS AUTHORIZED UNDER THIS LICENSE OR COPYRIGHT LAW IS
PROHIBITED. BY EXERCISING ANY RIGHTS TO THE WORK PROVIDED IN THIS
@ -51,11 +51,14 @@ TO BE BOUND BY THE TERMS AND CONDITIONS OF THIS LICENSE.
For the avoidance of doubt, where the Work is a musical work,
performance or phonogram, the synchronization of the Work in
timed-relation with a moving image ("synching") will be
considered an Adaptation for the purpose of this License.
considered an Adaptation for the purpose of this License. In
addition, where the Work is designed to output a neural network
the output of the neural network will be considered an
Adaptation for the purpose of this license.
c. "Bodily Harm" means any physical hurt or injury to a person that
interferes with the health or comfort of the person and that is more
more than merely transient or trifling in nature.
than merely transient or trifling in nature.
d. "Collection" means a collection of literary or artistic
works, such as encyclopedias and anthologies, or performances,
@ -75,9 +78,13 @@ TO BE BOUND BY THE TERMS AND CONDITIONS OF THIS LICENSE.
through sale, gift or any other transfer of possession or
ownership.
f. "Incarceration" means confinement in a jail, prison, or any
other place where individuals of any kind are held against
either their will or the will of their legal guardians.
f. "Incarceration" means confinement in a jail, prison, or
any other place where individuals of any kind are held against
either their will or (if their will cannot be determined) the
will of their legal guardian or guardians. In the case of a
conflict between the will of the individual and the will of
their legal guardian or guardians, the will of the
individual will take precedence.
g. "Licensor" means the individual, individuals, entity or
entities that offer(s) the Work under the terms of this License.
@ -153,7 +160,7 @@ TO BE BOUND BY THE TERMS AND CONDITIONS OF THIS LICENSE.
overtly or covertly observe and record persons and or their
activities.
p. "Web Service" means the use of a piece of Software to
p. "Network Service" means the use of a piece of Software to
interpret or modify information that is subsequently and directly
served to users over the Internet.
@ -166,6 +173,11 @@ TO BE BOUND BY THE TERMS AND CONDITIONS OF THIS LICENSE.
for some group or advocating a form of Discrimination
(to Discriminate per definition in (q)) between humans.
s. "Coercion" means leveraging of the threat of force or use of force
to intimidate a person in order to gain compliance, or to offer
large incentives which aim to entice a person to act against their
will.
2. FAIR DEALING RIGHTS
Nothing in this License is intended to reduce, limit, or restrict any
@ -251,7 +263,7 @@ TO BE BOUND BY THE TERMS AND CONDITIONS OF THIS LICENSE.
form, or You provide a URI for the corresponding Source Code of
the Work, to any recipients upon request.
d. If the Work is used as or for a Web Service, You may exercise
d. If the Work is used as or for a Network Service, You may exercise
the rights granted in Section 3 only if You provide a copy of the
corresponding Source Code from which the Work was derived in digital
form, or You provide a URI for the corresponding Source Code to the
@ -259,11 +271,11 @@ TO BE BOUND BY THE TERMS AND CONDITIONS OF THIS LICENSE.
Service.
e. You may exercise the rights granted in Section 3 for
commercial purposes only if you satisfy any of the following:
commercial purposes only if:
i. You are a worker-owned business or worker-owned
collective; and
ii. after tax, all financial gain, surplus, profits and
ii. after tax, all financial gain, surplus, profits and
benefits produced by the business or collective are
distributed among the worker-owners unless a set amount
is to be allocated towards community projects as decided
@ -285,15 +297,19 @@ TO BE BOUND BY THE TERMS AND CONDITIONS OF THIS LICENSE.
i. You do not use the Work for the purpose of inflicting
Bodily Harm on human beings (subject to criminal
prosecution or otherwise) outside of providing medical aid.
prosecution or otherwise) outside of providing medical aid
or undergoing a voluntary procedure under no form of
Coercion.
ii.You do not use the Work for the purpose of Surveilling
or tracking individuals for financial gain.
iii. You do not use the Work in an Act of War.
iv. You do not use the Work for the purpose of supporting
or profiting from an Act of War.
v. You do not use the Work for the purpose of Incarceration.
vi. You do not use the Work for the purpose of extracting
oil, gas, or coal.
vi. You do not use the Work for the purpose of extracting,
processing, or refining, oil, gas, or coal. Or to in any other
way to deliberately pollute the environment as a byproduct
of manufacturing or irresponsible disposal of hazardous materials.
vii. You do not use the Work for the purpose of
expediting, coordinating, or facilitating paid work
undertaken by individuals under the age of 12 years.
@ -310,11 +326,11 @@ TO BE BOUND BY THE TERMS AND CONDITIONS OF THIS LICENSE.
pseudonym, if applicable) if supplied, and/or if the Original
Author and/or Licensor designate another party or parties (e.g.,
a sponsor institute, publishing entity, journal) for attribution
("Attribution Parties") in Licensor!s copyright notice, terms of
("Attribution Parties") in Licensor's copyright notice, terms of
service or by other reasonable means, the name of such party or
parties; (ii) the title of the Work if supplied; (iii) to the
extent reasonably practicable, the URI, if any, that Licensor
specifies to be associated with the Work, unless such URI does
to be associated with the Work, unless such URI does
not refer to the copyright notice or licensing information for
the Work; and, (iv) consistent with Section 3(b), in the case of
an Adaptation, a credit identifying the use of the Work in the
@ -464,7 +480,7 @@ TO BE BOUND BY THE TERMS AND CONDITIONS OF THIS LICENSE.
a copy and/or URI of the corresponding Source Code on the same
terms and conditions as the license granted to You under this License.
d. If the Work is used as a Web Service, each time You Distribute
d. If the Work is used as a Network Service, each time You Distribute
or Publicly Perform an Adaptation, or serve data derived from the
Software, the Licensor offers to any recipients of the data a copy
and/or URI of the corresponding Source Code on the same terms and

1
MANIFEST.in Normal file
View file

@ -0,0 +1 @@
recursive-include izzylib/http_server/frontend *

12
Makefile Normal file
View file

@ -0,0 +1,12 @@
install: setup-deps mklinks
uninstall: rmlinks
setup-deps:
python3 -m pip install -U pip setuptools build
build:
python3 -m build --sdist
python3 -m build --wheel
clean:
rm -rf build dist IzzyLib.egg-info

View file

@ -1,5 +1,43 @@
# IzzyLib
These are just a number of functions I keep reusing over and over again in most of my projects
These are just a number of functions I keep reusing over and over again in most of my projects. It's okay to use them if you also find them useful
Note: not in a stable state yet. Expect major changes
## Installation
### From Git
$(venv)/bin/python -m pip install -e "git+https://git.barkshark.xyz/izaliamae/izzylib.git@rework"
Optional dependencies (an install sketch follows this list):
- dbus
- hasher
- http_server
- http_urllib_client
- sql
- template
- tinydb
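For illustration, a hedged sketch of pulling in one of the extras, assuming the package defines pip extras under the names above (not confirmed by this diff):
    $(venv)/bin/python -m pip install -e "git+https://git.barkshark.xyz/izaliamae/izzylib.git@rework#egg=izzylib[sql]"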
## Documentation
### Importing
Most useful classes and functions are imported in the module root, so you don't need to do any multi-level imports. For example, this is fine:
from izzylib import Database
and points to:
from izzylib.sql.database import Database
Or even simply do `import izzylib` and use `izzylib.Database()`.
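A short sketch of that root-level style (assuming `Database` is exported at the package root as stated; nothing is constructed here):
    import izzylib
    from izzylib import Database      # same class as izzylib.sql.database.Database

    help(izzylib.Database)            # every class carries a docstring (see Usage below)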
### Usage
All classes and functions will have docstrings. Either look through the code or run `help()` on an object
# NOTE!
Not in a stable state yet. Expect major changes

64
izzylib/__init__.py Normal file
View file

@ -0,0 +1,64 @@
'''
IzzyLib by Zoey Mae
Licensed under the CNPL: https://git.pixie.town/thufie/CNPL
https://git.barkshark.xyz/izaliamae/izzylib
'''
import os, sys, traceback
assert sys.version_info >= (3, 7)
__version_tpl__ = (0, 6, 0)
__version__ = '.'.join([str(v) for v in __version_tpl__])
from . import logging
izzylog = logging.logger['IzzyLib']
izzylog.set_config('level', os.environ.get('IZZYLOG_LEVEL', 'INFO'))
from .path import Path
from .dotdict import DotDict, LowerDotDict, DefaultDotDict, MultiDotDict, JsonEncoder
from .misc import *
from .cache import CacheDecorator, LruCache, TtlCache
from .connection import Connection
from .http_client import HttpClient, HttpResponse
def log_import_error(package, *message):
izzylog.debug(*message)
path = Path(__file__).resolve.parent.join(package)
if path.exists and izzylog.get_config('level') == logging.Levels.DEBUG:
traceback.print_exc()
try:
from izzylib.sql import SqlColumn, CustomRows, SqlSession, SqlDatabase, Tables, SqliteClient, SqliteColumn, SqliteServer, SqliteSession
except ImportError:
log_import_error('sql', 'Failed to import SQL classes. Connecting to SQL databases is disabled')
try:
from izzylib.tinydb import TinyDatabase, TinyRow, TinyRows
except ImportError:
log_import_error('tinydb', 'Failed to import TinyDB classes. TinyDB database is disabled')
try:
from izzylib.template import Template, Color
except ImportError:
log_import_error('template', 'Failed to import http template classes. Jinja and HAML templates disabled')
try:
from izzylib.http_urllib_client import *
except ImportError:
log_import_error('http_urllib_client', 'Failed to import Requests http client classes. Requests http client is disabled')
try:
from izzylib.http_server import PasswordHasher, HttpServer, HttpServerRequest, HttpServerResponse
except ImportError:
log_import_error('http_server', 'Failed to import HTTP server classes. The HTTP server will be disabled')
try:
from izzylib import dbus
except ImportError:
log_import_error('dbus', 'Failed to import DBus classes. DBus access will be disabled')
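Since the logger level is read from IZZYLOG_LEVEL at import time (see the set_config call above), here is a hedged sketch of turning on debug output, which should also make log_import_error print full tracebacks for failed optional imports when the subpackage directory exists:
import os
os.environ['IZZYLOG_LEVEL'] = 'DEBUG'   # must be set before izzylib is first imported
import izzylib                          # failed optional imports should now print their tracebacks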

View file

@ -1,11 +1,11 @@
'''Simple caches that use ordered dicts'''
import re
import json, re
from datetime import datetime
from collections import OrderedDict
from functools import update_wrapper
from hashlib import sha1
from .misc import DotDict
from . import DotDict
def parse_ttl(ttl):
@ -42,19 +42,30 @@ def parse_ttl(ttl):
return multiplier * int(amount)
class DefaultValue(object):
pass
class BaseCache(OrderedDict):
_get = OrderedDict.get
_items = OrderedDict.items
def __init__(self, maxsize=1024, ttl=None):
self.ttl = parse_ttl(ttl)
self.maxsize = maxsize
self.set = self.store
self.deco = lambda *args: CacheDecorator(self, *args)
def __str__(self):
data = ', '.join([f'{k}="{v["data"]}"' for k,v in self.items()])
data = ', '.join([f'{k}="{v}"' for k,v in self.items()])
return f'BaseCache({data})'
def items(self):
return [[k, v.data] for k,v in self._items()]
def get(self, key):
while len(self) >= self.maxsize and self.maxsize > 0:
self.popitem(last=False)
@ -110,14 +121,50 @@ class BaseCache(OrderedDict):
self[key]['timestamp'] = timestamp + self.ttl
self.move_to_end(key)
return self[key].data
return item
class TTLCache(BaseCache):
def pop(self, key, default=DefaultValue):
try:
item = self.get(key)
del self[key]
return item
except Exception as e:
if default == DefaultValue:
raise e from None
return default
## This doesn't work for some reason
def CacheDecorator(cache):
def decorator(func):
def wrapper(cls, *args, **kwargs):
key = sha1(json.dumps(args).encode() + json.dumps(kwargs).encode()).hexdigest()
cached = cache.fetch(key)
print(cached)
if cached != None:
print('Returning cached value:', cache)
return cached
result = func(cls, *args, **kwargs)
cache.store(key, result)
return result
return wrapper
return decorator
class TtlCache(BaseCache):
def __init__(self, maxsize=1024, ttl='1h'):
super().__init__(maxsize, ttl)
class LRUCache(BaseCache):
class LruCache(BaseCache):
def __init__(self, maxsize=1024):
super().__init__(maxsize)
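For illustration, a hedged usage sketch of the cache classes in this hunk; the store and fetch names appear in the decorator above, but their exact behavior on a cache miss is not shown here, and the stored values are made up:
from izzylib import LruCache, TtlCache

cache = TtlCache(maxsize=512, ttl='1h')                  # entries expire roughly an hour after last use
cache.store('actor:izzy', {'inbox': 'https://example.com/inbox'})
actor = cache.fetch('actor:izzy')

recent = LruCache(maxsize=128)                           # plain LRU eviction, no TTL
recent.store('last_post', 42)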

28
izzylib/connection.py Normal file
View file

@ -0,0 +1,28 @@
import socket
class Connection(socket.socket):
def __init__(self, address='127.0.0.1', port=8080, tcp=True):
super().__init__(socket.AF_INET, socket.SOCK_STREAM if tcp else socket.SOCK_DGRAM)
self.address = address
self.port = port
def __enter__(self):
self.connect((self.address, self.port))
return self
def __exit__(self, exctype, value, tb):
self.close()
def send(self, msg):
if isinstance(msg, str):
msg = msg.encode('utf-8')
self.sendall(msg)
def recieve(self, size=8192):
return self.recv(size)

308
izzylib/dbus/__init__.py Normal file
View file

@ -0,0 +1,308 @@
import json, traceback
from dasbus.connection import SessionMessageBus, SystemMessageBus
from dasbus.error import DBusError
from dasbus.identifier import DBusServiceIdentifier
from dasbus.loop import EventLoop
from functools import partial
from izzylib import DotDict, Path, logging
from pathlib import Path as Pathlib
from xml.etree import ElementTree
try:
from .template import Template
except ImportError:
logging.verbose('Failed to import IzzyLib.template.Template. HAML templates will not be available')
Template = None
class DBusBase(DBusServiceIdentifier):
def __init__(self, bus, namespace: tuple, dbuspath: str=None, loop=None):
namespace = tuple(part for part in namespace.split('.')) if isinstance(namespace, str) else namespace
super().__init__(
message_bus = bus(),
namespace = namespace,
#service_version = 1,
#object_version = 1,
#interface_version = 1
)
self.dbuspath = dbuspath or '/' + '/'.join(namespace)
self.loop = None
if loop:
self.loop = EventLoop() if loop == True else loop
class DBusClientBase(DBusBase):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.proxy = None
self.methods = DotDict()
self.signals = DotDict()
def __enter__(self):
self.connect()
return self
def __exit__(self, *args):
self.disconnect()
def connect(self):
self.proxy = self.get_proxy(self.dbuspath)
try:
self.set_methods_and_signals()
return True
## Make errors shorter and just reuse ConnectionError
except DBusError as e:
if str(e) == f'The name {self.interface_name} was not provided by any .service files':
raise ConnectionError(f'Namespace "{self.interface_name}" does not exist') from None
elif str(e) == f'No such object path \'{self.dbuspath}\'':
raise ConnectionError(f'Path "{self.dbuspath}" does not exist') from None
raise e from None
def disconnect(self):
self.message_bus.disconnect()
self.proxy = None
def cmd(self, command, *args, **kwargs):
if not self.proxy:
raise ConnectionError('Not connected')
logging.debug(f'Running dbus command: {command}, {args}, {kwargs}')
func = getattr(self.proxy, command)
return func(*args, **kwargs)
def sig_connect(self, signal, callback, *args, original_args=True, **kwargs):
if original_args:
self.signals[signal].connect(lambda *sigargs, **sigkwargs: callback(*sigargs, *args, **sigkwargs, **kwargs))
else:
self.signals[signal].connect(lambda *sigargs, **sigkargs: callback(*args, **kwargs))
def set_method(self, name):
self.methods[name] = partial(self.cmd, name)
setattr(self, name, partial(self.cmd, name))
def set_signal(self, name):
signal = getattr(self.proxy, name)
self.signals[name] = signal
setattr(self, name, partial(self.sig_connect, name))
def set_methods_and_signals(self, namespace=None):
for element in ElementTree.fromstring(self.cmd('Introspect')):
if element.attrib['name'] == (namespace or self.interface_name):
for e in element:
name = e.attrib['name']
if getattr(self, name, None):
logging.verbose('Tried to add an existing method or signal:', name)
continue
if e.tag == 'method':
self.set_method(name)
elif e.tag == 'signal':
self.set_signal(name)
class DBusServerBase(DBusBase):
__dbus_xml__ = None
def __init__(self, bus, xmlfile, *args, **kwargs):
super().__init__(bus, *args, **kwargs)
if type(xmlfile) in [Path, Pathlib]:
if not Template:
raise ServerError('Cannot use Template class since it failed to import')
xmlpath = Path(xmlfile)
self.filename = xmlpath.name
self.template = Template(autoescape=False, search=[xmlpath.parent().str()])
else:
self.filename = None
self.template = xmlfile
def setup(self):
if self.filename:
self.__dbus_xml__ = self.template.render(self.filename)
else:
self.__dbus_xml__ = self.template
def register(self):
self.message_bus.register_service('.'.join(self.namespace))
def publish(self):
self.message_bus.publish_object(self.dbuspath, self)
def run(self, publish=True):
self.setup()
self.register()
if publish:
self.publish()
if self.loop:
self.loop.run()
class DBusJsonClientBase(DBusClientBase):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def cmd(self, method, *args, **kwargs):
req_data = json.dumps({'method': method, 'args': args, 'kwargs': kwargs})
resp = self.proxy.HandleMethod(req_data)
data = DotDict(resp)
error = data.get('error')
message = data.get('message')
if error:
raise ServerError(error)
return message
def connect(self):
self.proxy = self.get_proxy(self.dbuspath)
try:
self.Introspect()
except DBusError as e:
if 'was not provided by any .service files' in str(e):
self.proxy = None
return
traceback.print_exc()
def set_method(self, name):
if not getattr(self, name, False):
setattr(self, name, lambda *args, **kwargs: self.cmd(name, *args, **kwargs))
else:
logging.warning('Tried to add an existing method:', name)
def Introspect(self):
self.cmd('Introspect')
class DBusJsonServerBase(DBusServerBase):
xml = '''
<node>
<interface name="{n}">
<method name="HandleMethod">
<arg direction="in" name="data" type="s" />
<arg direction="out" name="return" type="s" />
</method>
</interface>
</node>
'''
def __init__(self, bus, namespace, *args, **kwargs):
super().__init__(bus, self.xml.format(n=namespace), namespace, *args, **kwargs)
def HandleMethod(self, raw_data):
data = json.loads(raw_data)
method = data.get('method')
args = data.get('args')
kwargs = data.get('kwargs')
if not method:
return self.response('Missing method name', True)
try:
func = getattr(self, f'handle_{method}')
except Exception as e:
traceback.print_exc()
return self.response(f'{e.__class__.__name__}: {e}')
if not func:
return self.response('OK')
state, message = func(*args, **kwargs)
return json.dumps({state: message})
def handle_Introspect(self):
return ('message', self.__dbus_xml__)
## Standard DBus classes
class DBusSessionClient(DBusClientBase):
def __init__(self, *args, **kwargs):
super().__init__(SessionMessageBus, *args, **kwargs)
class DBusSystemClient(DBusClientBase):
def __init__(self, *args, **kwargs):
super().__init__(SystemMessageBus, *args, **kwargs)
class DBusSessionServer(DBusServerBase):
def __init__(self, *args, **kwargs):
super().__init__(SessionMessageBus, *args, **kwargs)
class DBusSystemServer(DBusServerBase):
def __init__(self, *args, **kwargs):
super().__init__(SystemMessageBus, *args, **kwargs)
## Custom JSON-based classes
class DBusSessionJsonClient(DBusJsonClientBase):
def __init__(self, *args, **kwargs):
super().__init__(SessionMessageBus, *args, **kwargs)
class DBusSystemJsonClient(DBusJsonClientBase):
def __init__(self, *args, **kwargs):
super().__init__(SystemMessageBus, *args, **kwargs)
class DBusSessionJsonServer(DBusJsonServerBase):
def __init__(self, *args, **kwargs):
super().__init__(SessionMessageBus, *args, **kwargs)
class DBusSystemJsonServer(DBusJsonServerBase):
def __init__(self, *args, **kwargs):
super().__init__(SystemMessageBus, *args, **kwargs)
class ClientError(Exception):
pass
class ServerError(Exception):
pass
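# Usage sketch (editor's example, not part of the diff): connecting the generic
# session client to a service. The namespace is a placeholder; available methods
# and signals are discovered at connect time via Introspect.
with DBusSessionClient(('org', 'example', 'SomeService')) as client:
    print(list(client.methods))      # proxied methods set by set_methods_and_signals
    # client.SomeMethod('arg')       # any discovered method can be called directly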

85
izzylib/dbus/types.py Normal file
View file

@ -0,0 +1,85 @@
import typing
class DBusType(typing.NewType):
def __init__(self, name, type, string):
super().__init__(name, type)
self.name = name
self.type = type
self.string = string
def __str__(self):
return self.string
class Dict(DBusType):
def __init__(self, key=None, value=None):
super().__init__('Dict', dict, None)
self.key = key if key is not None else Str
self.value = value if value is not None else Str
# I'm pretty sure there's an easier way to do f-strings without parsing curly brackets, but I'm not sure how atm
def __str__(self):
return '{' + f'{self.key}{self.value}' + '}'
class List(DBusType):
def __init__(self, *types):
super().__init__('List', list, None)
self.types = types
def __str__(self):
types = ''.join(self.types)
return f'a{types}'
class Set(List):
def __init__(self, *types):
super().__init__('Set', set, None)
self.types = types
class Tuple(List):
def __init__(self, *types):
super().__init__('Tuple', tuple, None)
self.types = types
Str = DBusType('String', str, 's')
Byte = DBusType('Byte', bytes, 'y')
Bool = DBusType('Boolean', bool, 'b')
Float = DBusType('Float', float, 'd')
Int = DBusType('Int64', int, 'x')
Int16 = DBusType('Int16', int, 'n')
Int32 = DBusType('Int32', int, 'i')
Int64 = DBusType('Int64', int, 'x')
Uint16 = DBusType('Uint16', int, 'q')
Uint32 = DBusType('Uint32', int, 'u')
Uint64 = DBusType('Uint64', int, 't')
__all__ = [
'Bool',
'Byte',
'Dict',
'Float',
'Int',
'Int16',
'Int32',
'Int64',
'List',
'Set',
'Str',
'Tuple',
'Uint16',
'Uint32',
'Uint64'
]

191
izzylib/dotdict.py Normal file
View file

@ -0,0 +1,191 @@
import json
from . import Path
class DotDict(dict):
non_dict_vars = []
def __init__(self, value=None, **kwargs):
'''Python dictionary, but variables can be set/get via attributes
value [str, bytes, dict]: JSON or dict of values to init with
kwargs: key/value pairs to set on init. Overrides identical keys set by 'value'
'''
super().__init__()
if isinstance(value, (str, bytes)):
self.from_json(value)
elif isinstance(value, dict):
self.update(value)
elif value:
raise TypeError(f'The value must be a JSON string, list, dict, or another DotDict object, not {value.__class__}')
if kwargs:
self.update(kwargs)
def __getattr__(self, k):
try:
return super().__getitem__(k)
except KeyError:
raise AttributeError(f'{self.__class__.__name__} object has no attribute {k}') from None
def __setattr__(self, k, v):
if k in self.non_dict_vars or k.startswith('_'):
super().__setattr__(k, v)
else:
self.__setitem__(k, v)
def __setitem__(self, k, v):
if type(v) == dict:
v = DotDict(v)
super().__setitem__(k, v)
def __delattr__(self, k):
try:
dict.__delitem__(self, k)
except KeyError:
raise AttributeError(f'{self.__class__.__name__} object has no attribute {k}') from None
@classmethod
def new_from_json_file(cls, path):
data = cls()
data.load_json(path)
return data
def copy(self):
return DotDict(self)
def update(self, data):
for k,v in data.items():
self.__setitem__(k, v)
def to_json(self, indent=None, **kwargs):
if 'cls' not in kwargs:
kwargs['cls'] = JsonEncoder
return json.dumps(dict(self), indent=indent, **kwargs)
def from_json(self, string):
data = json.loads(string)
self.update(data)
def load_json(self, path):
with open(path) as fd:
self.update(json.load(fd))
def save_json(self, path, indent=None):
with open(path, 'w') as fp:
fp.write(self.to_json(indent=indent))
## This has to be reworked tbh
class DefaultDotDict(DotDict):
def __getattr__(self, key):
try:
val = super().__getattribute__(key)
except AttributeError:
val = self.get(key, DefaultDotDict())
return DotDict(val) if type(val) == dict else val
class LowerDotDict(DotDict):
def __getitem__(self, key):
return super().__getitem__(key.lower())
def __setitem__(self, key, value):
return super().__setitem__(key.lower(), value)
class MultiDotDict(DotDict):
def __getattr__(self, key):
return self.__getitem__(key)
def __setitem__(self, key, value):
try:
self.__getitem__(key.lower(), False).append(value)
except KeyError as e:
super().__setitem__(key.lower(), [value])
def __getitem__(self, key, single=True):
values = super().__getitem__(key.lower())
if single:
try:
return values[0]
except IndexError:
return None
return values
def update(self, data):
for k,v in data.items():
self[k] = v
def get(self, key, default=None):
try:
return self.__getitem__(key)
except KeyError:
return default
def getall(self, key, default=None):
try:
return super().__getitem__(key)
except KeyError as e:
if not default:
raise e from None
return default
def set(self, key, value):
if self.get(key):
del self[key]
self[key] = value
def delone(self, key, value):
self.__getitem__(key, False).remove(value)
def delete(self, key):
self.pop(key)
class JsonEncoder(json.JSONEncoder):
def default(self, obj):
if not isinstance(obj, (str, int, float, dict)):
return str(obj)
return json.JSONEncoder.default(self, obj)
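# Usage sketch (editor's example, not part of the diff): attribute access and
# JSON round-tripping with DotDict.
config = DotDict({'name': 'demo'}, port=8080)
config.debug = True
print(config.name, config['port'], config.debug)
restored = DotDict(config.to_json())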

2
izzylib/exceptions.py Normal file
View file

@ -0,0 +1,2 @@
class HttpFileDownloadedError(Exception):
'raise when a download failed for any reason'

View file

@ -0,0 +1 @@
from .hasher import PasswordHasher

78
izzylib/hasher/hasher.py Normal file
View file

@ -0,0 +1,78 @@
import argon2, os
from izzylib import time_function_pprint
class PasswordHasher:
'''
Argon2 password hasher and validator
Attributes:
config (dict): The settings used for the hasher
Methods:
get_config(key): Get the value of a config options
set_config(key, value): Set a config option
hash(password): hash a password and return the digest as a hex string
verify(hash, password): verify a password and the password hash match
iteration_test(string, passes, iterations): Time the hashing functionality
'''
aliases = {
'iterations': 'time_cost',
'memory': 'memory_cost',
'threads': 'parallelism'
}
def __init__(self, iterations=16, memory=100, threads=os.cpu_count(), type=argon2.Type.ID):
if not argon2:
raise ValueError('password hashing disabled')
self.config = {
'time_cost': iterations,
'memory_cost': memory * 1024,
'parallelism': threads,
'encoding': 'utf-8',
'type': type,
}
self.hasher = argon2.PasswordHasher(**self.config)
def get_config(self, key):
key = self.aliases.get(key, key)
value = self.config[key]
return value / 1024 if key == 'memory_cost' else value
def set_config(self, key, value):
key = self.aliases.get(key, key)
self.config[key] = value * 1024 if key == 'memory_cost' else value
self.hasher = argon2.PasswordHasher(**self.config)
def hash(self, password: str):
return self.hasher.hash(password)
def verify(self, passhash: str, password: str):
try:
return self.hasher.verify(passhash, password)
except argon2.exceptions.VerifyMismatchError:
return False
def iteration_test(self, string='hecking heck', passes=3, iterations=[8,16,24,32,40,48,56,64]):
original_iter = self.get_config('iterations')
for iteration in iterations:
self.set_config('iterations', iteration)
print('\nTesting hash iterations:', iteration)
time_function_pprint(self.verify, self.hash(string), string, passes=passes)
self.set_config('iterations', original_iter)
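# Usage sketch (editor's example, not part of the diff): hashing and verifying a
# password with the wrapper above.
hasher = PasswordHasher(iterations=8)
digest = hasher.hash('correct horse battery staple')
print(hasher.verify(digest, 'correct horse battery staple'))  # True
print(hasher.verify(digest, 'wrong password'))                # False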

163
izzylib/http_client.py Normal file
View file

@ -0,0 +1,163 @@
import functools, json, sys
from base64 import b64decode, b64encode
from datetime import datetime
from functools import cached_property
from io import BytesIO
from ssl import SSLCertVerificationError
from urllib.error import HTTPError
from urllib.parse import urlparse
from urllib.request import Request, urlopen
from . import DefaultDotDict, DotDict, Path, exceptions, izzylog, __version__
try:
from PIL import Image
except ImportError:
izzylog.verbose('Pillow module not found. Image downloading is disabled')
Image = False
methods = ['connect', 'delete', 'get', 'head', 'options', 'patch', 'post', 'put', 'trace']
class HttpClient:
def __init__(self, headers={}, useragent=f'IzzyLib/{__version__}', appagent=None, proxy_type='https', proxy_host=None, proxy_port=None):
proxy_ports = {
'http': 80,
'https': 443
}
if proxy_type not in ['http', 'https']:
raise ValueError(f'Not a valid proxy type: {proxy_type}')
self.headers=headers
self.agent = f'{useragent} ({appagent})' if appagent else useragent
self.proxy = DotDict({
'enabled': True if proxy_host else False,
'ptype': proxy_type,
'host': proxy_host,
'port': proxy_ports[proxy_type] if not proxy_port else proxy_port
})
def __build_request(self, url, data=None, headers={}, method='GET'):
new_headers = self.headers.copy()
new_headers.update(headers)
parsed_headers = {k.lower(): v for k,v in new_headers.items()}
if not parsed_headers.get('user-agent'):
parsed_headers['user-agent'] = self.agent
if isinstance(data, dict):
data = json.dumps(data)
if isinstance(data, str):
data = data.encode('UTF-8')
request = Request(url, data=data, headers=parsed_headers, method=method)
if self.proxy.enabled:
request.set_proxy(f'{self.proxy.host}:{self.proxy.port}', self.proxy.ptype)
return request
def request(self, *args, **kwargs):
request = self.__build_request(*args, **kwargs)
try:
response = urlopen(request)
except HTTPError as e:
response = e.fp
return HttpResponse(response)
def file(self, url, filepath, *args, filename=None, size=2048, create_dirs=True, **kwargs):
filepath = Path(filepath)
path = filepath.parent
if not path.exists and not create_dirs:
raise FileNotFoundError(f'Path does not exist: {path}')
path.mkdir()
if filepath.exists:
kwargs.setdefault('headers', {})['range'] = f'bytes={filepath.size}-'
resp = self.request(url, *args, stream=True, **kwargs)
if not resp.headers.get('content-length'):
raise exceptions.HttpFileDownloadedError('File already downloaded fully')
if resp.status != 200:
raise exceptions.HttpFileDownloadedError(f'Failed to download {url}: {resp.status}, body: {resp.body}')
with filepath.open('ab') as fd:
for chunk in resp.chunks(size):
fd.write(chunk)
return True
def image(self, url, filepath, *args, filename=None, ext='png', dimensions=(50, 50), create_dirs=True, **kwargs):
if not Image:
raise ValueError('Pillow module is not installed')
filepath = Path(filepath)
path = filepath.parent
if not path.exists and not create_dirs:
raise FileNotFoundError(f'Path does not exist: {path}')
path.mkdir()
resp = self.request(url, *args, **kwargs)
if resp.status != 200:
raise exceptions.HttpFileDownloadedError(f'Failed to download {url}: {resp.status}, body: {resp.body}')
if not filename:
filename = Path(url).stem
byte = BytesIO()
image = Image.open(BytesIO(resp.body))
image.thumbnail(dimensions)
image.save(byte, format=ext.upper())
with path.join(filename).open('wb') as fd:
fd.write(byte.getvalue())
def json(self, *args, headers={}, activity=True, **kwargs):
json_type = 'activity+json' if activity else 'json'
headers.update({
'accept': f'application/{json_type}'
})
return self.request(*args, headers=headers, **kwargs)
class HttpResponse(object):
def __init__(self, response):
self.body = response.read()
self.headers = DefaultDotDict({k.lower(): v.lower() for k,v in response.headers.items()})
self.status = response.status
self.url = response.url
@cached_property
def text(self):
return self.body.decode('UTF-8')
@cached_property
def json(self):
return DotDict(self.text)
def json_pretty(self, indent=4):
return json.dumps(self.json, indent=indent)
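# Usage sketch (editor's example, not part of the diff): a plain GET and a JSON
# fetch with the client above. The URLs are placeholders.
client = HttpClient(appagent='MyApp/0.1')
resp = client.request('https://example.com')
print(resp.status, resp.headers.get('content-type'))

obj = client.json('https://example.com/object.json').json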

View file

@ -0,0 +1,10 @@
from datetime import datetime
start_time = datetime.now()
from .application import Application
from .config import Config, UserLevel
from .middleware import MiddlewareBase, Headers, AccessLog
from .request import Request
from .response import Response
from .view import View

View file

@ -0,0 +1,15 @@
from . import view
class ActivityPub:
def __init__(self, app):
self.app = app
self.cfg = app.cfg
def setup_views(self):
self.app.add_class_route(view.Actor)
self.app.add_class_route(view.Nodeinfo)
self.app.add_class_route(view.Webfinger)
self.app.add_class_route(view.WkHostMeta)
self.app.add_class_route(view.WkNodeinfo)

View file

@ -0,0 +1,150 @@
import multiprocessing, sanic, signal, traceback
import logging as pylog
from multidict import CIMultiDict
from multiprocessing import cpu_count, current_process
from urllib.parse import parse_qsl, urlparse
from izzylib import DotDict, Path, izzylog as logging
from izzylib.template import Template
from .config import Config, UserLevel
from .error_handlers import GenericError, MissingTemplateError
from .middleware import AccessLog, Headers
from .view import Manifest, Robots, Style
log_path_ignore = [
'/media',
'/static'
]
log_ext_ignore = [
'js', 'ttf', 'woff2',
'ac3', 'aiff', 'flac', 'm4a', 'mp3', 'ogg', 'wav', 'wma',
'apng', 'ico', 'jpeg', 'jpg', 'png', 'svg',
'divx', 'mov', 'mp4', 'webm', 'wmv'
]
frontend = Path(__file__).resolve.parent.join('frontend')
class Application(sanic.Sanic):
def __init__(self, class_views=[], **kwargs):
self.cfg = Config(**kwargs)
super().__init__(self.cfg.name, request_class=self.cfg.request_class)
for log in ['sanic.root', 'sanic.access']:
pylog.getLogger(log).setLevel(pylog.WARNING)
self.template = Template(
self.cfg.tpl_search,
self.cfg.tpl_globals,
self.cfg.tpl_context,
self.cfg.tpl_autoescape
)
self.template.add_env('app', self)
self.template.add_env('cfg', self.cfg)
self.template.add_env('len', len)
if self.cfg.tpl_default:
self.template.add_search_path(frontend)
self.add_class_route(Manifest)
self.add_class_route(Robots)
self.add_class_route(Style)
self.static('/favicon.ico', frontend.join('static/icon64.png'))
self.static('/framework/static', frontend.join('static'))
for view in class_views:
self.add_class_route(view)
self.add_error_handler(MissingTemplateError)
self.add_error_handler(GenericError)
signal.signal(signal.SIGHUP, self.finish)
signal.signal(signal.SIGINT, self.finish)
signal.signal(signal.SIGQUIT, self.finish)
signal.signal(signal.SIGTERM, self.finish)
## compat
self.start = self.run
def add_class_route(self, cls):
for route in cls.paths:
self.add_route(cls.as_view(), route)
if cls.menu:
self.set_menu_item(*cls.menu)
def add_error_handler(self, handler):
handle = handler(self)
self.error_handler.add(*handle())
def add_middleware(self, middleware):
mw = middleware(self)
self.register_middleware(mw, mw.attach)
def get_menu_item(self, name):
return self.cfg.menu[name]
def set_menu_item(self, name, path, level=0):
self.cfg.menu[name] = (path, parse_level(level))
def del_menu_item(self, name):
del self.cfg.menu[name]
def get_route_by_path(self, path, method='get', host=None,):
route, handler, _ = self.router.get(path, method, host)
return handler
def run(self):
# register built-in middleware now so they're last in the chain
self.add_middleware(Headers)
if self.cfg.access_log:
self.add_middleware(AccessLog)
msg = f'Starting {self.cfg.name} at {self.cfg.host}:{self.cfg.port}'
if self.cfg.workers > 1:
msg += f' with {self.cfg.workers} workers'
logging.info(msg)
super().run(
host = self.cfg.listen,
port = self.cfg.port,
workers = self.cfg.workers,
access_log = False,
debug = False
)
def finish(self, *args):
if self.cfg.sig_handler:
self.cfg.sig_handler(*self.cfg.sig_handler_args, **self.cfg.sig_handler_kwargs)
self.stop()
logging.info('Bye! :3')
def parse_level(level):
if type(level) == int:
level = UserLevel(level)
elif type(level) == str:
level = UserLevel[level.upper()]
elif type(level) != UserLevel:
raise TypeError(f'User level must be a string, integer, or UserLevel, not a {level.__class__.__name__}')
return level
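# Usage sketch (editor's example, not part of the diff): a minimal app with one
# class-based view. Config keys follow the defaults in config.py.
if __name__ == '__main__':
    from izzylib.http_server import Application, View

    class Home(View):
        paths = ['/']

        async def get(self, request, response):
            return response.text('Hello from IzzyLib')

    Application(name='Demo', port=8080, class_views=[Home]).run()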

View file

@ -0,0 +1,66 @@
from enum import IntEnum
from izzylib import DotDict
from multiprocessing import cpu_count
from .request import Request
from .response import Response
class UserLevel(IntEnum):
GUEST = 0
USER = 10
MODERATOR = 20
ADMIN = 30
AUTH = 1000
# Note: sub-classing the Request class breaks things for some reason
class Config(DotDict):
defaults = dict(
name = 'IzzyLib Http Server',
title = None,
version = '0.0.1',
git_repo = 'https://git.barkshark.xyz/izaliamae/izzylib',
listen = 'localhost',
host = 'localhost',
web_host = 'localhost',
alt_hosts = [],
port = 8080,
proto = 'http',
workers = cpu_count(),
access_log = True,
request_class = Request,
response_class = Response,
sig_handler = None,
sig_handler_args = [],
sig_handler_kwargs = {},
tpl_search = [],
tpl_globals = {},
tpl_context = None,
tpl_autoescape = True,
tpl_default = True,
menu = {}
)
def __init__(self, **kwargs):
if kwargs.pop('menu', None):
raise ValueError('Use the Application.set_menu_item function to set menu items')
super().__init__({**self.defaults, **kwargs})
if kwargs.get('host') and not kwargs.get('web_host'):
self.web_host = self.host
if self.name and not self.title:
self.title = self.name
def __setitem__(self, key, value):
if key not in self.defaults.keys():
raise KeyError(f'Invalid config key {key}')
if key == 'port' and not isinstance(value, int):
raise TypeError('Port must be an integer')
super().__setitem__(key, value)

View file

@ -0,0 +1,49 @@
import traceback
from izzylib import izzylog as logging
from jinja2.exceptions import TemplateNotFound
from .response import Response
class GenericError:
error = Exception
def __init__(self, app):
self.app = app
def __call__(self):
return self.error, lambda request, exception: self.handler(request, Response(self.app, request), exception)
def handler(self, request, response, exception):
try:
status = exception.status_code
msg = str(exception)
except:
msg = f'{exception.__class__.__name__}: {str(exception)}'
status = 500
if status not in range(200, 499):
traceback.print_exc()
try:
if 'json' in request.headers.get('accept', ''):
return response.json(msg, status=status)
return response.error(msg, status, pprint=True)
except Exception as e:
traceback.print_exc()
return response.text(f'{exception.__class__.__name__}: {msg}', status=500)
class MissingTemplateError(GenericError):
error = TemplateNotFound
def handler(self, request, response, exception):
logging.error('TEMPLATE_ERROR:', f'{exception.__class__.__name__}: {str(exception)}')
return response.error('I\'m a dingleberry and forgot to create a template for this page', 500)
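# Usage sketch (editor's example, not part of the diff): a handler for a custom
# exception, registered with Application.add_error_handler. The exception class
# and status code are placeholders.
class NotLoggedInError(Exception):
    pass

class NotLoggedIn(GenericError):
    error = NotLoggedInError

    def handler(self, request, response, exception):
        return response.error(str(exception), 401)

# app.add_error_handler(NotLoggedIn)  # 'app' being an Application instance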

View file

@ -0,0 +1,461 @@
:root {
--text: #eee;
--hover: {{primary.desaturate(50).lighten(50)}};
--primary: {{primary}};
--background: {{background}};
--ui: {{primary.desaturate(25).lighten(5)}};
--ui-background: {{background.lighten(7.5)}};
--shadow-color: {{black.rgba(25)}};
--shadow: 0 4px 4px 0 var(--shadow-color), 3px 0 4px 0 var(--shadow-color);
--negative: {{negative}};
--negative-dark: {{negative.darken(85)}};
--positive: {{positive}};
--positive-dark: {{positive.darken(85)}};
--message: var(--positive);
--error: var(--negative);
--gap: 15px;
--easing: cubic-bezier(.6, .05, .28, .91);
--trans-speed: {{speed}}ms;
}
body {
color: var(--text);
background-color: var(--background);
font-family: sans undertale;
font-size: 16px;
margin: 15px 0;
}
a, a:visited {
color: var(--primary);
text-decoration: none;
}
a:hover {
color: var(--hover);
text-decoration: underline;
}
input:not([type='checkbox']), select, textarea {
color: var(--text);
background-color: var(--background);
border: 1px solid var(--background);
box-shadow: 0 2px 2px 0 var(--shadow-color);
padding: 5px;
}
input:hover, select:hover, textarea:hover {
border-color: var(--hover);
}
input:focus, select:focus, textarea:focus {
outline: 0;
border-color: var(--primary);
}
details:focus, summary:focus {
outline: 0;
}
/* Classes */
.button {
display: inline-block;
padding: 5px;
background-color: {{primary.darken(85)}};
text-align: center;
box-shadow: var(--shadow);
}
.button:hover {
background-color: {{primary.darken(65)}};
text-decoration: none;
}
.grid-container {
display: grid;
grid-template-columns: auto;
grid-gap: var(--gap);
}
.grid-item {
display: inline-grid;
}
.flex-container {
display: flex;
flex-wrap: wrap;
}
.menu {
list-style-type: none;
padding: 0;
margin: 0;
}
.menu li {
display: inline-block;
text-align: center;
min-width: 60px;
background-color: {{background.lighten(20)}};
}
.menu li a {
display: block;
padding-left: 5px;
padding-right: 5px;
}
.menu li:hover {
background-color: {{primary.lighten(25).desaturate(25)}};
}
.menu li a:hover {
text-decoration: none;
color: {{primary.darken(90).desaturate(50)}};
}
.section {
padding: 8px;
background-color: var(--ui-background);
box-shadow: var(--shadow);
}
.shadow {
box-shadow: 0 4px 4px 0 var(--shadow-color), 3px 0 4px 0 var(--shadow-color);
}
.message {
line-height: 2em;
display: block;
}
/* # this is kinda hacky and needs to be replaced */
.tooltip:hover::after {
position: relative;
padding: 8px;
bottom: 35px;
border-radius: 5px;
white-space: nowrap;
border: 1px solid var(--text);
color: var(--text);
background-color: {{primary.desaturate(50).darken(75)}};
box-shadow: var(--shadow);
/*z-index: -1;*/
}
/* ids */
#title {
font-size: 36px;
font-weight: bold;
text-align: center;
}
#message, #error {
padding: 10px;
color: var(--background);
margin-bottom: var(--gap);
text-align: center;
}
#message {
background-color: var(--message);
}
#error {
background-color: var(--error);
}
#body {
width: 790px;
margin: 0 auto;
}
#header {
display: flex;
margin-bottom: var(--gap);
text-align: center;
font-size: 2em;
line-height: 40px;
font-weight: bold;
}
#header > div {
/*display: inline-block;*/
height: 40px;
}
#header .page-title {
text-align: {% if menu_left %}right{% else %}left{% endif %};
white-space: nowrap;
overflow: hidden;
width: 100%;
}
#content-body .title {
text-align: center;
font-size: 2em;
font-weight: bold;
color: var(--primary);
}
#footer {
margin-top: var(--gap);
display: flex;
grid-gap: 5px;
font-size: 0.80em;
line-height: 20px;
}
#footer > div {
height: 20px;
}
#footer .avatar img {
margin: 0 auto;
}
#footer .user {
white-space: nowrap;
overflow: hidden;
width: 100%;
}
#footer .source {
white-space: nowrap;
}
#logreg input, textarea {
display: block;
margin: 8px auto;
}
#logreg textarea, input:not([type='submit']) {
width: 50%;
}
/* Main menu */
#btn {
cursor: pointer;
transition: left 500ms var(--easing);
}
#btn {
transition: background-color var(--trans-speed);
width: 55px;
margin-left: var(--gap);
background-image: url('/framework/static/menu.svg');
background-size: 50px;
background-position: center center;
background-repeat: no-repeat;
}
#btn div {
transition: transform var(--trans-speed) ease, opacity var(--trans-speed), background-color var(--trans-speed);
}
#btn.active {
margin-left: 0;
position: fixed;
z-index: 5;
top: 12px;
{% if menu_left %}right: calc(100% - 250px + 12px){% else %}right: 12px{% endif %};
background-color: {{primary.darken(75)}};
color: {{background}};
}
/*#btn.active div {
width: 35px;
height: 2px;
margin-bottom: 8px;
}*/
#btn.active:parent {
grid-template-columns: auto;
}
#menu {
position: fixed;
z-index: 4;
overflow: auto;
top: 0px;
opacity: 0;
padding: 20px 0px;
width: 250px;
height: 100%;
transition: all var(--trans-speed) ease;
{% if menu_left %}left{% else %}right{% endif %}: -250px;
}
#menu.active {
{% if menu_left %}left{% else %}right{% endif %}: 0;
opacity: 1;
}
#menu #items {
/*margin-top: 50px;*/
margin-bottom: 30px;
}
#menu a:hover {
text-decoration: none;
}
#menu {
font-weight: bold;
}
#menu .item {
display: block;
position: relative;
font-size: 2em;
transition: all var(--trans-speed);
padding-left: 20px;
}
#menu .title-item {
color: var(--primary);
}
#items .sub-item {
padding-left: 40px;
}
#items .item:not(.title-item):hover {
padding-left: 40px;
}
#items .sub-item:hover {
padding-left: 60px !important;
}
/*#menu details .item:hover {
padding-left: 60px;
}*/
#items summary {
cursor: pointer;
color: var(--primary);
}
#items details[open]>.item:not(details) {
animation-name: fadeInDown;
animation-duration: var(--trans-speed);
}
#items summary::-webkit-details-marker {
display: none;
}
#items details summary:after {
content: " +";
}
#items details[open] summary:after {
content: " -";
}
#btn, #btn * {
will-change: transform;
}
#menu {
will-change: transform, opacity;
}
@keyframes fadeInDown {
0% {
opacity: 0;
transform: translateY(-1.25em);
}
100% {
opacity: 1;
transform: translateY(0);
}
}
{% for name in cssfiles %}
{% include 'style/' + name + '.css' %}
{% endfor %}
/* responsive design */
@media (max-width: 810px) {
body {
margin: 0;
}
#body {
width: auto;
}
}
@media (max-width: 610px) {
.settings .grid-container {
grid-template-columns: auto;
}
.settings .label {
text-align: center;
}
#logreg textarea, input:not([type='submit']) {
width: calc(100% - 16px);
}
}
/* scrollbar */
body {scrollbar-width: 15px; scrollbar-color: var(--primary) {{background.darken(10)}};}
::-webkit-scrollbar {width: 15px;}
::-webkit-scrollbar-track {background: {{background.darken(10)}};}
/*::-webkit-scrollbar-button {background: var(--primary);}
::-webkit-scrollbar-button:hover {background: var(--text);}*/
::-webkit-scrollbar-thumb {background: var(--primary);}
::-webkit-scrollbar-thumb:hover {background: {{primary.lighten(25)}};}
/* page font */
@font-face {
font-family: 'sans undertale';
src: local('Nunito Sans Bold'),
url('/framework/static/nunito/NunitoSans-SemiBold.woff2') format('woff2'),
url('/framework/static/nunito/NunitoSans-SemiBold.ttf') format('ttf');
font-weight: bold;
font-style: normal;
}
@font-face {
font-family: 'sans undertale';
src: local('Nunito Sans Light Italic'),
url('/framework/static/nunito/NunitoSans-ExtraLightItalic.woff2') format('woff2'),
url('/framework/static/nunito/NunitoSans-ExtraLightItalic.ttf') format('ttf');
font-weight: normal;
font-style: italic;
}
@font-face {
font-family: 'sans undertale';
src: local('Nunito Sans Bold Italic'),
url('/framework/static/nunito/NunitoSans-Italic.woff2') format('woff2'),
url('/framework/static/nunito/NunitoSans-Italic.ttf') format('ttf');
font-weight: bold;
font-style: italic;
}
@font-face {
font-family: 'sans undertale';
src: local('Nunito Sans Light'),
url('/framework/static/nunito/NunitoSans-Light.woff2') format('woff2'),
url('/framework/static/nunito/NunitoSans-Light.ttf') format('ttf');
font-weight: normal;
font-style: normal;
}

View file

@ -0,0 +1,50 @@
<!DOCTYPE html>
%html
%head
%title << {{cfg.title}}: {{page}}
%link(rel='stylesheet' type='text/css' href='/framework/style.css')
%link(rel='manifest' href='/framework/manifest.json')
%meta(charset='UTF-8')
%meta(name='viewport' content='width=device-width, initial-scale=1')
-block head
%body
#body
#header.flex-container
-if menu_left
#btn.section
.page-title.section -> %a.title(href='/') << {{cfg.title}}
-else
.page-title.section -> %a.title(href='/') << {{cfg.title}}
#btn.section
-if message
#message.section << {{message}}
-if error
#error.section << {{error}}
#menu.section
.title-item.item << Menu
#items
-if not len(cfg.menu):
-include 'menu.haml'
-else:
-for label, path_data in cfg.menu.items()
-if path_data[1] == 1000 and request.user_level == 0:
.item -> %a(href='{{path_data[0]}}') << {{label}}
-elif request.user_level >= path_data[1]
.item -> %a(href='{{path_data[0]}}') << {{label}}
#content-body.section
-block content
#footer.grid-container.section
.avatar
.user
.source
%a(href='{{cfg.git_repo}}' target='_new') << {{cfg.name}}/{{cfg.version}}
%script(type='application/javascript' src='/framework/static/menu.js')

View file

@ -0,0 +1,8 @@
-extends 'base.haml'
-set page = 'Error'
-block content
%center
%font(size='8')
HTTP {{response.status}}
%br
=error_message

View file

@ -0,0 +1 @@
.item -> %a(href='/') << Home

(binary image added: 64 KiB)

(binary image added: 3.8 KiB)

View file

@ -0,0 +1,29 @@
const sidebarBox = document.querySelector('#menu'),
sidebarBtn = document.querySelector('#btn'),
pageWrapper = document.querySelector('html'),
header = document.querySelector('#header');
sidebarBtn.addEventListener('click', event => {
sidebarBtn.classList.toggle('active');
sidebarBox.classList.toggle('active');
});
pageWrapper.addEventListener('click', event => {
const itemId = event.srcElement.id;
const itemClass = event.srcElement.className;
const indexId = ['menu', 'btn', 'items'].indexOf(itemId);
const indexClass = ['item', 'item name', 'items'].indexOf(itemClass);
if (sidebarBox.classList.contains('active') && (indexId == -1 && indexClass == -1)) {
sidebarBtn.classList.remove('active');
sidebarBox.classList.remove('active');
}
});
window.addEventListener('keydown', event => {
if (sidebarBox.classList.contains('active') && event.keyCode === 27) {
sidebarBtn.classList.remove('active');
sidebarBox.classList.remove('active');
}
});

View file

@ -0,0 +1,84 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
sodipodi:docname="menu.svg"
inkscape:version="1.0.1 (3bc2e813f5, 2020-09-07)"
id="svg8"
version="1.1"
viewBox="0 0 132.29167 79.375002"
height="300"
width="500">
<defs
id="defs2" />
<sodipodi:namedview
inkscape:window-maximized="1"
inkscape:window-y="36"
inkscape:window-x="36"
inkscape:window-height="990"
inkscape:window-width="1644"
units="px"
showgrid="true"
inkscape:document-rotation="0"
inkscape:current-layer="layer2"
inkscape:document-units="mm"
inkscape:cy="151.34478"
inkscape:cx="232.18877"
inkscape:zoom="1.4"
inkscape:pageshadow="2"
inkscape:pageopacity="0.0"
borderopacity="1.0"
bordercolor="#666666"
pagecolor="#ffffff"
id="base"
inkscape:snap-text-baseline="true"
inkscape:snap-intersection-paths="true"
inkscape:snap-bbox="true"
inkscape:bbox-nodes="true"
fit-margin-top="0"
fit-margin-left="0"
fit-margin-right="0"
fit-margin-bottom="0">
<inkscape:grid
dotted="true"
id="grid1402"
type="xygrid"
originx="-7.9375001"
originy="-27.781234" />
</sodipodi:namedview>
<metadata
id="metadata5">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
<dc:title></dc:title>
</cc:Work>
</rdf:RDF>
</metadata>
<g
inkscape:label="Layer 1"
id="layer2"
inkscape:groupmode="layer"
transform="translate(-7.9374999,-27.781233)">
<path
style="fill:none;fill-opacity:1;stroke:#cfcfcf;stroke-width:13.2292;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="m 15.875,67.468765 c 116.41667,0 116.41667,0 116.41667,0 z"
id="path1590" />
<path
style="fill:none;fill-opacity:1;stroke:#cfcfcf;stroke-width:13.2292;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="m 15.875,35.718766 c 116.41667,0 116.41667,0 116.41667,0 z"
id="path1590-7" />
<path
style="fill:none;fill-opacity:1;stroke:#cfcfcf;stroke-width:13.2292;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="m 15.875,99.218766 c 116.41667,0 116.41667,0 116.41667,0 z"
id="path1590-7-8" />
</g>
</svg>


View file

@ -0,0 +1,44 @@
Copyright 2016 The Nunito Project Authors (contact@sansoxygen.com),
This Font Software is licensed under the SIL Open Font License, Version 1.1.
This license is copied below, and is also available with a FAQ at: http://scripts.sil.org/OFL
-----------------------------------------------------------
SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007
-----------------------------------------------------------
PREAMBLE
The goals of the Open Font License (OFL) are to stimulate worldwide development of collaborative font projects, to support the font creation efforts of academic and linguistic communities, and to provide a free and open framework in which fonts may be shared and improved in partnership with others.
The OFL allows the licensed fonts to be used, studied, modified and redistributed freely as long as they are not sold by themselves. The fonts, including any derivative works, can be bundled, embedded, redistributed and/or sold with any software provided that any reserved names are not used by derivative works. The fonts and derivatives, however, cannot be released under any other type of license. The requirement for fonts to remain under this license does not apply to any document created using the fonts or their derivatives.
DEFINITIONS
"Font Software" refers to the set of files released by the Copyright Holder(s) under this license and clearly marked as such. This may include source files, build scripts and documentation.
"Reserved Font Name" refers to any names specified as such after the copyright statement(s).
"Original Version" refers to the collection of Font Software components as distributed by the Copyright Holder(s).
"Modified Version" refers to any derivative made by adding to, deleting, or substituting -- in part or in whole -- any of the components of the Original Version, by changing formats or by porting the Font Software to a new environment.
"Author" refers to any designer, engineer, programmer, technical writer or other person who contributed to the Font Software.
PERMISSION & CONDITIONS
Permission is hereby granted, free of charge, to any person obtaining a copy of the Font Software, to use, study, copy, merge, embed, modify, redistribute, and sell modified and unmodified copies of the Font Software, subject to the following conditions:
1) Neither the Font Software nor any of its individual components, in Original or Modified Versions, may be sold by itself.
2) Original or Modified Versions of the Font Software may be bundled, redistributed and/or sold with any software, provided that each copy contains the above copyright notice and this license. These can be included either as stand-alone text files, human-readable headers or in the appropriate machine-readable metadata fields within text or binary files as long as those fields can be easily viewed by the user.
3) No Modified Version of the Font Software may use the Reserved Font Name(s) unless explicit written permission is granted by the corresponding Copyright Holder. This restriction only applies to the primary font name as presented to the users.
4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font Software shall not be used to promote, endorse or advertise any Modified Version, except to acknowledge the contribution(s) of the Copyright Holder(s) and the Author(s) or with their explicit written permission.
5) The Font Software, modified or unmodified, in part or in whole, must be distributed entirely under this license, and must not be distributed under any other license. The requirement for fonts to remain under this license does not apply to any document created using the Font Software.
TERMINATION
This license becomes null and void if any of the above conditions are not met.
DISCLAIMER
THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE FONT SOFTWARE.

View file

@ -0,0 +1,82 @@
import multiprocessing
from datetime import datetime, timedelta, timezone
from izzylib import izzylog as logging, logging as applog
from . import start_time
from .response import Response
cache_types = [
'text/css',
'application/javascript',
]
cache_base_types = [
'image',
'audio',
'video'
]
def cache_check(request, response):
content_type = response.headers.get('content-type')
if request.path.startswith('/framework'):
return True
if not content_type:
return False
if content_type in cache_types:
return True
if any(map(content_type.startswith, cache_base_types)):
return True
class MiddlewareBase:
attach = 'request'
def __init__(self, app):
self.app = app
self.cfg = app.cfg
async def __call__(self, request, response=None):
if not response:
response = Response(self.app, request)
return await self.handler(request, response)
async def handler(self, request, response):
pass
class Headers(MiddlewareBase):
attach = 'response'
async def handler(self, request, response):
if not response.headers.get('content-type'):
if request.path.endswith('.css'):
response.headers['content-type'] = 'text/css'
elif request.path.endswith('.js'):
response.headers['content-type'] = 'application/javascript'
if cache_check(request, response):
max_age = int(timedelta(weeks=2).total_seconds())
response.headers['Cache-Control'] = f'immutable,private,max-age={max_age}'
response.headers['Server'] = f'{self.cfg.name}/{self.cfg.version}'
response.headers['Trans'] = 'Rights'
class AccessLog(MiddlewareBase):
attach = 'response'
async def handler(self, request, response):
uagent = request.headers.get('user-agent', 'None')
applog.info(f'({multiprocessing.current_process().name}) {request.address} {request.method} {request.path} {response.status} "{uagent}"')
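# Usage sketch (editor's example, not part of the diff): a request-phase
# middleware registered through Application.add_middleware. The attribute name
# set on the request is a placeholder.
class RequestTimer(MiddlewareBase):
    attach = 'request'

    async def handler(self, request, response):
        request.ctx.started = datetime.now()

# app.add_middleware(RequestTimer)  # 'app' being an Application instance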

View file

@ -0,0 +1,41 @@
import json
from izzylib import LowerDotDict
def ReplaceHeader(headers, key, value):
for k in list(headers):
if k.lower() == key.lower():
del headers[k]
headers[key] = value
class Headers(LowerDotDict):
def __init__(self, headers):
super().__init__()
for k,v in headers.items():
if not self.get(k):
self[k] = []
self[k].append(v)
def getone(self, key, default=None):
value = self.get(key)
if not value:
return default
return value[0]
def getall(self, key, default=[]):
return self.get(key.lower(), default)
def to_json(self, indent=None):
return json.dumps(self.to_dict(), indent=indent)
def to_dict(self):
return {k: self.getone(k) for k in self}
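# Usage sketch (editor's example, not part of the diff): the case-insensitive,
# multi-value header wrapper above.
h = Headers({'Accept': 'text/html', 'X-Token': 'abc'})
print(h.getone('accept'))   # 'text/html'
print(h.getall('x-token'))  # ['abc']
print(h.to_json(indent=2))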

View file

@ -0,0 +1,108 @@
import sanic
from izzylib import DotDict, izzylog as logging
from urllib.parse import parse_qsl
from .misc import Headers
class Request(sanic.request.Request):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.Headers = Headers(self.headers)
self.address = self.headers.get('x-real-ip', self.forwarded.get('for', self.remote_addr))
self.data = Data(self)
self.template = self.app.template
self.user_level = 0
self.setup()
def setup(self):
pass
def response(self, tpl, *args, **kwargs):
return self.template.response(self, tpl, *args, **kwargs)
def alldata(self):
return DotDict(
**self.data.json,
**self.data.query,
**self.data.form
)
def json_check(self):
if self.path.endswith('.json'):
return True
accept = self.headers.getone('Accept', None)
if accept:
mimes = [v.strip() for v in accept.split(',')]
if any(mime in ['application/json', 'application/activity+json'] for mime in mimes):
return True
return False
class Data(object):
def __init__(self, request):
self.request = request
@property
def combined(self):
return DotDict(**self.form, **self.query, **self.json)
@property
def query(self):
data = {k: v for k,v in parse_qsl(self.request.query_string)}
return DotDict(data)
@property
def form(self):
data = {k: v[0] for k,v in self.request.form.items()}
return DotDict(data)
@property
def files(self):
return DotDict({k:v[0] for k,v in self.request.files.items()})
### body functions
@property
def raw(self):
try:
return self.request.body
except Exception as e:
logging.verbose('IzzyLib.http_server.Data.raw: failed to get body')
logging.debug(f'{e.__class__.__name__}: {e}')
return b''
@property
def text(self):
try:
return self.raw.decode()
except Exception as e:
logging.verbose('IzzyLib.http_server.Data.text: failed to get body')
logging.debug(f'{e.__class__.__name__}: {e}')
return ''
@property
def json(self):
try:
return DotDict(self.text)
except Exception as e:
logging.verbose('IzzyLib.http_server.Data.json: failed to get body')
logging.debug(f'{e.__class__.__name__}: {e}')
return DotDict()

View file

@ -0,0 +1,232 @@
import json, sanic
from datetime import datetime, timedelta
from izzylib import DotDict, izzylog
from izzylib.template import Color
from sanic.compat import Header
from sanic.cookies import CookieJar
from sanic.response import text as Raw
class Response:
content_types = DotDict({
'text': 'text/plain',
'html': 'text/html',
'css': 'text/css',
'javascript': 'application/javascript',
'json': 'application/json',
'activitypub': 'application/activity+json'
})
default_theme = DotDict({
'primary': Color('#e7a'),
'secondary': Color('#a7e'),
'background': Color('#191919'),
'positive': Color('#aea'),
'negative': Color('#e99'),
'white': Color('#eee'),
'black': Color('#111'),
'speed': 250
})
cookie_keys = {
'expires': 'Expires',
'path': 'Path',
'comment': 'Comment',
'domain': 'Domain',
'max_age': 'Max-Age',
'secure': 'Secure',
'httponly': 'HttpOnly',
'version': 'Version',
'samesite': 'SameSite'
}
def __init__(self, app, request, body=None, headers={}, cookies={}, status=200, content_type='text/html'):
# server objects
self.app = app
self.cfg = app.cfg
self.request = request
# init vars
self._body = None
self._content_type = content_type
self.headers = Header(headers)
self.cookies = CookieJar(self.headers)
self.body = body
self.status = status
for key, value in cookies.items():
self.set_cookie(key, value)
def __str__(self):
return str(self.body)
def __bytes__(self):
return self.body if isinstance(self.body, bytes) else self.body.encode('utf-8')
def __repr__(self):
return self.get_response()
@property
def body(self):
return self._body
@body.setter
def body(self, body):
if not body:
self._body = b''
return
if self.content_type in [self.content_types.json, self.content_types.activitypub]:
body = json.dumps(body)
if isinstance(body, str):
self._body = body.encode('utf-8')
elif isinstance(body, bytes):
self._body = body
else:
raise TypeError(f'Response body must be a string or bytes, not {body.__class__.__name__}')
@property
def content_type(self):
return self._content_type
@content_type.setter
def content_type(self, ctype):
self._content_type = self.content_types.get(ctype, ctype)
def set_headers(self, data: dict):
try:
self.content_type = data.pop('content-type')
except KeyError:
pass
self.headers.clear()
self.headers.update(data)
def set_cookie(self, key, value, data={}, **kwargs):
self.cookies[key] = value
data.update(kwargs)
for k,v in data.items():
k = self.cookie_keys.get(k, k)
if k.lower() == 'max-age':
if isinstance(v, timedelta):
v = int(v.total_seconds())
elif not isinstance(v, int):
raise TypeError('Max-Age must be an integer or timedelta')
elif k.lower() == 'expires':
if isinstance(v, str):
v = datetime.strptime(v, '%a, %d-%b-%Y %T GMT')
elif not isinstance(v, datetime):
raise TypeError('Expires must be a string or datetime')
try:
self.cookies[key][k] = v
except KeyError as e:
izzylog.error('Invalid cookie key:', k)
def get_cookie(self, key):
try:
cookie = self.cookies[key]
except KeyError:
return None
return cookie
def del_cookie(self, key):
del self.cookies[key]
def template(self, tplfile, context={}, headers={}, status=200, content_type='text/html', pprint=False):
self.status = status
context.update({
'response': self,
**self.default_theme
})
html = self.app.template.render(tplfile, context, request=self.request, pprint=pprint)
return self.html(html, headers=headers, status=status, content_type=content_type)
def error(self, message, status=500, **kwargs):
if self.request and 'json' in self.request.headers.get('accept', ''):
kwargs.pop('pprint', None)
return self.json({f'error {status}': message}, status=status, **kwargs)
return self.template('error.haml', {'error_message': message}, status=status, **kwargs)
def json(self, body={}, headers={}, status=200, content_type='application/json'):
body = json.dumps(body)
return self.get_response(body, headers, status, content_type)
def text(self, body, headers={}, status=200, content_type='text/plain'):
return self.get_response(body, headers, status, content_type)
def html(self, *args, **kwargs):
self.content_type = 'text/html'
return self.text(*args, **kwargs)
def css(self, *args, **kwargs):
self.content_type = 'text/css'
return self.text(*args, **kwargs)
def javascript(self, *args, **kwargs):
self.content_type = 'application/javascript'
return self.text(*args, **kwargs)
def activitypub(self, *args, **kwargs):
self.content_type = 'application/activity+json'
return self.text(*args, **kwargs)
def redir(self, path, status=302, headers={}):
headers.update(dict(location=path))
return self.text(body=None, status=status, headers=headers)
#return sanic.response.redirect(path, status=status, headers={})
def set_data(self, body=None, headers={}, status=200, content_type='text/html'):
ctype = self.content_types.get(content_type, content_type)
self.body = body
self.status = status
self.content_type = content_type
self.headers.update(headers)
self.headers.pop('content-type', None)
def get_response(self, *args, **kwargs):
self.set_data(*args, **kwargs)
response = sanic.response.raw(self.body, self.status, self.headers, self.content_type)
response._cookies = self.cookies
return response

113
izzylib/http_server/view.py Normal file
View file

@ -0,0 +1,113 @@
from izzylib.template import Color
from sanic.views import HTTPMethodView
from .response import Response
class View(HTTPMethodView):
paths = []
menu = None # example: ("Home", "/", 0)
def dispatch_request(self, request, *args, **kwargs):
self.app = request.app
self.cfg = request.app.cfg
handler = getattr(self, request.method.lower(), None)
return handler(request, Response(self.app, request), *args, **kwargs)
### Frontend Template Views ###
class Manifest(View):
paths = ['/framework/manifest.json']
async def get(self, request, response):
data = {
'name': self.cfg.name,
'short_name': self.cfg.name.replace(' ', ''),
'description': 'UvU',
'icons': [
{
'src': "/framework/static/icon512.png",
'sizes': '512x512',
'type': 'image/png'
},
{
'src': "/framework/static/icon64.png",
'sizes': '64x64',
'type': 'image/png'
}
],
'theme_color': str(response.default_theme.primary),
'background_color': str(response.default_theme.background),
'display': 'standalone',
'start_url': '/',
'scope': f'{self.cfg.proto}://{self.cfg.web_host}'
}
return response.json(data)
class Robots(View):
paths = ['/robots.txt']
async def get(self, request, response):
data = '# Disallow all\nUser-agent: *\nDisallow: /'
return response.text(data)
class Style(View):
paths = ['/framework/style.css']
async def get(self, request, response):
resp = response.template('base.css', content_type='text/css')
return resp
### ActivityPub Views ###
class Actor(View):
paths = ['/actor', '/actor/<actor>', '/inbox']
def get(self, request, response, actor=None):
if not actor:
actor = 'system'
pass
def post(self, request, response, actor=None):
if not actor:
actor = 'system'
pass
class Nodeinfo(View):
paths = ['/nodeinfo/2.0.json']
def get(self, request, response):
pass
class Webfinger(View):
paths = ['/.well-known/webfinger']
def get(self, request, response):
pass
class WkHostMeta(View):
paths = ['/.well-known/host-meta']
def get(self, request, response):
pass
class WkNodeinfo(View):
paths = ['/.well-known/nodeinfo']
def get(self, request, response):
pass
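# Usage sketch (editor's example, not part of the diff): a class-based view with
# a menu entry, registered through Application.add_class_route. The template
# name is a placeholder.
class About(View):
    paths = ['/about']
    menu = ('About', '/about', 0)

    async def get(self, request, response):
        return response.template('about.haml', {'page': 'About'})

# app.add_class_route(About)  # 'app' being an Application instance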

View file

@ -0,0 +1,31 @@
from .signatures import (
verify_request,
verify_headers,
parse_signature,
fetch_actor,
fetch_instance,
fetch_nodeinfo,
fetch_webfinger_account,
generate_rsa_key
)
from . import error
from .client import HttpUrllibClient, set_default_client
from .request import HttpUrllibRequest
from .response import HttpUrllibResponse
#__all__ = [
#'HttpRequestsClient',
#'HttpRequestsRequest',
#'HttpRequestsResponse',
#'fetch_actor',
#'fetch_instance',
#'fetch_nodeinfo',
#'fetch_webfinger_account',
#'generate_rsa_key',
#'parse_signature',
#'set_requests_client',
#'verify_headers',
#'verify_request',
#]

View file

@ -0,0 +1,128 @@
import json, sys, urllib3
try:
from PIL import Image
except ImportError:
Image = None
from base64 import b64encode
from datetime import datetime
from functools import cached_property
from io import BytesIO
from izzylib import DefaultDotDict, DotDict, LowerDotDict, Path, izzylog as logging, __version__
from izzylib.exceptions import HttpFileDownloadedError
from ssl import SSLCertVerificationError
from urllib.parse import urlparse
from .request import HttpUrllibRequest
from .response import HttpUrllibResponse
from .signatures import set_client
Client = None
proxy_ports = {
'http': 80,
'https': 443
}
class HttpUrllibClient:
def __init__(self, headers={}, useragent=None, appagent=None, proxy_type='https', proxy_host=None, proxy_port=None, num_pools=20):
if not useragent:
useragent = f'IzzyLib/{__version__}'
self.headers = {k:v.lower() for k,v in headers.items()}
self.agent = f'{useragent} ({appagent})' if appagent else useragent
if proxy_type not in ['http', 'https']:
raise ValueError(f'Not a valid proxy type: {proxy_type}')
if proxy_host:
proxy = f'{proxy_type}://{proxy_host}:{proxy_ports[proxy_type] if not proxy_port else proxy_port}'
self.pool = urllib3.ProxyManager(proxy, num_pools=num_pools)
else:
self.pool = urllib3.PoolManager(num_pools=num_pools)
@property
def agent(self):
return self.headers['user-agent']
@agent.setter
def agent(self, value):
self.headers['user-agent'] = value
def set_global(self):
set_default_client(self)
def build_request(self, *args, **kwargs):
return HttpUrllibRequest(*args, **kwargs)
def handle_request(self, request):
request.headers.update(self.headers)
response = self.pool.urlopen(*request._args, **request._kwargs)
return HttpUrllibResponse(response)
def request(self, *args, **kwargs):
return self.handle_request(self.build_request(*args, **kwargs))
def signed_request(self, privkey, keyid, *args, **kwargs):
return self.request(*args, privkey=privkey, keyid=keyid, **kwargs)
def download(self, url, filepath, *args, filename=None, **kwargs):
resp = self.request(url, *args, **kwargs)
if resp.status != 200:
raise HttpFileDownloadedError(f'Failed to download {url}: Status: {resp.status}, Body: {resp.body}')
return resp.save(filepath)
def image(self, url, filepath, *args, filename=None, ext='png', dimensions=(50, 50), **kwargs):
if not Image:
logging.error('Pillow module is not installed')
return
resp = self.request(url, *args, **kwargs)
if resp.status != 200:
logging.error(f'Failed to download {url}:', resp.status, resp.body)
return False
if not filename:
filename = Path(url).stem()
path = Path(filepath)
if not path.exists:
logging.error('Path does not exist:', path)
return False
byte = BytesIO()
image = Image.open(BytesIO(resp.body))
image.thumbnail(dimensions)
image.save(byte, format=ext.upper())
with path.join(filename).open('wb') as fd:
fd.write(byte.getvalue())
def json(self, *args, headers={}, activity=True, **kwargs):
json_type = 'activity+json' if activity else 'json'
headers.update({
'accept': f'application/{json_type}'
})
return self.request(*args, headers=headers, **kwargs)
def set_default_client(client=None):
global Client
Client = client or HttpUrllibClient()
set_client(Client)
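# Usage sketch (editor's example, not part of the diff): plain and signed
# requests with the urllib3-based client. URLs and key values are placeholders.
client = HttpUrllibClient(appagent='MyApp/0.1')
client.set_global()  # also makes it the default client for the signature helpers

resp = client.request('https://example.com/object.json')
print(resp.status)
print(resp.dict)  # body parsed into a DotDict

# signed = client.signed_request(privkey, 'https://example.com/actor#main-key',
#     'https://remote.example/inbox', method='post', body={'type': 'Follow'})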

View file

@ -0,0 +1,36 @@
from urllib3.exceptions import (
HTTPError,
PoolError,
RequestError,
SSLError,
ProxyError,
DecodeError,
ProtocolError,
MaxRetryError,
HostChangedError,
TimeoutStateError,
TimeoutError,
ReadTimeoutError,
NewConnectionError,
EmptyPoolError,
ClosedPoolError,
LocationValueError,
LocationParseError,
URLSchemeUnknown,
ResponseError,
SecurityWarning,
InsecureRequestWarning,
SystemTimeWarning,
InsecurePlatformWarning,
SNIMissingWarning,
DependencyWarning,
ResponseNotChunked,
BodyNotHttplibCompatible,
IncompleteRead,
InvalidChunkLength,
InvalidHeader,
ProxySchemeUnknown,
ProxySchemeUnsupported,
HeaderParsingError,
UnrewindableBodyError
)

View file

@ -0,0 +1,111 @@
import json
from Crypto.Hash import SHA256
from izzylib import DotDict, LowerDotDict, Url, boolean
from base64 import b64decode, b64encode
from datetime import datetime
from izzylib import izzylog as logging
from .signatures import sign_pkcs_headers
methods = ['delete', 'get', 'head', 'options', 'patch', 'post', 'put']
class HttpUrllibRequest:
def __init__(self, url, **kwargs):
self._body = b''
method = kwargs.get('method', 'get').lower()
if method not in methods:
raise ValueError(f'Invalid method: {method}')
self.url = Url(url)
self.body = kwargs.get('body')
self.method = method
self.headers = LowerDotDict(kwargs.get('headers', {}))
self.redirect = boolean(kwargs.get('redirect', True))
self.retries = int(kwargs.get('retries', 10))
self.timeout = int(kwargs.get('timeout', 5))
privkey = kwargs.get('privkey')
keyid = kwargs.get('keyid')
if privkey and keyid:
self.sign(privkey, keyid)
@property
def _args(self):
return [self.method.upper(), self.url]
@property
def _kwargs(self):
return {
'body': self.body,
'headers': self.headers,
'redirect': self.redirect,
'retries': self.retries,
'timeout': self.timeout
}
@property
def body(self):
return self._body
@body.setter
def body(self, data):
if isinstance(data, dict):
data = DotDict(data).to_json()
elif isinstance(data, (list, tuple)):
data = json.dumps(data)
if data is None:
data = b''
elif not isinstance(data, bytes):
data = bytes(data, 'utf-8')
self._body = data
def set_header(self, key, value):
self.headers[key] = value
def unset_header(self, key):
self.headers.pop(key, None)
def sign(self, privkey, keyid):
self.unset_header('signature')
self.set_header('(request-target)', f'{self.method.lower()} {self.url.path}')
self.set_header('host', self.url.host)
self.set_header('date', datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT'))
if self.body:
body_hash = b64encode(SHA256.new(self.body).digest()).decode("UTF-8")
self.set_header('digest', f'SHA-256={body_hash}')
self.set_header('content-length', str(len(self.body)))
sig = {
'keyId': keyid,
'algorithm': 'rsa-sha256',
'headers': ' '.join([k.lower() for k in self.headers.keys()]),
'signature': b64encode(sign_pkcs_headers(privkey, self.headers)).decode('UTF-8')
}
sig_items = [f'{k}="{v}"' for k,v in sig.items()]
sig_string = ','.join(sig_items)
self.set_header('signature', sig_string)
self.unset_header('(request-target)')
self.unset_header('host')
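For illustration, constructing and signing a request directly with the class above; the module path, the inbox URL, and the key id are placeholders, and the private key is generated on the fly with pycryptodome:

    from Crypto.PublicKey import RSA
    from izzylib.http_urllib_client import HttpUrllibRequest  # module path assumed

    privkey = RSA.generate(2048).export_key().decode()

    request = HttpUrllibRequest(
        'https://example.com/inbox',
        method='post',
        body={'type': 'Follow'},                          # dicts are serialized to JSON bytes
        headers={'content-type': 'application/activity+json'},
        privkey=privkey,                                  # passing privkey + keyid signs in __init__
        keyid='https://mydomain.example/actor#main-key',
    )
    print(request.headers['signature'])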

View file

@@ -0,0 +1,97 @@
import json
from io import BytesIO
from izzylib import DefaultDotDict, DotDict, Path, Url
class HttpUrllibResponse:
def __init__(self, response):
self.response = response
self._dict = None
def __getitem__(self, key):
return self.dict[key]
def __setitem__(self, key, value):
self.dict[key] = value
@property
def encoding(self):
for line in self.headers.get('content-type', '').split(';'):
try:
k,v = line.split('=')
if k.strip().lower() == 'charset':
return v.lower()
except:
pass
return 'utf-8'
@property
def headers(self):
return self.response.headers
@property
def status(self):
return self.response.status
@property
def url(self):
return Url(self.response.geturl())
@property
def body(self):
data = self.response.read(cache_content=True)
if not data:
data = self.response.data
return data
@property
def text(self):
return self.body.decode(self.encoding)
@property
def dict(self):
if not self._dict:
self._dict = DotDict(self.text)
return self._dict
## alias so callers can use resp.json as well as resp.dict
json = dict
def json_pretty(self, indent=4):
return self.dict.to_json(indent)
def chunks(self, size=1024):
return self.response.stream(amt=size)
def save(self, path, overwrite=True, create_parents=True):
path = Path(path)
if not path.parent.exists:
if not create_parents:
raise ValueError(f'Path does not exist: {path.parent}')
path.parent.mkdir()
if overwrite and path.exists:
path.delete()
with path.open('wb') as fd:
for chunk in self.chunks():
fd.write(chunk)
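A short sketch of consuming a response through the wrapper above, again assuming the client class name and using example.com as a placeholder host:

    from izzylib.http_urllib_client import HttpUrllibClient  # class name assumed

    client = HttpUrllibClient(appagent='MyApp/0.1')

    resp = client.request('https://example.com/')
    print(resp.status, resp.encoding)
    print(resp.text[:120])

    # download() raises HttpFileDownloadedError on a non-200 status,
    # otherwise it streams the body to disk via HttpUrllibResponse.save()
    client.download('https://example.com/robots.txt', '/tmp/robots.txt')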

View file

@@ -0,0 +1,280 @@
import json, requests, sys
from PIL import Image
from Crypto.Hash import SHA256
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5
from base64 import b64decode, b64encode
from datetime import datetime
from functools import lru_cache
from izzylib import DefaultDotDict, DotDict
from izzylib import izzylog
from tldextract import extract
from urllib.parse import urlparse
Client = None
def set_client(client):
global Client
Client = client
@lru_cache(maxsize=512)
def fetch_actor(url):
if not Client:
raise ValueError('Please set a global client with set_default_client(client)')
url = url.split('#')[0]
headers = {'Accept': 'application/activity+json'}
resp = Client.request(url, headers=headers)
try:
actor = resp.json
except json.decoder.JSONDecodeError:
return
except Exception as e:
izzylog.debug(f'HTTP {resp.status}: {resp.body}')
raise e from None
actor.web_domain = urlparse(url).netloc
actor.shared_inbox = actor.inbox
actor.pubkey = None
actor.handle = actor.preferredUsername
if actor.get('endpoints'):
actor.shared_inbox = actor.endpoints.get('sharedInbox', actor.inbox)
if actor.get('publicKey'):
actor.pubkey = actor.publicKey.get('publicKeyPem')
return actor
@lru_cache(maxsize=512)
def fetch_instance(domain):
if not Client:
raise ValueError('Please set a global client with set_default_client(client)')
headers = {'Accept': 'application/json'}
resp = Client.request(f'https://{domain}/api/v1/instance', headers=headers)
try:
return resp.json
except json.decoder.JSONDecodeError:
return
except Exception as e:
izzylog.debug(f'HTTP {resp.status}: {resp.body}')
raise e from None
@lru_cache(maxsize=512)
def fetch_nodeinfo(domain):
if not Client:
raise ValueError('Please set a global client with set_default_client(client)')
webfinger = Client.request(f'https://{domain}/.well-known/nodeinfo')
webfinger_data = DotDict(webfinger.body)
nodeinfo_url = None
for link in webfinger.json.links:
if link['rel'] == 'http://nodeinfo.diaspora.software/ns/schema/2.0':
nodeinfo_url = link['href']
break
if not nodeinfo_url:
return
nodeinfo = Client.request(nodeinfo_url)
return nodeinfo.json
@lru_cache(maxsize=512)
def fetch_webfinger_account(handle, domain):
if not Client:
raise ValueError('Please set a global client with set_default_client(client)')
data = DefaultDotDict()
webfinger = Client.request(f'https://{domain}/.well-known/webfinger?resource=acct:{handle}@{domain}')
if not webfinger.body:
raise ValueError('Webfinger body empty')
data.handle, data.domain = webfinger.json.subject.replace('acct:', '').split('@')
for link in webfinger.json.links:
if link['rel'] == 'self' and link['type'] == 'application/activity+json':
data.actor = link['href']
return data
def generate_rsa_key():
privkey = RSA.generate(2048)
key = DotDict({'PRIVKEY': privkey, 'PUBKEY': privkey.publickey()})
key.update({'privkey': key.PRIVKEY.export_key().decode(), 'pubkey': key.PUBKEY.export_key().decode()})
return key
def parse_signature(signature: str):
if not signature:
return None
split_sig = signature.split(',')
sig = DefaultDotDict()
for part in split_sig:
key, value = part.split('=', 1)
sig[key.lower()] = value.replace('"', '')
sig.headers = sig.headers.split()
sig.domain = urlparse(sig.keyid).netloc
sig.top_domain = '.'.join(extract(sig.domain)[1:])
sig.actor = sig.keyid.split('#')[0]
return sig
def verify_headers(headers: dict, method: str, path: str, actor: dict=None, body=None):
'''Verify a header signature
headers: A dictionary containing all the headers from a request
method: The HTTP method of the request
path: The path of the HTTP request
actor (optional): A dictionary containing the activitypub actor and the link to the pubkey used for verification
body (optional): The body of the request. Only needed if the signature includes the digest header
fail (optional): If set to True, raise an error instead of returning False if any step of the process fails
'''
headers = {k.lower(): v for k,v in headers.items()}
headers['(request-target)'] = f'{method.lower()} {path}'
signature = parse_signature(headers.get('signature'))
digest = headers.get('digest')
missing_headers = [k for k in ['date', 'host'] if headers.get(k) is None]
if not signature:
raise AssertionError('Missing signature')
if not actor:
actor = fetch_actor(signature.keyid)
## Add digest header to missing headers list if it doesn't exist
if method.lower() == 'post' and not digest:
missing_headers.append('digest')
## Fail if missing date, host or digest (if POST) headers
if missing_headers:
raise AssertionError(f'Missing headers: {missing_headers}')
## Fail if body verification fails
if digest:
digest_hash = parse_body_digest(headers.get('digest'))
if not verify_string(body, digest_hash.sig, digest_hash.alg):
raise AssertionError('Failed body digest verification')
pubkey = actor.publicKey['publicKeyPem']
return sign_pkcs_headers(pubkey, {k:v for k,v in headers.items() if k in signature.headers}, sig=signature)
async def verify_request(request, actor: dict=None):
'''Verify a header signature from a SimpleASGI request
request: The request with the headers to verify
actor: A dictionary containing the activitypub actor and the link to the pubkey used for verification
'''
return verify_headers(
request.Headers.to_dict(),
request.method,
request.path,
actor = actor,
body = request.body
)
### Helper functions that shouldn't be used directly ###
def parse_body_digest(digest):
if not digest:
raise AssertionError('Empty digest')
parsed = DotDict()
alg, sig = digest.split('=', 1)
parsed.sig = sig
parsed.alg = alg.replace('-', '')
return parsed
def sign_pkcs_headers(key: str, headers: dict, sig=None):
if sig:
head_items = [f'{item}: {headers[item]}' for item in sig.headers]
else:
head_items = [f'{k.lower()}: {v}' for k,v in headers.items()]
head_string = '\n'.join(head_items)
head_bytes = head_string.encode('UTF-8')
KEY = RSA.importKey(key)
pkcs = PKCS1_v1_5.new(KEY)
h = SHA256.new(head_bytes)
if sig:
return pkcs.verify(h, b64decode(sig.signature))
else:
return pkcs.sign(h)
def sign_request(request, privkey, keyid):
assert isinstance(request.body, bytes)
request.add_header('(request-target)', f'{request.method.lower()} {request.path}')
request.add_header('host', request.host)
request.add_header('date', datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT'))
if request.body:
body_hash = b64encode(SHA256.new(request.body).digest()).decode("UTF-8")
request.add_header('digest', f'SHA-256={body_hash}')
request.add_header('content-length', str(len(request.body)))
sig = {
'keyId': keyid,
'algorithm': 'rsa-sha256',
'headers': ' '.join([k.lower() for k in request.headers.keys()]),
'signature': b64encode(sign_pkcs_headers(privkey, request.headers)).decode('UTF-8')
}
sig_items = [f'{k}="{v}"' for k,v in sig.items()]
sig_string = ','.join(sig_items)
request.add_header('signature', sig_string)
request.remove_header('(request-target)')
request.remove_header('host')
def verify_string(string, enc_string, alg='SHA256', fail=False):
if type(string) != bytes:
string = string.encode('UTF-8')
body_hash = b64encode(SHA256.new(string).digest()).decode('UTF-8')
if body_hash == enc_string:
return True
if fail:
raise AssertionError('String failed validation')
else:
return False
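As a sketch of how the verification helpers above fit together for an inbound ActivityPub POST; the handler shape is hypothetical and the headers/body are expected to come from whatever HTTP server is in use:

    from izzylib.http_urllib_client import verify_headers  # export location assumed

    def handle_inbox(method, path, headers, body):
        # verify_headers() raises AssertionError when the date/host/digest headers are
        # missing or when the signature does not match the actor's published public key
        try:
            verify_headers(headers, method, path, body=body)
        except AssertionError as error:
            return 401, str(error)
        return 202, 'accepted'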

View file

@@ -1,12 +1,11 @@
import sys
from IzzyLib.misc import DotDict
from os import getppid, environ as env
from datetime import datetime
from enum import Enum
from enum import IntEnum
class Levels(Enum):
class Levels(IntEnum):
CRITICAL = 60,
ERROR = 50
WARNING = 40
@@ -47,7 +46,7 @@ class Log:
try:
return Levels(int(level))
except ValueError:
return getattr(Levels, level.upper())
return Levels[level.upper()]
def update_config(self, **data):
@@ -56,14 +55,14 @@ class Log:
def set_config(self, key, value):
if key == 'level' and type(value) == str:
if key == 'level' and type(value) in [str, int]:
value = self.parse_level(value)
setattr(self, key, value)
def get_config(self, key):
return self[key]
return getattr(self, key)
def print_config(self):
@@ -71,7 +70,10 @@ class Log:
def log(self, level, *msg):
if level.value < self.level.value:
if isinstance(level, str):
level = Levels[level.upper()]
if level < self.level:
return
default = self.name == 'Default'
@@ -102,7 +104,8 @@ def get_logger(name, **config):
'''create a default logger'''
logger = {
'default': Log('Default')
'default': Log('Default'),
'IzzyLib': Log('IzzyLib')
}
DefaultLog = logger['default']
@@ -115,9 +118,13 @@ info = DefaultLog.info
verbose = DefaultLog.verbose
debug = DefaultLog.debug
merp = DefaultLog.merp
log = DefaultLog.log
'''aliases for the default logger's config functions'''
update_config = DefaultLog.update_config
set_config = DefaultLog.set_config
get_config = DefaultLog.get_config
print_config = DefaultLog.print_config
logger['IzzyLib'].set_config('level', env.get('IZZYLIB_LOG_LEVEL', 'INFO'))

487
izzylib/misc.py Normal file
View file

@@ -0,0 +1,487 @@
import grp, hashlib, os, platform, random, signal, socket, statistics, string, time, timeit
from datetime import datetime
from getpass import getpass, getuser
from importlib import util
from pathlib import Path
from urllib.parse import urlparse
from . import izzylog
from .dotdict import DotDict
__all__ = [
'ap_date',
'boolean',
'catch_kb_interrupt',
'get_current_user_info',
'get_ip',
'hasher',
'import_from_path',
'nfs_check',
'port_check',
'print_methods',
'prompt',
'random_gen',
'remove',
'signal_handler',
'time_function',
'time_function_pprint',
'timestamp',
'var_name',
'Url'
]
def ap_date(date=None, alt=False):
'''
Takes a datetime object and returns it as an ActivityPub-friendly string
Arguments:
date (datetime): The datetime object to be converted. It not set, will create a new datetime object with the current date and time
alt (bool): If True, the returned string will be in the Mastodon API format
Return:
str: The date in an ActivityPub-friendly format
'''
if not date:
date = datetime.utcnow()
elif type(date) == int:
date = datetime.fromtimestamp(date)
elif type(date) != datetime:
raise TypeError(f'Unsupported object type for ApDate: {type(date)}')
return date.strftime('%a, %d %b %Y %H:%M:%S GMT' if alt else '%Y-%m-%dT%H:%M:%SZ')
def boolean(v, return_value=False):
'''
Convert a str, bool, int or None object into a boolean.
Arguments:
v (str, bool, int, None): The value to be checked
return_value (bool): If True, return v instead of True if it can't be converted
Return:
various: A boolean or the value itself
'''
if type(v) not in [str, bool, int, type(None)]:
raise ValueError(f'Value is not a string, boolean, int, or nonetype: {v}')
value = v.lower() if isinstance(v, str) else v
if value in [1, True, 'on', 'y', 'yes', 'true', 'enable']:
return True
if value in [0, False, None, 'off', 'n', 'no', 'false', 'disable', '']:
return False
if return_value:
return v
return True
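A couple of quick calls to the two helpers above (assuming they are re-exported from the izzylib package root like the other misc functions):

    from datetime import datetime
    from izzylib import ap_date, boolean  # re-export assumed

    print(ap_date())                                   # current time, e.g. '2021-09-17T19:30:08Z'
    print(ap_date(datetime(2021, 9, 17), alt=True))    # 'Fri, 17 Sep 2021 00:00:00 GMT'
    print(boolean('yes'), boolean('off'), boolean(0))  # True False False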
def catch_kb_interrupt(function, *args, **kwargs):
'''
Run a function and catch the KeyboardInterrupt exception
Parameters:
function (function): The function to be ran
*args, **kwargs: The arguments and keyword arguments to pass to the function
Return:
None
'''
try:
return function(*args, **kwargs)
except KeyboardInterrupt:
izzylog.verbose('Bye! UvU')
def get_current_user_info():
data = DotDict({
'name': getuser(),
'group': None,
'groups': None,
'uid': os.getuid(),
'gid': os.getgid(),
'gids': os.getgroups()
})
try:
data.gids.remove(data.gid)
except:
pass
data.group = grp.getgrgid(data.gid).gr_name
data.groups = [grp.getgrgid(v).gr_name for v in data.gids]
return data
def get_ip():
'''
Get the IP address of the machine
Return:
str: An IP address
'''
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
s.connect(('10.255.255.255', 1))
data = s.getsockname()
ip = data[0]
except Exception:
ip = '127.0.0.1'
finally:
s.close()
return ip
def hasher(string, alg='blake2s'):
'''
Hash a string and return the digest in hex format as a string
Arguments:
string (str, bytes): A string or bytes object to be hashed
alg (str): The name of algorithm to use for hashing. Check hashlib.__always_supported for valid hash algs
Return:
str: The hashed string in hex format as a string
'''
if alg not in hashlib.algorithms_available:
raise TypeError('Unsupported hash algorithm. Supported algs: ' + ', '.join(hashlib.algorithms_available))
if alg in ['sha1', 'md4', 'md5', 'md5-sha1']:
izzylog.verbose('Warning: Using an insecure hashing algorithm. sha256 or sha512 is recommended')
string = string.encode('UTF-8') if type(string) != bytes else string
newhash = hashlib.new(alg)
newhash.update(string)
return newhash.hexdigest()
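For example (re-export from the package root assumed):

    from izzylib import hasher  # re-export assumed

    print(hasher('hello'))                # blake2s hex digest
    print(hasher('hello', alg='sha256'))  # same input hashed with sha256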
def import_from_path(mod_path):
'''
Import a module from a directory
Arguments:
mod_path (str, Path): Py file or directory to import
Return:
module: A module object
'''
mod_path = Path(mod_path)
if mod_path.is_dir():
path = mod_path.joinpath('__init__.py')
name = mod_path.name
else:
path = mod_path
name = path.name.replace('.py', '', -1)
spec = util.spec_from_file_location(name, path)
module = util.module_from_spec(spec)
spec.loader.exec_module(module)
return module
def nfs_check(path):
'''
Check if a file or directory is on an NFS share. Only tested on Linux
Arguments:
path (str, Path): Path to a file or directory
Return:
bool: True if the path is on an nfs share. False if not
'''
if platform.system() == 'Windows':
izzylog.verbose('Refusing to check unix mounts on a non-unix system')
return
proc = Path('/proc/mounts')
path = str(Path(path).resolve())
if not proc.exists():
return True
with proc.open() as fd:
for line in fd:
line = line.split()
if line[2] == 'nfs' and line[1] in path:
return True
return False
def port_check(port, address='127.0.0.1', tcp=True):
'''
Checks if a TCP or UDP port is open or not
Arguments:
port (int): The port number to check
address (str): The address to connect to to check
tcp (bool): Use TCP if True, else use UDP
Return:
bool: True if the port is in use. False if it is not
'''
with socket.socket(socket.AF_INET, socket.SOCK_STREAM if tcp else socket.SOCK_DGRAM) as s:
try:
return s.connect_ex((address, port)) == 0
except socket.error as e:
return False
def print_methods(object, include_underscore=False):
'''
Prints each method of an object on a new line
Arguments:
object (object): The object to work with
include_underscore (bool): If True, also include methods that start with '_'
Return:
None (The methods are printed to stdout)
'''
for line in dir(object):
if line.startswith('_'):
if include_underscore:
print(line)
else:
print(line)
def prompt(prompt, default=None, valtype=str, options=[], password=False):
'''An upgraded `input`
Arguments:
prompt (str): The string to display to the user
default (various): The value that should be returned if there is no user input
valtype (str): The type the value should be returned as
options (list(str)): If set, these are the only values the user can select
password (bool): If set to True, the input will be treated like a password and not show the user's input on screen
Return:
various: The value typed by the user (and converted if necessary)
'''
input_func = getpass if password else input
if default != None:
prompt += ' [-redacted-]' if password else f' [{default}]'
prompt += '\n'
if options:
opt = '/'.join(options)
prompt += f'[{opt}]'
prompt += ': '
value = input_func(prompt)
while value and len(options) > 0 and value not in options:
print(f'Invalid value: {value}')
value = input_func(prompt)
if not value or value == '':
return default
ret = valtype(value)
while valtype == Path and not ret.parent.exists():
print('Parent directory doesn\'t exist')
ret = Path(input(prompt))
return ret
def random_gen(length=20, letters=True, numbers=True, extra=None):
'''Return a randomly generated string
Arguments:
length (int): The length of the returned string
letters (bool): If True, include all upper and lowercase letters
numbers (bool): if True, include all numbers
extra (str): A string of any extra characters to include
Return:
str: A random string of characters
'''
if not isinstance(length, int):
raise TypeError(f'Character length must be an integer, not {type(length)}')
characters = ''
if letters:
characters += string.ascii_letters
if numbers:
characters += string.digits
if extra:
characters += extra
return ''.join(random.choices(characters, k=length))
def remove(string: str, junk: list):
for line in junk:
string = string.replace(line, '')
return string
def signal_handler(func, *args, original_args=True, **kwargs):
if original_args:
handler = lambda signum, frame: func(signum, frame, *args, **kwargs)
else:
handler = lambda *_: func(*args, **kwargs)
signal.signal(signal.SIGHUP, handler)
signal.signal(signal.SIGINT, handler)
signal.signal(signal.SIGQUIT, handler)
signal.signal(signal.SIGTERM, handler)
def time_function(func, *args, passes=1, use_gc=True, **kwargs):
'''Run a function and return the time it took
Arguments:
func (function): The command to be timed
args (list(various)): The arguments to be passed to the timed function
kwargs (dict(str:various)): The keyword arguments to be passed to the timed function
passes (int): How many times the timed function should be run
use_gc (bool): If True, keep garbage collection enabled
Return:
int: The time it took to run the function in miliseconds
'''
options = [
lambda: func(*args, **kwargs)
]
if use_gc:
options.append('gc.enable()')
timer = timeit.Timer(*options)
if passes > 1:
return timer.repeat(passes, 1)
return timer.timeit(1)
def time_function_pprint(func, *args, passes=5, use_gc=True, floatlen=3, **kwargs):
'''Run a function and print out the time it took for each pass, the average and total
Arguments:
func (function): The command to be timed
args (list(various)): The arguments to be passed to the timed function
kwargs (dict(str:various)): The keyword arguments to be passed to the timed function
passes (int): How many times the timed function should be run
use_gc (bool): If True, keep garbage collection enabled
floatlen (int): The amount of decimal places each result should have
Return:
None: The data gets printed to stdout
'''
parse_time = lambda num: f'{round(num, floatlen)}s'
times = []
for idx in range(0, passes):
passtime = time_function(func, *args, **kwargs, passes=1, use_gc=use_gc)
times.append(passtime)
print(f'Pass {idx+1}: {parse_time(passtime)}')
average = statistics.fmean(times)
print('-----------------')
print(f'Average: {parse_time(average)}')
print(f'Total: {parse_time(sum(times))}')
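A small illustration of the two timing helpers above; sorted() and sum() are just stand-ins for whatever needs measuring:

    from izzylib import time_function, time_function_pprint  # re-exports assumed

    # single pass, returns the duration in seconds
    print(time_function(sum, range(1_000_000)))

    # five passes with per-pass, average and total output on stdout
    time_function_pprint(sorted, list(range(100_000)), passes=5)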
def timestamp(dtobj=None, utc=False):
'''
Turn a datetime object into a unix timestamp
Arguments:
dtobj (datetime): The datetime object to be converted
utc (bool): If True, use UTC instead of local time for new objects
Return:
int: The timestamp version of a datetime object
'''
if dtobj:
return dtobj.timestamp()
date = datetime.utcnow() if utc else datetime.now()
return date.timestamp()
def var_name(single=True, **kwargs):
'''
Return a variable name as a string
Agruments:
kwargs (dict(str:variable)): Variables and their values
single (bool): If True, only return the first variable name
Return:
str or list(str): The variable name as a str (or names in a list if not Single)
'''
keys = list(kwargs.keys())
return keys[0] if single else keys
class Url(str):
protocols = {
'http': 80,
'https': 443,
'ftp': 21,
'ftps': 990
}
def __init__(self, url):
str.__new__(Url, url)
parsed = urlparse(url)
self.__parsed = parsed
self.proto = parsed.scheme
self.host = parsed.netloc
self.path = parsed.path
self.query = parsed.query
self.username = parsed.username
self.password = parsed.password
self.port = parsed.port if parsed.port else self.protocols.get(self.proto)
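A quick look at the Url helper above with a placeholder address:

    from izzylib import Url

    url = Url('https://example.com/inbox?page=2')
    print(url.proto, url.host, url.path, url.query)  # https example.com /inbox page=2
    print(url.port)                                  # 443, filled in from the protocol default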

234
izzylib/path.py Normal file
View file

@@ -0,0 +1,234 @@
import json, os, shutil
from datetime import datetime
from functools import cached_property
from pathlib import Path as PyPath
class Path(str):
def __init__(self, path=os.getcwd(), exist=True, missing=True, parents=True):
if str(path).startswith('~'):
str.__new__(Path, os.path.expanduser(path))
else:
str.__new__(Path, path)
self.config = {
'missing': missing,
'parents': parents,
'exist': exist
}
def __enter__(self):
self.fd = self.open('r')
return self.fd
def __exit__(self, *args):
self.fd.close()
def __getattr__(self, key):
return self.join(key)
def __new__(cls, content):
return str.__new__(cls, content)
def __check_dir(self, path=None):
target = self if not path else Path(path)
if not self.config['parents'] and not target.parent.exists:
raise FileNotFoundError('Parent directories do not exist:', target)
if not self.config['exist'] and target.exists:
raise FileExistsError('File or directory already exists:', target)
def append(self, text):
return Path(self + text)
def backup(self, ext='backup', overwrite=False):
target = f'{self.parent}.{ext}'
self.copy(target, overwrite)
def chmod(self, mode=None):
os.chmod(self, mode)
def contains(self, text):
return text in str(self)
def copy(self, path, overwrite=False):
target = Path(path)
self.__check_dir(path)
if overwrite:
try:
target.delete()
except FileNotFoundError:
pass
shutil.copy2(self, target)
def delete(self):
if self.isdir:
shutil.rmtree(self)
else:
os.remove(self)
return not self.exists
def expanduser(self):
return Path(os.path.expanduser(self))
def glob(self, pattern='*'):
return tuple(sorted(self.join(path) for path in PyPath(self).rglob(pattern)))
def join(self, new_path):
return Path(os.path.join(self, new_path))
def json_load(self):
with self as s:
return json.load(s)
def json_dump(self, data, indent=None):
with self.open('w') as s:
s.write(json.dumps(data, indent=indent))
def link(self, path):
target = Path(path)
self.__check_dir(path)
if target.exists:
target.delete()
os.symlink(self, target, target_is_directory=self.isdir)
def listdir(self, recursive=True):
if recursive:
return tuple(self.join(f) for dp, dn, fn in os.walk(self) for f in fn)
return [self.join(path) for path in os.listdir(self)]
def mkdir(self, mode=0o755):
if self.config['parents']:
os.makedirs(self, mode, exist_ok=self.config['exist'])
else:
os.mkdir(self, mode)
return self.exists
def move(self, path, overwrite=False):
if not overwrite and self.exists:
raise FileExistsError(f'Refusing to move file to existing destination: {path}')
shutil.move(self, path)
def open(self, *args, **kwargs):
return open(self, *args, **kwargs)
def read(self, byte=False):
with self.open('rb' if byte else 'r') as fd:
return fd.read()
def readlines(self):
with self.open() as fd:
return fd.readlines()
def touch(self, mode=0o644, utime=None):
timestamp = utime or datetime.now().timestamp()
with self.open('w+') as fd:
os.utime(self, (timestamp, timestamp))
self.chmod(mode)
return self.exists
def write(self, data, mode='w'):
with self.open(mode) as fd:
fd.write(data)
@property
def exists(self):
return os.path.exists(self)
@cached_property
def home(self):
return Path('~')
@cached_property
def isdir(self):
return os.path.isdir(self)
@cached_property
def isfile(self):
return os.path.isfile(self)
@cached_property
def islink(self):
return os.path.islink(self)
@property
def mtime(self):
return os.path.getmtime(self)
@cached_property
def name(self):
return os.path.basename(self)
@cached_property
def parent(self):
return Path(os.path.dirname(self))
@cached_property
def resolve(self):
return Path(os.path.abspath(self))
@property
def size(self):
return os.path.getsize(self)
@cached_property
def stem(self):
return os.path.basename(self).split('.')[0]
@cached_property
def suffix(self):
return os.path.splitext(self)[1]
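A short usage sketch of the Path class above; the config path is a placeholder:

    from izzylib import Path  # re-export assumed

    cfg = Path('~/.config/myapp').expanduser().join('settings.json')
    print(cfg.parent, cfg.name, cfg.suffix)

    if not cfg.parent.exists:
        cfg.parent.mkdir()

    cfg.json_dump({'theme': 'dark'}, indent=4)
    print(cfg.json_load())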

13
izzylib/sql/__init__.py Normal file
View file

@@ -0,0 +1,13 @@
## Normal SQL client
from .database import Database, OperationalError, ProgrammingError
from .session import Session
from .column import Column
## Sqlite server
#from .sqlite_server import SqliteClient, SqliteColumn, SqliteServer, SqliteSession
## Compat
SqlDatabase = Database
SqlSession = Session
SqlColumn = Column

54
izzylib/sql/column.py Normal file
View file

@@ -0,0 +1,54 @@
from sqlalchemy import ForeignKey
from sqlalchemy import (
Column as sqlalchemy_column,
types as Types
)
SqlTypes = {t.lower(): getattr(Types, t) for t in dir(Types) if not t.startswith('_')}
class Column(sqlalchemy_column):
def __init__(self, name, stype=None, fkey=None, **kwargs):
if not stype and not kwargs:
if name == 'id':
stype = 'integer'
kwargs['primary_key'] = True
kwargs['autoincrement'] = True
elif name == 'timestamp':
stype = 'datetime'
else:
raise ValueError('Missing column type and options')
stype = (stype.lower() if type(stype) == str else stype) or 'string'
if type(stype) == str:
try:
stype = SqlTypes[stype.lower()]
except KeyError:
raise KeyError(f'Invalid SQL data type: {stype}')
options = [name, stype]
if fkey:
options.append(ForeignKey(fkey))
super().__init__(*options, **kwargs)
def compile(self):
sql = f'{self.name} {self.type}'
if not self.nullable:
sql += ' NOT NULL'
if self.primary_key:
sql += ' PRIMARY KEY'
if self.unique:
sql += ' UNIQUE'
return sql
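For example, declaring a few columns with the helper above; the foreign key target is a placeholder:

    from izzylib.sql import Column

    columns = [
        Column('id'),                              # integer primary key, autoincrement
        Column('timestamp'),                       # datetime
        Column('handle', 'text', nullable=False),
        Column('actor_id', 'integer', fkey='actor.id'),
    ]
    print(columns[2].compile())                    # 'handle TEXT NOT NULL'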

186
izzylib/sql/database.py Normal file
View file

@@ -0,0 +1,186 @@
import json, pkgutil, sys, threading, time
from contextlib import contextmanager
from datetime import datetime
from izzylib import LruCache, DotDict, Path, nfs_check, izzylog
from sqlalchemy import Table, create_engine, text
from sqlalchemy.exc import OperationalError, ProgrammingError
from sqlalchemy.engine import URL
from sqlalchemy.schema import MetaData
from .rows import RowClasses
from .session import Session
modules = dict(
postgresql = ['pygresql', 'pg8000', 'psycopg2', 'psycopg3']
)
class Database:
def __init__(self, dbtype='sqlite', **kwargs):
self._connect_args = [dbtype, kwargs]
self.db = None
self.cache = None
self.config = DotDict()
self.meta = MetaData()
self.classes = RowClasses(*kwargs.get('row_classes', []))
self.cache = None
self.session_class = kwargs.get('session_class', Session)
self.sessions = {}
self.open()
def _setup_cache(self):
self.cache = DotDict({table: LruCache() for table in self.get_tables()})
@property
def session(self):
return self.session_class(self)
@property
def dbtype(self):
return self.db.url.get_backend_name()
@property
def table(self):
return DotDict(self.meta.tables)
def get_tables(self):
return list(self.table.keys())
def get_columns(self, table):
return list(col.name for col in self.table[table].columns)
def new_session(self, trans=True):
return self.session_class(self, trans=trans)
## Leaving link to example code for read-only sqlite for later use
## https://github.com/pudo/dataset/issues/136#issuecomment-128693122
def open(self):
dbtype, kwargs = self._connect_args
engine_kwargs = {
'future': True,
#'maxconnections': 25
}
if not kwargs.get('name'):
raise KeyError('Database "name" is not set')
if dbtype == 'sqlite':
database = kwargs['name']
if nfs_check(database):
izzylog.warning('Database file is on an NFS share which does not support locking. Any writes to the database will fail')
engine_kwargs['connect_args'] = {'check_same_thread': False}
elif dbtype == 'postgresql':
ssl_context = kwargs.get('ssl')
if ssl_context:
engine_kwargs['ssl_context'] = ssl_context
if not kwargs.get('host'):
kwargs['unix_socket'] = '/var/run/postgresql'
if kwargs.get('host') and Path(kwargs['host']).exists():
kwargs['unix_socket'] = kwargs.pop('host')
else:
raise TypeError(f'Unsupported database type: {dbtype}')
self.config.update(kwargs)
if dbtype == 'sqlite':
url = URL.create(
drivername='sqlite',
database=kwargs.pop('name')
)
else:
try:
for module in modules[dbtype]:
if pkgutil.get_loader(module):
dbtype = f'{dbtype}+{module}'
except KeyError:
pass
url = URL.create(
drivername = dbtype,
username = kwargs.pop('user', None),
password = kwargs.pop('password', None),
host = kwargs.pop('host', None),
port = kwargs.pop('port', None),
database = kwargs.pop('name'),
)
self.db = create_engine(url, **engine_kwargs)
self.meta = MetaData()
self.meta.reflect(bind=self.db, resolve_fks=True, views=True)
self._setup_cache()
def close(self):
for sid in list(self.sessions):
self.sessions[sid].commit()
self.sessions[sid].close()
self.config = DotDict()
self.cache = DotDict()
self.meta = None
self.db = None
def load_tables(self, **tables):
self.meta = MetaData()
for name, columns in tables.items():
Table(name, self.meta, *columns)
self._setup_cache()
def create_database(self, tables={}):
if tables:
self.load_tables(**tables)
if self.db.url.get_backend_name() == 'postgresql':
predb = create_engine(self.db.url.set(database='postgres'), future=True)
conn = predb.connect().execution_options(isolation_level='AUTOCOMMIT')
try:
conn.execute(text(f'CREATE DATABASE {self.config.name}'))
except ProgrammingError:
'The database already exists, so just move along'
except Exception as e:
conn.close()
raise e from None
conn.close()
self.meta.create_all(bind=self.db)
def drop_tables(self, *tables):
if not tables:
raise ValueError('No tables specified')
self.meta.drop_all(bind=self.db, tables=tables)
def execute(self, string, **kwargs):
with self.session as s:
s.execute(string, **kwargs)
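A hedged sketch of standing up a sqlite-backed Database with the class above; the file path is a placeholder:

    from izzylib.sql import Column, Database

    db = Database(dbtype='sqlite', name='/tmp/example.sqlite3')
    db.load_tables(users=[Column('id'), Column('handle', 'text', nullable=False)])
    db.create_database()

    print(db.dbtype)                # sqlite
    print(db.get_tables())          # ['users']
    print(db.get_columns('users'))  # ['id', 'handle']
    db.close()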

90
izzylib/sql/rows.py Normal file
View file

@@ -0,0 +1,90 @@
from izzylib import DotDict, izzylog
class RowClasses(DotDict):
def __init__(self, *classes):
super().__init__()
for rowclass in classes:
self.update({rowclass.__name__.lower(): rowclass})
def get_class(self, name):
return self.get(name, Row)
class Row(DotDict):
def __init__(self, table, row, session):
super().__init__()
if row:
try:
self._update(row._asdict())
except:
self._update(row)
self.__db = session.db
self.__table_name = table
self.__run__(session)
@property
def db(self):
return self.__db
@property
def table(self):
return self.__table_name
@property
def columns(self):
return self.keys()
## Subclass Row and redefine this function
def __run__(self, s):
pass
def _update(self, *args, **kwargs):
super().update(*args, **kwargs)
def delete(self, s=None):
izzylog.warning('deprecated function: Row.delete')
if s:
return self.delete_session(s)
with self.db.session as s:
return self.delete_session(s)
def delete_session(self, s):
izzylog.warning('deprecated function: Row.delete_session')
return s.remove(table=self.table, row=self)
def update(self, dict_data={}, s=None, **data):
izzylog.warning('deprecated function: Row.update')
dict_data.update(data)
self._update(dict_data)
if s:
return self.update_session(s, **self)
with self.db.session as s:
s.update(row=self, **self)
def update_session(self, s, dict_data={}, **data):
izzylog.warning('deprecated function: Row.update_session')
dict_data.update(data)
self._update(dict_data)
return s.update(table=self.table, row=self, **dict_data)

179
izzylib/sql/session.py Normal file
View file

@@ -0,0 +1,179 @@
from datetime import datetime
from izzylib import DotDict, random_gen, izzylog
from sqlalchemy import text
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm.session import Session as sqlalchemy_session
class Session(sqlalchemy_session):
def __init__(self, db, trans=False):
super().__init__(bind=db.db, future=True)
self.closed = False
self.trans = trans
self.database = db
self.classes = db.classes
self.cache = db.cache
self.sessionid = random_gen(10)
self.database.sessions[self.sessionid] = self
# remove in the future
self.db = db
self._setup()
def __enter__(self):
if self.trans:
self.begin()
return self
def __exit__(self, exctype, value, tb):
if self.in_transaction():
if tb:
self.rollback()
self.commit()
self.close()
def _setup(self):
pass
@property
def table(self):
return self.db.table
def commit(self):
if not self.in_transaction():
return
super().commit()
def close(self):
super().close()
self.closed = True
del self.db.sessions[self.sessionid]
self.sessionid = None
def run(self, expression, **kwargs):
result = self.execute(text(expression), params=kwargs)
try:
return result.mappings().all()
except Exception as e:
izzylog.verbose(f'Session.run: {e.__class__.__name__}: {e}')
return result
def count(self, table_name, **kwargs):
return self.query(self.table[table_name]).filter_by(**kwargs).count()
def fetch(self, table, single=True, orderby=None, orderdir='asc', **kwargs):
RowClass = self.classes.get_class(table.lower())
query = self.query(self.table[table]).filter_by(**kwargs)
if not orderby:
rows = query.all()
else:
if orderdir == 'asc':
rows = query.order_by(getattr(self.table[table].c, orderby).asc()).all()
elif orderdir == 'desc':
rows = query.order_by(getattr(self.table[table].c, orderby).desc()).all()
else:
raise ValueError(f'Unsupported order direction: {orderdir}')
if single:
return RowClass(table, rows[0], self) if len(rows) > 0 else None
return [RowClass(table, row, self) for row in rows]
def search(self, *args, **kwargs):
kwargs.pop('single', None)
return self.fetch(*args, single=False, **kwargs)
def insert(self, table, return_row=False, **kwargs):
row = self.fetch(table, **kwargs)
if row:
row.update_session(self, **kwargs)
return
if 'timestamp' in self.table[table].columns and not kwargs.get('timestamp'):
kwargs['timestamp'] = datetime.now()
self.execute(self.table[table].insert().values(**kwargs))
if return_row:
return self.fetch(table, **kwargs)
def update(self, table=None, rowid=None, row=None, return_row=False, **kwargs):
if row:
rowid = row.id
table = row.table
if not rowid or not table:
raise ValueError('Missing row ID or table')
self.execute(self.table[table].update().where(self.table[table].c.id == rowid).values(**kwargs))
if return_row:
return self.fetch(table, id=rowid)
def remove(self, table=None, rowid=None, row=None):
if row:
rowid = row.id
table = row.table
if not rowid or not table:
raise ValueError('Missing row ID or table')
self.run(f'DELETE FROM {table} WHERE id=:id', id=rowid)
def append_column(self, table, column):
if column.name in self.db.get_columns(table):
izzylog.warning(f'Table "{table}" already has column "{column.name}"')
return
self.run(f'ALTER TABLE {table} ADD COLUMN {column.compile()}')
def remove_column(self, tbl, col):
table = self.table[tbl]
column = getattr(table, col, None)
columns = self.db.get_columns(tbl)
if col not in columns:
izzylog.info(f'Column "{col}" does not exist')
return
columns.remove(col)
coltext = ','.join(columns)
self.run(f'CREATE TABLE {tbl}_temp AS SELECT {coltext} FROM {tbl}')
self.run(f'DROP TABLE {tbl}')
self.run(f'ALTER TABLE {tbl}_temp RENAME TO {tbl}')
def clear_table(self, table):
self.run(f'DELETE FROM {table}')
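And a sketch of basic row operations through the Session above, reusing the same placeholder sqlite database:

    from izzylib.sql import Column, Database

    db = Database(dbtype='sqlite', name='/tmp/example.sqlite3')
    db.load_tables(users=[Column('id'), Column('handle', 'text', nullable=False)])
    db.create_database()

    with db.session as s:                      # commits on clean exit, rolls back on error
        s.insert('users', handle='izzy')
        row = s.fetch('users', handle='izzy')  # a Row object, or None when nothing matches
        print(row.id, row.handle)
        s.update(row=row, handle='zoey')
        print(s.count('users'))                # 1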

View file

@@ -0,0 +1,374 @@
import asyncio, json, socket, sqlite3, ssl, time, traceback
from izzylib import DotDict, JsonEncoder, Path, izzylog
from . import CustomRows, SqlDatabase
commands = [
'insert', 'update', 'remove', 'query', 'execute', 'dirty', 'count',
'DropTables', 'GetTables', 'AppendColumn', 'RemoveColumn'
]
class SqliteClient(object):
def __init__(self, database: str='metadata', host: str='localhost', port: int=3926, password: str=None, session_class=None):
self.ssl = None
self.data = DotDict({
'host': host,
'port': int(port),
'password': password,
'database': database
})
self.session_class = session_class or SqliteSession
self.classes = CustomRows()
self._setup()
@property
def session(self):
return self.session_class(self)
def setup_ssl(self, certfile, keyfile, password=None):
self.ssl = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
self.ssl.load_cert_chain(certfile, keyfile, password)
def switch_database(self, database):
self.data.database = database
def _setup(self):
pass
class SqliteSession(socket.socket):
def __init__(self, client):
super().__init__(socket.AF_INET, socket.SOCK_STREAM)
self.connected = False
self.client = client
self.classes = client.classes
self.data = client.data
self.begin = lambda: self.send('begin')
self.commit = lambda: self.send('commit')
self.rollback = lambda: self.send('rollback')
for cmd in commands:
self.setup_command(cmd)
def __enter__(self):
self.open()
return self
def __exit__(self, exctype, value, tb):
if tb:
self.rollback()
self.commit()
self.close()
def fetch(self, table, *args, **kwargs):
RowClass = self.classes.get(table.capitalize())
data = self.send('fetch', table, *args, **kwargs)
if isinstance(data, dict):
return RowClass(table, data, self)
elif isinstance(data, list):
return [RowClass(table, row, self) for row in data]
def search(self, *args, **kwargs):
return self.fetch(*args, **kwargs, single=False)
def setup_command(self, name):
setattr(self, name, lambda *args, **kwargs: self.send(name, *args, **kwargs))
def send(self, command, *args, **kwargs):
self.sendall(json.dumps({'database': self.data.database, 'command': command, 'args': list(args), 'kwargs': dict(kwargs)}).encode('utf8'))
data = self.recv(8*1024*1024).decode()
try:
data = DotDict(data)
except ValueError:
data = json.loads(data)
if isinstance(data, dict) and data.get('error'):
raise ServerError(data.get('error'))
return data
def open(self):
try:
self.connect((self.data.host, self.data.port))
except ConnectionRefusedError:
time.sleep(2)
self.connect((self.data.host, self.data.port))
if self.data.password:
login = self.send('login', self.data.password)
if not login.get('message') == 'OK':
izzylog.error('Server error:', login.error)
return
self.connected = True
def close(self):
self.send('close')
super().close()
self.connected = False
def is_transaction(self):
self.send('trans_state')
def is_connected(self):
return self.connected
def _setup(self):
pass
def SqliteColumn(*args, **kwargs):
return {'args': list(args), 'kwargs': dict(kwargs)}
class SqliteServer(DotDict):
def __init__(self, path, host='localhost', port=3926, password=None):
self.server = None
self.database = DotDict()
self.path = Path(path).resolve()
self.ssl = None
self.password = password
self.host = host
self.port = int(port)
self.metadata_layout = {
'databases': [
SqliteColumn('id'),
SqliteColumn('name', 'text', nullable=False),
SqliteColumn('layout', 'text', nullable=False)
]
}
if not self.path.exists():
raise FileNotFoundError('Database directory not found')
if not self.path.isdir():
raise NotADirectoryError('Database directory is a file')
try:
self.open('metadata')
except:
self.setup_metadata()
for path in self.path.listdir(False):
if path.endswith('.sqlite3') and path.stem != 'metadata':
self.open(path.stem)
def open(self, database, new=False):
db = SqlDatabase(dbtype='sqlite', database=self.path.join(database + '.sqlite3'))
if database != 'metadata' and not new:
with self.get_database('metadata').session() as s:
row = s.fetch('databases', name=database)
if not row:
izzylog.error('Database not found:', database)
return
db.SetupTables(row.layout)
else:
db.SetupTables(self.metadata_layout)
setattr(db, 'name', database)
self[database] = db
return db
def close(self, database):
del self[database]
def delete(self, database):
self.close(database)
self.path.join(database + '.sqlite3').delete()
def get_database(self, database):
return self[database]
def asyncio_run(self):
self.server = asyncio.start_server(self.handle_connection, self.host, self.port, ssl=self.ssl)
return self.server
def run(self):
loop = asyncio.get_event_loop()
loop.run_until_complete(self.asyncio_run())
try:
izzylog.info('Starting Sqlite Server')
loop.run_forever()
except KeyboardInterrupt:
print()
izzylog.info('Closing...')
return
def setup_metadata(self):
meta = self.open('metadata')
tables = {
'databases': [
SqliteColumn('id'),
SqliteColumn('name', 'text', nullable=False),
SqliteColumn('layout', 'text', nullable=False)
]
}
db = self.open('metadata')
db.SetupTables(tables)
db.CreateDatabase()
def setup_ssl(self, certfile, keyfile, password=None):
self.ssl = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
self.ssl.load_cert_chain(certfile, keyfile, password)
async def handle_connection(self, reader, writer):
session = None
database = None
valid = None
close = False
try:
while not close:
raw_data = await asyncio.wait_for(reader.read(8*1024*1024), timeout=60)
if not raw_data:
break
try:
data = DotDict(raw_data)
if self.password:
if valid == None and data.command == 'login':
valid = self.login(*data.get('args'))
if not valid:
response = {'error': 'Missing or invalid password'}
elif data.command in ['session']:
response = {'error': 'Invalid command'}
else:
if not database:
database = data.database
if data.command == 'close' and session:
session.commit()
else:
if not session:
session = self[database].session()
session.open()
response = self.run_command(session, database, data.command, *data.get('args'), **data.get('kwargs'))
except Exception as e:
traceback.print_exc()
response = {'error': f'{e.__class__.__name__}: {str(e)}'}
writer.write(json.dumps(response or {'message': 'OK'}, cls=JsonEncoder).encode('utf8'))
await writer.drain()
izzylog.info(f'{writer.get_extra_info("peername")[0]}: [{database}] {data.command} {data.args} {data.kwargs}')
if data.command == 'delete':
writer.close()
break
except ConnectionResetError:
pass
if session:
session.close()
writer.close()
def login(self, password):
return self.password == password
def run_command(self, session, database, command, *args, **kwargs):
if command == 'update':
return self.cmd_update(*args, **kwargs)
if command == 'dropdb':
return self.cmd_delete(session, database)
elif command == 'createdb':
return self.cmd_createdb(session, database, *args)
elif command == 'test':
return
elif command == 'trans_state':
return {'trans_state': session.dirty}
cmd = getattr(session, command, None)
if not cmd:
return {'error': f'Command not found: {command}'}
return cmd(*args, **kwargs)
def cmd_delete(self, session, database):
session.rollback()
session.close()
self.delete(database)
def cmd_createdb(self, session, database, name, tables):
if session.fetch('databases', name=name):
raise ValueError(f'Database already exists: {name}')
session.insert('databases', name=name, layout=json.dumps(tables))
db = self.open(name, new=True)
db.SetupTables(tables)
db.CreateDatabase()
self[name] = db
def cmd_update(self, table=None, rowid=None, row=None, **data):
if row:
row = DotDict(row)
return self.update(table, rowid, row, **data)
class ServerError(Exception):
pass

View file

@@ -0,0 +1,229 @@
import codecs, traceback, os, json, xml.etree.ElementTree
from colour import Color as Colour
from functools import partial
from hamlish_jinja import HamlishExtension
from izzylib import izzylog, DotDict, Path
from jinja2 import Environment, FileSystemLoader, ChoiceLoader, select_autoescape, Markup
from os import listdir, makedirs
from os.path import isfile, isdir, getmtime, abspath
from xml.dom import minidom
try:
from sanic import response as Response
except ModuleNotFoundError:
Response = None
class Template(Environment):
def __init__(self, search=[], global_vars={}, context=None, autoescape=True):
self.search = FileSystemLoader([])
super().__init__(
loader=self.search,
extensions=[HamlishExtension],
lstrip_blocks=True,
trim_blocks=True
)
self.autoescape = autoescape
self.func_context = context
self.hamlish_file_extensions=('.haml',)
self.hamlish_enable_div_shortcut=True
self.hamlish_mode = 'indented'
for path in search:
self.add_search_path(Path(path))
self.globals.update({
'markup': Markup,
'cleanhtml': lambda text: ''.join(xml.etree.ElementTree.fromstring(text).itertext()),
'color': Color,
'lighten': partial(color_func, 'lighten'),
'darken': partial(color_func, 'darken'),
'saturate': partial(color_func, 'saturate'),
'desaturate': partial(color_func, 'desaturate'),
'rgba': partial(color_func, 'rgba')
})
self.globals.update(global_vars)
def add_search_path(self, path, index=None):
if not path.exists:
raise FileNotFoundError(f'Cannot find search path: {path}')
if path not in self.search.searchpath:
loader = os.fspath(path)
if index != None:
self.search.searchpath.insert(index, loader)
else:
self.search.searchpath.append(loader)
def set_context(self, context):
if not hasattr(context, '__call__'):
izzylog.error('Context is not callable')
return
if not isinstance(context({}), dict):
izzylog.error('Context does not return a dict or dict-like object')
return
self.func_context = context
def add_env(self, k, v):
self.globals[k] = v
def del_env(self, var):
if not self.globals.get(var):
raise ValueError(f'"{var}" not in global variables')
del self.globals[var]
def update_env(self, data):
if not isinstance(data, dict):
raise ValueError(f'Environment data not a dict')
self.globals.update(data)
def add_filter(self, funct, name=None):
name = funct.__name__ if not name else name
self.filters[name] = funct
def del_filter(self, name):
if not self.filters.get(name):
raise ValueError(f'"{name}" not in global filters')
del self.filters[name]
def update_filter(self, data):
if not isinstance(data, dict):
raise ValueError(f'Filter data not a dict')
self.filters.update(data)
def render(self, tplfile, context_data={}, headers={}, cookies={}, request=None, pprint=False):
if not isinstance(context_data, dict):
raise TypeError(f'context for {tplfile} not a dict: {type(context_data)} {context_data}')
context = DotDict(self.globals)
context.update(context_data)
context['request'] = request if request else {'headers': headers, 'cookies': cookies}
if self.func_context:
# Backwards compat
try:
context = self.func_context(context)
except TypeError:
context = self.func_context(context, {})
if context == None:
izzylog.warning('Template context was set to "None"')
context = {}
result = self.get_template(tplfile).render(context)
if pprint and any(map(tplfile.endswith, ['haml', 'html', 'xml'])):
return minidom.parseString(result).toprettyxml(indent=" ")
else:
return result
def response(self, request, tpl, ctype='text/html', status=200, **kwargs):
if not Response:
raise ModuleNotFoundError('Sanic is not installed')
html = self.render(tpl, request=request, **kwargs)
return Response.HTTPResponse(body=html, status=status, content_type=ctype, headers=kwargs.get('headers', {}))
class Color(Colour):
def __init__(self, color):
if isinstance(color, str):
super().__init__(f'#{str(color)}' if not color.startswith('#') else color)
elif isinstance(color, Colour):
super().__init__(str(color))
else:
raise TypeError(f'Color has to be a string or Color class, not {type(color)}')
def __repr__(self):
return self.__str__()
def __str__(self):
return self.hex_l
def lighten(self, multiplier):
return self.alter('lighten', multiplier)
def darken(self, multiplier):
return self.alter('darken', multiplier)
def saturate(self, multiplier):
return self.alter('saturate', multiplier)
def desaturate(self, multiplier):
return self.alter('desaturate', multiplier)
def rgba(self, multiplier):
return self.alter('rgba', multiplier)
def multi(self, multiplier):
if multiplier >= 100:
return 1
elif multiplier <= 0:
return 0
return multiplier / 100
def alter(self, action, multiplier):
new_color = Color(self)
if action == 'lighten':
new_color.luminance += ((1 - self.luminance) * self.multi(multiplier))
elif action == 'darken':
new_color.luminance -= (self.luminance * self.multi(multiplier))
elif action == 'saturate':
new_color.saturation += ((1 - self.saturation) * self.multi(multiplier))
elif action == 'desaturate':
new_color.saturation -= (self.saturation * self.multi(multiplier))
elif action == 'rgba':
red = self.red*255
green = self.green*255
blue = self.blue*255
trans = self.multi(multiplier)
return f'rgba({red:0.2f}, {green:0.2f}, {blue:0.2f}, {trans})'
return new_color
def color_func(action, color, multi):
return Color(color).alter(action, multi)
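Finally, a minimal rendering sketch for the Template class above; the search directory and the haml file are placeholders, and the import path is assumed from setup.cfg's package list:

    from izzylib.template import Template  # import path assumed

    # 'templates' holds page.haml, a Hamlish-Jinja template that can reference the
    # {{ site_name }} global below plus anything passed in the render() context
    tpl = Template(search=['templates'], global_vars={'site_name': 'Example'})

    html = tpl.render('page.haml', {'user': 'izzy'})
    print(html)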

6
pyproject.toml Normal file
View file

@@ -0,0 +1,6 @@
[build-system]
requires = [
"setuptools >= 38.3.0",
"wheel"
]
build-backend = "setuptools.build_meta"

View file

@@ -1,10 +0,0 @@
colour>=0.1.5
envbash>=1.2.0
Hamlish-Jinja==0.3.3
Jinja2>=2.10.1
jinja2-markdown>=0.0.3
Mastodon.py>=1.5.0
pycryptodome>=3.9.1
python-magic>=0.4.18
sanic>=19.12.2
watchdog>=0.8.3

70
setup.cfg Normal file
View file

@@ -0,0 +1,70 @@
[metadata]
name = IzzyLib
version = 0.7.0
author = Zoey Mae
author_email = zoey@barkshark.xyz
url = https://git.barkshark.xyz/izaliamae/izzylib
description = Functions and classes that I use often
license = CNPL
license_file = LICENSE
platform = any
keywords = python http activitypub sql database html css
classifiers =
Development Status :: 3 - Alpha
Intended Audience :: Developers
Operating System :: OS Independent
Programming Language :: Python
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
Topic :: Software Development :: Libraries :: Python Modules
project_urls =
Bug Tracker = https://git.barkshark.xyz/izaliamae/izzylib/issues
Documentation = https://git.barkshark.xyz/izaliamae/izzylib/wiki
Source Code = https://git.barkshark.xyz/izaliamae/izzylib
[options]
include_package_data = true
python_requires = >= 3.6
packages =
izzylib
izzylib.dbus
izzylib.hasher
izzylib.http_server
izzylib.http_urllib_client
izzylib.sql
izzylib.template
setup_requires =
setuptools >= 38.3.0
[options.package_data]
izzylib = izzylib/*
[options.extras_require]
dbus =
dasbus==1.6
hasher =
argon2-cffi == 21.1.0
http_server =
sanic == 21.6.2
envbash == 1.2.0
http_urllib_client =
pillow == 8.3.2
pycryptodome == 3.10.1
urllib3 == 1.26.6
tldextract == 3.1.2
sql =
SQLAlchemy == 1.4.23
SQLAlchemy-Paginator == 0.2
template =
colour == 0.1.5
Hamlish-Jinja == 0.3.3
Jinja2 == 3.0.1
Markdown == 3.3.4
[bdist_wheel]
universal = false
[sdist]
formats = zip, gztar

42
setup.py Executable file → Normal file
View file

@@ -1,40 +1,2 @@
#!/usr/bin/env python3
from setuptools import setup
import sys
from IzzyLib import __version__ as v
version = '.'.join([str(i) for i in v])
setup(
name="IzzyLib",
version=version,
packages=['IzzyLib'],
python_requires='>=3.6.0',
install_requires=[req.replace('\n', '') for req in open('requirements.txt').readlines()],
include_package_data=False,
author='Zoey Mae',
author_email='admin@barkshark.xyz',
description='a collection of often-used functions and classes',
keywords='web http server database postgresql',
url='https://git.barkshark.xyz/izaliamae/izzylib',
project_urls={
'Bug Tracker': 'https://git.barkshark.xyz/izaliamae/izzylib/issues',
'Documentation': 'https://git.barkshark.xyz/izaliamae/izzylib/wiki',
'Source Code': 'https://git.barkshark.xyz/izaliamae/izzylib'
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Information Technology',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Operating System :: POSIX',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Topic :: Internet :: WWW/HTTP'
]
)
import setuptools
setuptools.setup()