move some submodules to separate modules

Izalia Mae 2022-02-22 19:31:47 -05:00
parent 1e62836754
commit 0f439ff916
85 changed files with 1705 additions and 6310 deletions

View file

@@ -7,7 +7,7 @@ https://git.barkshark.xyz/izaliamae/izzylib
import os, sys, traceback
assert sys.version_info >= (3, 7)
__version_tpl__ = (0, 7, 1)
__version_tpl__ = (0, 7, 3)
__version__ = '.'.join([str(v) for v in __version_tpl__])
from . import logging
@@ -15,52 +15,17 @@ from . import logging
izzylog = logging.logger['IzzyLib']
izzylog.set_config('level', os.environ.get('IZZYLOG_LEVEL', 'INFO'))
from .path import Path
from .dotdict import DotDict, LowerDotDict, DefaultDotDict, MultiDotDict, JsonEncoder
from .dotdict import (
DotDict,
DefaultDotDict,
LowerDotDict,
MultiDotDict,
JsonEncoder
)
from .path import *
from .dotdict import *
from .misc import *
from .cache import CacheDecorator, LruCache, TtlCache
from .config import BaseConfig, JsonConfig, YamlConfig
from .connection import Connection
from .http_client import HttpClient, HttpResponse
Config = JsonConfig
def log_import_error(package, *message):
izzylog.debug(*message)
path = Path(__file__).resolve().parent.join(package)
if path.exists and izzylog.get_config('level') == logging.Levels.DEBUG:
traceback.print_exc()
#try:
#from izzylib.sql import SqlColumn, CustomRows, SqlSession, SqlDatabase, Tables, SqliteClient, SqliteColumn, SqliteServer, SqliteSession
#except ImportError:
#log_import_error('sql', 'Failed to import SQL classes. Connecting to SQL databases is disabled')
#try:
#from izzylib.tinydb import TinyDatabase, TinyRow, TinyRows
#except ImportError:
#log_import_error('tinydb', 'Failed to import TinyDB classes. TinyDB database is disabled')
#try:
#from izzylib.template import Template, Color
#except ImportError:
#log_import_error('template', 'Failed to import http template classes. Jinja and HAML templates disabled')
#try:
#from izzylib.http_urllib_client import *
#except ImportError:
#log_import_error('http_urllib_client', 'Failed to import Requests http client classes. Requests http client is disabled')
#try:
#from izzylib.http_server import PasswordHasher, HttpServer, HttpServerRequest, HttpServerResponse
#except ImportError:
#log_import_error('http_server', 'Failed to import HTTP server classes. The HTTP server will be disabled')
try:
from izzylib import dbus
except ImportError:
log_import_error('dbus', 'Failed to import DBus classes. DBus access will be disabled')
from .cache import *
from .config import *
from .connection import *
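Usage sketch (editor's addition, not part of the commit): with the wildcard imports above, the names listed in each submodule's __all__ are assumed to be re-exported from the package root, so callers can keep importing from izzylib directly. The config path and keys below are placeholders.
from izzylib import Connection, DotDict, JsonConfig, TtlCache
cache = TtlCache()                    # re-exported via cache.__all__
config = JsonConfig('example.json')   # hypothetical config file
data = DotDict(example='value')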

View file

@@ -1,771 +0,0 @@
import json, mimetypes, traceback
from datetime import datetime, timezone
from functools import partial
from typing import Union
from xml.etree.ElementTree import fromstring
from . import izzylog
from .dotdict import DotDict
from .misc import DateString, Url, boolean
pubstr = 'https://www.w3.org/ns/activitystreams#Public'
actor_types = ['Application', 'Group', 'Organization', 'Person', 'Service']
activity_types = [
'Accept', 'Add', 'Announce', 'Arrive', 'Block', 'Create', 'Delete', 'Dislike',
'Flag', 'Follow', 'Ignore', 'Invite', 'Join', 'Leave', 'Like', 'Listen',
'Move', 'Offer', 'Question', 'Reject', 'Read', 'Remove', 'TentativeAccept',
'TentativeReject', 'Travel', 'Undo', 'Update', 'View'
]
link_types = ['Mention']
object_types = [
'Article', 'Audio', 'Document', 'Event', 'Image', 'Note', 'Page', 'Place',
'Profile', 'Relationship', 'Tombstone', 'Video'
]
url_keys = [
'attributedTo', 'url', 'href', 'object', 'id', 'actor', 'partOf', 'target'
]
def parse_privacy_level(to: list=[], cc: list=[], followers=None):
if pubstr in to and followers in cc:
return 'public'
elif followers in to and pubstr in cc:
return 'unlisted'
elif pubstr not in to and pubstr not in cc and followers in cc:
return 'private'
elif not tuple(item for item in [*to, *cc] if item not in [pubstr, followers]):
return 'direct'
else:
izzylog.warning('Not sure what this privacy level is')
izzylog.debug(f'to: {json.dumps(to)}')
izzylog.debug(f'cc: {json.dumps(cc)}')
izzylog.debug(f'followers: {followers}')
def generate_privacy_fields(privacy='public', followers=None, to=[], cc=[]):
if privacy == 'public':
to = [pubstr, *to]
cc = [followers, *cc]
elif privacy == 'unlisted':
to = [followers, *to]
cc = [pubstr, *cc]
elif privacy == 'private':
cc = [followers, *cc]
elif privacy == 'direct':
pass
else:
raise ValueError(f'Unknown privacy level: {privacy}')
return to, cc
class Object(DotDict):
def __setitem__(self, key, value):
if type(key) == str and key in url_keys:
value = Url(value)
elif key == 'object' and isinstance(value, dict):
value = Object(value)
super().__setitem__(key, value)
@classmethod
def new_activity(cls, id: str, type: str, actor_src: Union[str, dict], object: Union[str, dict], to: list=[pubstr], cc: list=[]):
assert type in activity_types
activity = cls({
'@context': 'https://www.w3.org/ns/activitystreams',
'id': id,
'object': object,
'type': type,
'actor': actor_src
})
if to:
activity.to = to
if cc:
activity.cc = cc
return activity
@classmethod
def new_note(cls, id, url, actor, content, **kwargs):
assert all(isinstance(value, Url) for value in (id, actor, url))
if kwargs.get('date'):
date = DateString.from_datetime(kwargs['date'], 'activitypub')
else:
date = DateString.now('activitypub')
return cls({
"@context": [
"https://www.w3.org/ns/activitystreams",
{
"sensitive": "as:sensitive",
"toot": "http://joinmastodon.org/ns#",
#"votersCount": "toot:votersCount",
#"litepub": "http://litepub.social/ns#",
#"directMessage": "litepub:directMessage"
}
],
"id": id,
"type": "Note",
"summary": kwargs.get('summary'),
#"inReplyTo": kwargs.get('replyto'),
"published": date,
"url": url,
"attributedTo": actor,
"to": [
"https://www.w3.org/ns/activitystreams#Public"
],
"cc": [
f'{actor}/followers'
],
"sensitive": kwargs.get('sensitive', False),
"content": f'{content}',
#"contentMap": {
#"en": content
#},
#"attachment": [],
#"tag": [],
#"replies": {
#"id": f"{id}/replies",
#"type": "Collection",
#"first": {
#"type": "CollectionPage",
#"next": f"{id}/replies?only_other_accounts=true&page=true",
#"partOf": f"{id}/replies",
#"items": []
#}
#}
})
@classmethod
def new_actor(cls, actor, handle, pubkey, published=None, table={}, full=True, **kwargs):
actor_type = kwargs.get('type', 'Person').title()
assert actor_type in actor_types
actor = Url(actor)
data = cls({
'@context': [
'https://www.w3.org/ns/activitystreams',
'https://w3id.org/security/v1',
{
'schema': 'http://schema.org',
'toot': 'https://joinmastodon.org/ns#',
#'Device': 'toot:Device',
#'Ed25519Signature': 'toot:Ed25519Signature',
#'Ed25519Key': 'toot:Ed25519Key',
#'Curve25519Key': 'toot:Curve25519Key',
#'EncryptedMessage': 'toot:EncryptedMessage',
#'publicKeyBase64': 'toot:publicKeyBase64',
#'deviceId': 'toot:deviceId',
#'messageFranking': 'toot:messageFranking',
'messageType': 'toot:messageType',
#'cipherText': 'toot:cipherText',
#'suspended': 'toot:suspended',
"claim": {
"@type": "@id",
"@id": "toot:claim"
}
}
],
'id': actor,
'type': actor_type,
'inbox': kwargs.get('inbox', f'{actor}'),
'outbox': f'{actor}/outbox',
'preferredUsername': handle,
'url': kwargs.get('url', actor),
'manuallyApprovesFollowers': kwargs.get('locked', False),
'discoverable': kwargs.get('discoverable', False),
'published': published or DateString.now('activitypub'),
'publicKey': {
'id': f'{actor}#main-key',
'owner': actor,
'publicKeyPem': pubkey
},
'endpoints': {
'sharedInbox': kwargs.get('shared_inbox', f'https://{actor.host}/inbox')
}
})
for key, value in table.items():
data.attachment.append(PropertyValue(key, value))
if kwargs.get('avatar_url'):
data.icon = Object.new_image(kwargs.get('avatar_url'), kwargs.get('avatar_type'))
if full:
data.update({
'name': kwargs.get('display_name', handle),
'summary': kwargs.get('bio'),
'featured': f'{actor}/collections/featured',
'tags': f'{actor}/collections/tags',
'following': f'{actor}/following',
'followers': f'{actor}/followers',
'tag': [],
'attachment': [],
})
data['@context'][2].update({
'manuallyApprovesFollowers': 'as:manuallyApprovesFollowers',
'discoverable': 'toot:discoverable',
'PropertyValue': 'schema:PropertyValue',
'value': 'schema:value',
'Emoji': 'toot:Emoji',
"featured": {
"@id": "toot:featured",
"@type": "@id"
},
"featuredTags": {
"@id": "toot:featuredTags",
"@type": "@id"
},
"alsoKnownAs": {
"@id": "as:alsoKnownAs",
"@type": "@id"
},
"movedTo": {
"@id": "as:movedTo",
"@type": "@id"
},
"claim": {
"@type": "@id",
"@id": "toot:claim"
},
"focalPoint": {
"@container": "@list",
"@id": "toot:focalPoint"
}
})
return data
# not complete
@classmethod
def new_actor_old(cls, actor, handle, pubkey, published=None, table={}, full=True, **kwargs):
actor_type = kwargs.get('type', 'Person').title()
assert actor_type in actor_types
actor = Url(actor)
data = cls({
'@context': [
'https://www.w3.org/ns/activitystreams',
'https://w3id.org/security/v1',
{
'schema': 'http://schema.org',
'toot': 'https://joinmastodon.org/ns#',
'manuallyApprovesFollowers': 'as:manuallyApprovesFollowers',
'PropertyValue': 'schema:PropertyValue',
'value': 'schema:value',
'IdentityProof': 'toot:IdentityProof',
'discoverable': 'toot:discoverable',
'Device': 'toot:Device',
'Ed25519Signature': 'toot:Ed25519Signature',
'Ed25519Key': 'toot:Ed25519Key',
'Curve25519Key': 'toot:Curve25519Key',
'EncryptedMessage': 'toot:EncryptedMessage',
'publicKeyBase64': 'toot:publicKeyBase64',
'deviceId': 'toot:deviceId',
'messageFranking': 'toot:messageFranking',
'messageType': 'toot:messageType',
'cipherText': 'toot:cipherText',
'suspended': 'toot:suspended',
'Emoji': 'toot:Emoji',
"featured": {
"@id": "toot:featured",
"@type": "@id"
},
"featuredTags": {
"@id": "toot:featuredTags",
"@type": "@id"
},
"alsoKnownAs": {
"@id": "as:alsoKnownAs",
"@type": "@id"
},
"movedTo": {
"@id": "as:movedTo",
"@type": "@id"
},
"claim": {
"@type": "@id",
"@id": "toot:claim"
},
"fingerprintKey": {
"@type": "@id",
"@id": "toot:fingerprintKey"
},
"identityKey": {
"@type": "@id",
"@id": "toot:identityKey"
},
"devices": {
"@type": "@id",
"@id": "toot:devices"
},
"focalPoint": {
"@container": "@list",
"@id": "toot:focalPoint"
}
}
],
'id': actor,
'type': actor_type,
'following': f'{actor}/following',
'followers': f'{actor}/followers',
'inbox': kwargs.get('inbox', f'{actor}'),
'outbox': f'{actor}/outbox',
'featured': f'{actor}/collections/featured',
'featuredTags': f'{actor}/collections/tags',
'preferredUsername': handle,
'name': kwargs.get('display_name', handle),
'summary': kwargs.get('bio'),
'url': kwargs.get('url', actor),
'manuallyApprovesFollowers': kwargs.get('locked', False),
'discoverable': kwargs.get('discoverable', False),
'published': published or DateString.now('activitypub'),
'devices': f'{actor}/collections/devices',
'publicKey': {
'id': f'{actor}#main-key',
'owner': actor,
'publicKeyPem': pubkey
},
'tag': [],
'attachment': [],
'endpoints': {
'sharedInbox': kwargs.get('shared_inbox', f'https://{actor.host}/inbox')
}
})
for key, value in table.items():
data.attachment.append(PropertyValue(key, value))
if kwargs.get('avatar_url'):
data.icon = Object.new_image(kwargs.get('avatar_url'), kwargs.get('avatar_type'))
if kwargs.get('header_url'):
data.image = Object.new_image(kwargs.get('header_url'), kwargs.get('header_type'))
# need to add data when "full" is true
if not full:
del data.featured
del data.featuredTags
del data.devices
del data.following
del data.followers
del data.outbox
return data
@classmethod
def new_follow(cls, id, actor, target):
return cls({
'@context': 'https://www.w3.org/ns/activitystreams',
'id': id,
'type': 'Follow',
'actor': actor,
'object': target
})
@classmethod
def new_emoji(cls, id, name, url, image):
return cls({
'@context': 'https://www.w3.org/ns/activitystreams',
'id': id,
'type': 'Emoji',
'name': name,
'updated': DateString.now('activitypub'),
'icon': image
})
@property
def privacy_level(self):
return parse_privacy_level(
self.get('to', []),
self.get('cc', []),
self.get('attributedTo', '') + '/followers'
)
@property
def shared_inbox(self):
try: return self.endpoints.sharedInbox
except AttributeError: pass
@property
def pubkey(self):
try: return self.publicKey.publicKeyPem
except AttributeError: pass
@property
def handle(self):
return self['preferredUsername']
@property
def display_name(self):
return self.get('name')
@property
def type(self):
return self['type'].capitalize()
@property
def info_table(self):
return DotDict({p['name']: p['value'] for p in self.get('attachment', {})})
@property
def domain(self):
return self.id.host
@property
def bio(self):
return self.get('summary')
@property
def avatar(self):
return self.icon.url
@property
def header(self):
return self.image.url
class Collection(Object):
@classmethod
def new_replies(cls, statusid):
return cls({
'@context': 'https://www.w3.org/ns/activitystreams',
'id': f'{statusid}/replies',
'type': 'Collection',
'first': {
'type': 'CollectionPage',
'next': f'{statusid}/replies?only_other_accounts=true&page=true',
'partOf': f'{statusid}/replies',
'items': []
}
})
@classmethod
def new_collection(cls, outbox, min_id=0, total=0):
return cls({
'@context': 'https://www.w3.org/ns/activitystreams',
'id': outbox,
'type': 'OrderedCollection',
'totalItems': total,
'first': f'{outbox}?page=true',
'last': f'{outbox}?min_id=0&page=true'
})
@classmethod
def new_page(cls, outbox, min_id, max_id, *items):
return cls({
'@context': [
'https://www.w3.org/ns/activitystreams',
{
'sensitive': 'as:sensitive',
'toot': 'http://joinmastodon.org/ns#',
'votersCount': 'toot:votersCount',
'litepub': 'http://litepub.social/ns#',
'directMessage': 'litepub:directMessage',
}
],
'id': f'{outbox}?page=true',
'type': 'OrderedCollectionPage',
'next': f'{outbox}?max_id={max_id}&page=true',
'prev': f'{outbox}?min_id={min_id}&page=true',
'partOf': outbox,
'orderedItems': items
})
### sub-objects ###
class PropertyValue(DotDict):
def __init__(self, key, value):
super().__init__({
'type': 'PropertyValue',
'name': key,
'value': value
})
def __setitem__(self, key, value):
key = key.lower()
assert key in ['type', 'name', 'value']
assert type(value) == str
super().__setitem__(key, value)
def set_pair(self, key, value):
self.name = key
self.value = value
class Media(Object):
@classmethod
def new(cls, type, url, mime=None):
return cls(
type = 'Image',
mediaType = mime or mimetypes.guess_type(url)[0] or 'image/png',
url = url
)
@classmethod
def new_image(cls, url, mime=None):
return cls.new('Image', url, mime)
@classmethod
def new_video(cls, url, mime=None):
return cls.new('Video', url, mime)
@classmethod
def new_audio(cls, url, mime=None):
return cls.new('Audio', url, mime)
class Emoji(DotDict):
@classmethod
def new(cls, id, name, image):
return cls({
'id': id,
'type': 'Emoji',
'name': f':{name}:',
'icon': image
})
### Not activitypub objects, but related ###
class Nodeinfo(DotDict):
@property
def name(self):
return self.software.name
@property
def version(self):
return self.software.version
@property
def repo(self):
return self.software.repository
@property
def homepage(self):
return self.software.homepage
@property
def users(self):
return self.usage.users.total
@property
def posts(self):
return self.usage.localPosts
@classmethod
def new_20(cls, name, version, **metadata):
return cls.new(name, version, '2.0', **metadata)
@classmethod
def new_21(cls, name, version, **metadata):
return cls.new(name, version, '2.1', **metadata)
@classmethod
def new(cls, name, version, niversion='2.1', **kwargs):
assert niversion in ['2.0', '2.1']
open_regs = boolean(kwargs.pop('open_regs', True))
posts = int(kwargs.pop('posts', 0))
users = int(kwargs.pop('users', 0))
users_halfyear = int(kwargs.pop('halfyear', 0))
users_month = int(kwargs.pop('month', 0))
comments = int(kwargs.pop('comments', 0))
repository = kwargs.pop('repository', None)
homepage = kwargs.pop('homepage', None)
data = cls(
version = niversion,
openRegistrations = open_regs,
software = {
'name': name.lower().replace(' ', '-'),
'version': version
},
usage = {
'users': {
'total': users
}
},
protocols = [
'activitypub'
],
services = {
'inbound': kwargs.pop('inbound', []),
'outbound': kwargs.pop('outbound', [])
}
)
if niversion == '2.1':
if repository:
data.software.repository = repository
if homepage:
data.software.homepage = homepage
if users_halfyear:
data.usage.users.activeHalfyear = users_halfyear
if users_month:
data.usage.users.activeMonth = users_month
if posts:
data.usage.localPosts = posts
if comments:
data.usage.localComments = comments
if kwargs:
data.metadata = kwargs
return data
class WellknownNodeinfo(DotDict):
def url(self, version='2.1'):
assert version in ['2.0', '2.1']
for link in self.links:
if link['rel'].endswith(version):
return link['href']
@classmethod
def new(cls, path, version='2.1'):
data = cls(links=[])
data.append(path, version)
return data
def append(self, path, version='2.1'):
assert version in ['2.0', '2.1']
self.links.append({
'rel': f'http://nodeinfo.diaspora.software/ns/schema/{version}',
'href': path
})
class Hostmeta(str):
def __new__(cls, text):
return str.__new__(cls, text)
@classmethod
def new(cls, domain):
return cls(f'<?xml version="1.0" encoding="UTF-8"?><XRD xmlns="http://docs.oasis-open.org/ns/xri/xrd-1.0"><Link rel="lrdd" template="https://{domain}/.well-known/webfinger?resource={{uri}}"/></XRD>')
@property
def link(self):
return Url(fromstring(self)[0].attrib['template'])
class Webfinger(DotDict):
@property
def profile(self):
for link in self.links:
if link['rel'] == 'http://webfinger.net/rel/profile-page':
return link['href']
@property
def actor(self):
for link in self.links:
if link['rel'] == 'self':
return link['href']
@property
def fullname(self):
return self.subject[5:]
@property
def handle(self):
return self.fullname.split('@')[0]
@property
def domain(self):
return self.fullname.split('@')[1]
@classmethod
def new(cls, handle, domain, actor, profile=None):
data = cls(
subject = f'acct:{handle}@{domain}',
aliases = [actor],
links = [
{
'rel': 'self',
'type': 'application/activity+json',
'href': actor
}
]
)
if profile:
data.aliases.append(profile)
data.links.append({
'rel': 'http://webfinger.net/rel/profile-page',
'type': 'text/html',
'href': profile
})
return data

View file

@@ -8,6 +8,13 @@ from hashlib import sha1
from .dotdict import DotDict
__all__ = [
'BaseCache',
'TtlCache',
'LruCache'
]
def parse_ttl(ttl):
if not ttl:
return 0
@@ -50,10 +57,6 @@ class DefaultValue(object):
class BaseCache(OrderedDict):
_get = OrderedDict.get
_items = OrderedDict.items
def __init__(self, maxsize=1024, ttl=None):
self.ttl = parse_ttl(ttl)
self.maxsize = maxsize
@@ -68,7 +71,7 @@ class BaseCache(OrderedDict):
def items(self):
return [[k, v.data] for k,v in self._items()]
return [[k, v.data] for k,v in super().items()]
def remove(self, key):
@@ -90,7 +93,7 @@ class BaseCache(OrderedDict):
def get(self, key):
item = self._get(key)
item = super().get(key)
if not item:
return
@@ -105,7 +108,7 @@ class BaseCache(OrderedDict):
self[key]['timestamp'] = timestamp + self.ttl
self.move_to_end(key)
return item
return item['data']
## This doesn't work for some reason

View file

@@ -9,6 +9,13 @@ except ImportError:
yaml = None
__all__ = [
'BaseConfig',
'JsonConfig',
'YamlConfig'
]
class BaseConfig(DotDict):
def __init__(self, config_file=None, value_parser=None, **defaults):
self._defaults = defaults
@@ -57,18 +64,11 @@ class BaseConfig(DotDict):
def load(self):
try:
self.set_data(self._load_config() or {})
return True
except FileNotFoundError:
izzylog.warning('Cannot find path to config file:', self._config_file)
return False
self.set_data(self._load_config() or {})
def save(self, indent='\t'):
self._config_file.parent().mkdir()
def save(self):
self._config_file.parent.mkdir()
self._save_config()
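Usage sketch (not part of the diff) for the reworked load()/save() pair, assuming JsonConfig keeps the BaseConfig constructor shown earlier; the file name and defaults are placeholders.
cfg = JsonConfig('app.json', listen='127.0.0.1', port=8080)
cfg.load()     # a missing file now raises FileNotFoundError instead of being logged and swallowed
cfg.port = 8443
cfg.save()     # no indent argument any more; the parent directory is created via the parent property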

View file

@@ -1,9 +1,42 @@
import enum
import socket
__all__ = [
'Connection',
'Protocol'
]
Protocol = enum.Enum('Protocol', [
'TCP',
'UDP',
'BROADCAST',
'MULTICAST'
]
)
class Connection(socket.socket):
def __init__(self, address='127.0.0.1', port=8080, tcp=True):
super().__init__(socket.AF_INET, socket.SOCK_STREAM if tcp else socket.SOCK_DGRAM)
def __init__(self, address='127.0.0.1', port=8080, proto=Protocol.TCP, timeout=60, ttl=1):
if type(proto) == str:
proto = Protocol[proto.upper()]
elif type(proto) != Protocol:
raise TypeError(f'proto must be a Protocol enum or a string, not {type(proto).__name__}')
if proto == Protocol.TCP:
super().__init__(socket.AF_INET, socket.SOCK_STREAM)
else:
super().__init__(socket.AF_INET, socket.SOCK_DGRAM)
if proto == Protocol.BROADCAST:
self.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, ttl)
elif proto == Protocol.MULTICAST:
self.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl)
self.address = address
self.port = port
@@ -17,12 +50,24 @@ class Connection(socket.socket):
self.close()
def send(self, msg):
def serve(self):
self.bind((self.address, self.port))
while True:
data, address = self.recvfrom(8192)
def read(self, size=8192):
return self.recv(size)
def write(self, msg):
if isinstance(msg, str):
msg = msg.encode('utf-8')
self.sendall(msg)
def recieve(self, size=8192):
return self.recv(size)
if __name__ == '__main__':
conn = Connection('192.168.2.11', 80, proto=Protocol.TCP, timeout=5)
conn.write('')
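A further hedged sketch of the new proto handling (construction only, since the connect/serve flow is not fully shown in this hunk); addresses and ports are placeholders.
udp = Connection('0.0.0.0', 5353, proto='udp', timeout=5)                      # the string is upper-cased into Protocol.UDP
bcast = Connection('255.255.255.255', 9000, proto=Protocol.BROADCAST, ttl=2)   # datagram socket with SO_BROADCAST set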

View file

@@ -5,6 +5,15 @@ from urllib.parse import unquote_plus, quote
from .path import Path
__all__ = [
'DotDict',
'DefaultDotDict',
'LowerDotDict',
'MultiDotDict',
'JsonEncoder'
]
class DotDict(dict):
non_dict_vars = []
@@ -75,6 +84,13 @@ class DotDict(dict):
return data
@classmethod
def new_from_fd(cls, fd):
data = cls(fd.read())
fd.close()
return data
def copy(self):
return DotDict(self)
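Usage sketch (not part of the diff) for the new new_from_fd helper; the file name is a placeholder and is assumed to contain JSON that DotDict can parse.
fd = open('data.json')
data = DotDict.new_from_fd(fd)   # reads the descriptor, builds the DotDict, then closes fd
print(data)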

View file

@@ -1,3 +1,8 @@
__all__ = [
'NoConnectionError'
]
class DBusClientError(Exception):
pass
@@ -44,3 +49,20 @@ class NoTableLayoutError(Exception):
class UpdateAllRowsError(Exception):
'Raise when an UPDATE tries to modify all rows in a table'
class HttpError(Exception):
def __init__(self, response):
self.response = response
super().__init__(f'HTTP ERROR {self.status} for "{response.url}": {self.message[:100]}')
@property
def status(self):
return self.response.status
@property
def message(self):
return self.response.text
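For reference, a hedged sketch of how the new HttpError is used by the client module added later in this commit; the client instance and URL are placeholders.
resp = client.get('https://example.com/missing')
if resp.status != 200:
    raise HttpError(resp)   # the message carries the status, the URL, and the first 100 characters of the body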

View file

@@ -1,78 +0,0 @@
import argon2, os
from .misc import time_function_pprint
class PasswordHasher:
'''
Argon2 password hasher and validator
Attributes:
config (dict): The settings used for the hasher
Methods:
get_config(key): Get the value of a config options
set_config(key, value): Set a config option
hash(password): hash a password and return the digest as a hex string
verify(hash, password): verify a password and the password hash match
iteration_test(string, passes, iterations): Time the hashing functionality
'''
aliases = {
'iterations': 'time_cost',
'memory': 'memory_cost',
'threads': 'parallelism'
}
def __init__(self, iterations=16, memory=100, threads=os.cpu_count(), type=argon2.Type.ID):
if not argon2:
raise ValueError('password hashing disabled')
self.config = {
'time_cost': iterations,
'memory_cost': memory * 1024,
'parallelism': threads,
'encoding': 'utf-8',
'type': type,
}
self.hasher = argon2.PasswordHasher(**self.config)
def get_config(self, key):
key = self.aliases.get(key, key)
value = self.config[key]
return value / 1024 if key == 'memory_cost' else value
def set_config(self, key, value):
key = self.aliases.get(key, key)
self.config[key] = value * 1024 if key == 'memory_cost' else value
self.hasher = argon2.PasswordHasher(**self.config)
def hash(self, password: str):
return self.hasher.hash(password)
def verify(self, passhash: str, password: str):
try:
return self.hasher.verify(passhash, password)
except argon2.exceptions.VerifyMismatchError:
return False
def iteration_test(self, string='hecking heck', passes=3, iterations=[8,16,24,32,40,48,56,64]):
original_iter = self.get_config('iterations')
for iteration in iterations:
self.set_config('iterations', iteration)
print('\nTesting hash iterations:', iteration)
time_function_pprint(self.verify, self.hash(string), string, passes=passes)
self.set_config('iterations', original_iter)

View file

@@ -1,166 +0,0 @@
import functools, json, sys
from base64 import b64decode, b64encode
from datetime import datetime
from functools import cached_property
from io import BytesIO
from ssl import SSLCertVerificationError
from urllib.error import HTTPError
from urllib.request import Request, urlopen
from . import izzylog, __version__
from .dotdict import DefaultDotDict, DotDict
from .exceptions import HttpFileDownloadedError
from .misc import Url
from .path import Path
try:
from PIL import Image
except ImportError:
izzylog.verbose('Pillow module not found. Image downloading is disabled')
Image = False
methods = ['connect', 'delete', 'get', 'head', 'options', 'patch', 'post', 'put', 'trace']
class HttpClient:
def __init__(self, headers={}, useragent=f'IzzyLib/{__version__}', appagent=None, proxy_type='https', proxy_host=None, proxy_port=None):
proxy_ports = {
'http': 80,
'https': 443
}
if proxy_type not in ['http', 'https']:
raise ValueError(f'Not a valid proxy type: {proxy_type}')
self.headers=headers
self.agent = f'{useragent} ({appagent})' if appagent else useragent
self.proxy = DotDict({
'enabled': True if proxy_host else False,
'ptype': proxy_type,
'host': proxy_host,
'port': proxy_ports[proxy_type] if not proxy_port else proxy_port
})
def __build_request(self, url, data=None, headers={}, method='GET'):
new_headers = self.headers.copy()
new_headers.update(headers)
parsed_headers = {k.lower(): v for k,v in new_headers.items()}
if not parsed_headers.get('user-agent'):
parsed_headers['user-agent'] = self.agent
if isinstance(data, dict):
data = json.dumps(data)
if isinstance(data, str):
data = data.encode('UTF-8')
request = Request(url, data=data, headers=parsed_headers, method=method)
if self.proxy.enabled:
request.set_proxy(f'{self.proxy.host}:{self.proxy.port}', self.proxy.ptype)
return request
def request(self, *args, **kwargs):
request = self.__build_request(*args, **kwargs)
try:
response = urlopen(request)
except HTTPError as e:
response = e.fp
return HttpResponse(response)
def file(self, url, filepath, *args, filename=None, size=2048, create_dirs=True, **kwargs):
filepath = Path(filepath)
path = filepath.parent
if not path.exists and not create_dirs:
raise FileNotFoundError(f'Path does not exist: {path}')
path.mkdir()
if filepath.exists:
kwargs['headers']['range'] = f'bytes={filepath.size}'
resp = self.request(url, *args, stream=True, **kwargs)
if not resp.headers.get('content-length'):
raise HttpFileDownloadedError('File already downloaded fully')
if resp.status != 200:
raise HttpFileDownloadedError(f'Failed to download {url}: {resp.status}, body: {resp.body}')
with filepath.open('ab') as fd:
for chunk in resp.chunks(size):
fd.write(chunk)
return True
def image(self, url, filepath, *args, filename=None, ext='png', dimensions=(50, 50), create_dirs=True, **kwargs):
if not Image:
raise ValueError('Pillow module is not installed')
filepath = Path(filepath)
path = filepath.parent
if not path.exists and not create_dirs:
raise FileNotFoundError(f'Path does not exist: {path}')
path.mkdir()
resp = self.request(url, *args, **kwargs)
if resp.status != 200:
raise HttpFileDownloadedError(f'Failed to download {url}: {resp.status}, body: {resp.body}')
if not filename:
filename = Path(url).stem
byte = BytesIO()
image = Image.open(BytesIO(resp.body))
image.thumbnail(dimensions)
image.save(byte, format=ext.upper())
with path.join(filename).open('wb') as fd:
fd.write(byte.getvalue())
def json(self, *args, headers={}, activity=True, **kwargs):
json_type = 'activity+json' if activity else 'json'
headers.update({
'accept': f'application/{json_type}'
})
return self.request(*args, headers=headers, **kwargs)
class HttpResponse(object):
def __init__(self, response):
self.body = response.read()
self.headers = DefaultDotDict({k.lower(): v.lower() for k,v in response.headers.items()})
self.status = response.status
self.url = Url(response.url)
@cached_property
def text(self):
return self.body.decode('UTF-8')
@cached_property
def json(self):
return DotDict(self.text)
def json_pretty(self, indent=4):
return json.dumps(self.json, indent=indent)

View file

@@ -0,0 +1,28 @@
content_types = {
'json': 'application/json',
'activity': 'application/activity+json',
'css': 'text/css',
'html': 'text/html',
'js': 'application/javascript',
'png': 'image/png',
'jpeg': 'image/jpeg',
'gif': 'image/gif'
}
http_methods = {
'CONNECT',
'DELETE',
'GET',
'HEAD',
'OPTIONS',
'PATCH',
'POST',
'PUT',
'TRACE'
}
from .client import Client
from .request import Request
from .response import Response
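A minimal usage sketch of the per-method helpers that Client builds from http_methods. The izzylib.http_client package path is an assumption (file names are not shown in this view) and the URL is a placeholder.
from izzylib.http_client import Client
client = Client()
resp = client.get('https://example.com')   # partial of Client.request with method='GET'
print(resp.status, resp.get_header('content-type'))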

View file

@@ -0,0 +1,163 @@
import functools, json, shutil, sys
from base64 import b64decode, b64encode
from datetime import datetime
from functools import cached_property, partial
from io import BytesIO
from ssl import SSLCertVerificationError
from urllib.error import HTTPError
from urllib.request import urlopen
from . import http_methods
from .config import Config
from .request import Request
from .. import izzylog, __version__
from ..dotdict import DefaultDotDict, DotDict
from ..exceptions import HttpError, HttpFileDownloadedError
from ..misc import Url
from ..path import Path
try:
from PIL import Image
except ImportError:
izzylog.verbose('Pillow module not found. Image downloading is disabled')
Image = False
class Client:
def __init__(self, **kwargs):
self.cfg = Config(**kwargs)
for method in http_methods:
self.__set_method(method)
def __set_method(self, method):
setattr(self, method.lower(), partial(self.request, method=method.upper()))
def build_request(self, *args, **kwargs):
request = self.cfg.request_class(*args, **kwargs)
request._set_params(self.cfg)
return request
def send_request(self, request):
if not isinstance(request, Request):
raise TypeError(f'Must be a Request object (or subclassed), not {type(request).__name__}')
if not request._params_set:
request._set_params(self.cfg)
try:
response = urlopen(request)
except HTTPError as e:
response = e.fp
return self.cfg.response_class(response)
def request(self, *args, **kwargs):
request = self.build_request(*args, **kwargs)
return self.send_request(request)
def json(self, *args, headers={}, activity=True, **kwargs):
for key in list(headers.keys()):
if key.lower() == 'accept':
del headers[key]
json_type = 'activity+json' if activity else 'json'
headers['Accept'] = f'application/{json_type}'
return self.request(*args, headers=headers, **kwargs)
def file(self, url, filepath, *args, filename=None, overwrite=False, create_dirs=True, chunk_size=2048, **kwargs):
filepath = Path(filepath)
tmppath = filepath.parent.joinpath(filepath.name + '.dltemp')
if not overwrite and filepath.exists:
raise FileExistsError(f'File already exists: {filepath}')
if not filepath.parent.exists:
if not create_dirs:
raise FileNotFoundError(f'Path does not exist: {filepath.parent}')
filepath.parent.mkdir()
if tmppath.exists:
kwargs.setdefault('headers', {})['range'] = f'bytes={tmppath.size}-'
resp = self.request(url, *args, **kwargs)
if not resp.headers.get('content-length'):
try: tmppath.delete()
except: pass
raise FileExistsError('File already downloaded fully')
if resp.status != 200:
raise HttpError(resp)
with tmppath.open('ab') as fd:
for chunk in resp.chunks(chunk_size):
fd.write(chunk)
shutil.move(tmppath, filepath)
return filepath
def image(self, url, filepath, *args, filename=None, ext='png', dimensions=(50, 50), create_dirs=True, **kwargs):
if not Image:
raise ValueError('Pillow module is not installed')
filepath = Path(filepath)
if not filepath.parent.exists and not create_dirs:
raise FileNotFoundError(f'Path does not exist: {filepath.parent}')
filepath.parent.mkdir()
resp = self.request(url, *args, **kwargs)
if resp.status != 200:
raise HttpError(resp)
if not filename:
filename = Path(url).stem
byte = BytesIO()
image = Image.open(BytesIO(resp.body))
image.thumbnail(dimensions)
image.save(byte, format=ext.upper())
with filepath.join(filename).open('wb') as fd:
fd.write(byte.getvalue())
class HttpResponse(object):
def __init__(self, response):
self.body = response.read()
self.headers = DefaultDotDict({k.lower(): v.lower() for k,v in response.headers.items()})
self.status = response.status
self.url = Url(response.url)
@cached_property
def text(self):
return self.body.decode('UTF-8')
@cached_property
def json(self):
return DotDict(self.text)
def json_pretty(self, indent=4):
return json.dumps(self.json, indent=indent)
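A hedged sketch of the download helpers above; URLs and paths are placeholders, Pillow is assumed to be installed for image(), and the server is assumed to report Content-Length.
client = Client(appagent='ExampleApp/0.1')   # folded into the User-Agent header by the Config class below
path = client.file('https://example.com/a.tar.gz', '/tmp/a.tar.gz', overwrite=True)
client.image('https://example.com/avatar.png', '/tmp/avatars', dimensions=(128, 128))   # filepath acts as the target directory here and is assumed to exist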

View file

@@ -0,0 +1,56 @@
from .request import Request
from .response import Response
from .. import __version__
from ..config import BaseConfig
from ..dotdict import DotDict
proxy_ports = {
'http': 80,
'https': 443
}
class Config(BaseConfig):
def __init__(self, **kwargs):
super().__init__(
appagent = None,
timeout = 60,
request_class = Request,
response_class = Response,
headers = DotDict({'User-Agent': f'IzzyLib/{__version__}'}),
proxy_type = 'https',
proxy_host = None,
proxy_port = None
)
if kwargs.get('proxy_type') not in [None, *proxy_ports.keys()]:
raise ValueError(f'Not a valid proxy type: {kwargs["proxy_type"]}')
appagent = kwargs.pop('appagent', None)
useragent = kwargs.pop('useragent', None)
if appagent:
self.set_appagent(appagent)
elif useragent:
self.set_useragent(useragent)
self.update(kwargs)
if self.proxy_host and not self.proxy_port:
self.proxy_port = proxy_ports[self.proxy_type]
@property
def agent(self):
return self.headers['User-Agent']
def set_appagent(self, agent):
self.set_useragent(f'IzzyLib/{__version__} ({agent})')
def set_useragent(self, user_agent):
self.headers['User-Agent'] = user_agent
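A short sketch of the agent and proxy handling above; the app agent string and proxy host are placeholders.
cfg = Config(appagent='ExampleApp/0.1', proxy_type='http', proxy_host='127.0.0.1')
print(cfg.agent)        # 'IzzyLib/<version> (ExampleApp/0.1)'
print(cfg.proxy_port)   # falls back to 80 because proxy_type is 'http' and no port was given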

View file

@@ -0,0 +1,137 @@
import json
from datetime import datetime
from urllib.parse import urlencode
from urllib.request import Request as PyRequest
from . import http_methods, content_types
from ..misc import Url, boolean
try: import magic
except ImportError: magic = None
class Request(PyRequest):
def __init__(self, url, body=None, headers={}, cookies={}, method='GET'):
self.url = Url(url)
if method.upper() not in http_methods:
raise ValueError(f'Invalid HTTP method: {method}')
if self.url.proto not in ['http', 'https', 'ws', 'wss']:
raise ValueError(f'Invalid protocol in url: {self.url.proto}')
super().__init__(url, headers=headers, method=method.upper())
self.body = body
self._params_set = False
def __getitem__(self, key):
return self.get_header(key.capitalize())
def __setitem__(self, key, value):
self.set_header(key.capitalize(), value)
def __delitem__(self, key):
self.unset_header(key.capitalize())
def _set_params(self, config):
self.headers.update(config.headers)
if config.proxy_host:
self.set_proxy(f'{config.proxy_host}:{config.proxy_port}', config.proxy_type)
self._params_set = True
@property
def body(self):
return self.data
@body.setter
def body(self, data):
# treat a missing body as no data instead of encoding the string 'None'
if data is None:
self.data = None
return
if isinstance(data, str):
data = data.encode('utf-8')
elif isinstance(data, bytearray):
data = bytes(data)
elif isinstance(data, (dict, list, tuple, set)):
data = json.dumps(data).encode('utf-8')
elif not isinstance(data, bytes):
data = str(data).encode('utf-8')
self.data = data
self.set_header('Content-Length', str(len(data)))
@property
def host(self):
return self.url.host
@host.setter
def host(self, value):
pass
@property
def port(self):
return self.url.port or {'http': 80, 'https': 443, 'ws': 80, 'wss': 443}[self.url.proto]
@property
def secure(self):
return self.url.proto in ['https', 'wss']
def set_chunked(self, value):
if boolean(value):
self.set_header('Encoding', 'chunked')
else:
self.unset_header('Encoding')
def set_header(self, key, value):
self.add_header(key, value)
def unset_header(self, key):
try:
self.remove_header(key)
except:
pass
def update_headers(self, data={}, **kwargs):
kwargs.update(data)
for k, v in kwargs.items():
self.set_header(k, v)
def set_type(self, content_type):
self.set_header('Content-Type', content_types.get(content_type, content_type))
def set_type_from_body(self):
if not self.body:
return
self.set_type(magic.from_buffer(self.body, mime=True))
def sign_headers(self, privkey, keyid):
raise ImportError(f'Not implemented yet')
if not sign_request:
raise ImportError(f'Could not import HTTP signatures. Header signing disabled')
return sign_request(self, privkey, keyid)
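A brief sketch of building a Request by hand; the URL, header, and body values are placeholders.
req = Request('https://example.com/api', body={'key': 'value'}, method='POST')   # dict bodies are JSON-encoded by the body setter
req.set_type('json')          # shorthand mapped through content_types to application/json
req['X-Example'] = 'demo'     # __setitem__ wraps set_header
print(req.secure, req.host)   # True example.com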

View file

@@ -0,0 +1,96 @@
from io import BytesIO
from ..dotdict import DotDict
from ..http_utils import Cookies, Headers
from ..misc import Url
class Response:
def __init__(self, response):
self.__response = response
self.__body = b''
self.__url = Url(response.url)
headers = []
cookies = []
for key, value in response.getheaders():
if key.lower() in ['set-cookie']:
cookies.append(value)
else:
headers.append((key, value))
self.headers = Headers(headers, readonly=True)
self.cookies = Cookies(cookies, readonly=True)
def __getitem__(self, key):
return self.get_header(key.capitalize())
@property
def body(self):
if not self.__body:
self.read()
return self.__body
# todo: fetch encoding from headers if possible
@property
def encoding(self):
return 'utf-8'
@property
def status(self):
return self.__response.status
@property
def url(self):
return self.__url
@property
def version(self):
vers = self.__response.version
if vers == 10:
return 1.0
elif vers == 11:
return 1.1
elif vers == 20:
return 2.0
print('Warning! Invalid HTTP version:', type(vers).__name__, vers)
return vers
@property
def bytes(self):
return self.__body
@property
def text(self):
return self.body.decode(self.encoding)
@property
def json(self):
return DotDict(self.body)
def get_header(self, name):
return self.headers.get(name.lower())
def read(self, amount=None):
data = self.__response.read(amount)
self.__body += data
return data
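A closing sketch of consuming the Response wrapper through the client; the endpoint is a placeholder and is assumed to return a text body.
resp = client.get('https://example.com/api')
print(resp.status, resp.version)        # e.g. 200 1.1
print(resp.get_header('content-type'))
print(resp.text[:80])                   # decoded with the (currently hard-coded) utf-8 encoding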

View file

@@ -1,461 +0,0 @@
:root {
--text: #eee;
--hover: {{primary.desaturate(50).lighten(50)}};
--primary: {{primary}};
--background: {{background}};
--ui: {{primary.desaturate(25).lighten(5)}};
--ui-background: {{background.lighten(7.5)}};
--shadow-color: {{black.rgba(25)}};
--shadow: 0 4px 4px 0 var(--shadow-color), 3px 0 4px 0 var(--shadow-color);
--negative: {{negative}};
--negative-dark: {{negative.darken(85)}};
--positive: {{positive}};
--positive-dark: {{positive.darken(85)}};
--message: var(--positive);
--error: var(--negative);
--gap: 15px;
--easing: cubic-bezier(.6, .05, .28, .91);
--trans-speed: {{speed}}ms;
}
body {
color: var(--text);
background-color: var(--background);
font-family: sans undertale;
font-size: 16px;
margin: 15px 0;
}
a, a:visited {
color: var(--primary);
text-decoration: none;
}
a:hover {
color: var(--hover);
text-decoration: underline;
}
input:not([type='checkbox']), select, textarea {
color: var(--text);
background-color: var(--background);
border: 1px solid var(--background);
box-shadow: 0 2px 2px 0 var(--shadow-color);
padding: 5px;
}
input:hover, select:hover, textarea:hover {
border-color: var(--hover);
}
input:focus, select:focus, textarea:focus {
outline: 0;
border-color: var(--primary);
}
details:focus, summary:focus {
outline: 0;
}
/* Classes */
.button {
display: inline-block;
padding: 5px;
background-color: {{primary.darken(85)}};
text-align: center;
box-shadow: var(--shadow);
}
.button:hover {
background-color: {{primary.darken(65)}};
text-decoration: none;
}
.grid-container {
display: grid;
grid-template-columns: auto;
grid-gap: var(--gap);
}
.grid-item {
display: inline-grid;
}
.flex-container {
display: flex;
flex-wrap: wrap;
}
.menu {
list-style-type: none;
padding: 0;
margin: 0;
}
.menu li {
display: inline-block;
text-align: center;
min-width: 60px;
background-color: {{background.lighten(20)}};
}
.menu li a {
display: block;
padding-left: 5px;
padding-right: 5px;
}
.menu li:hover {
background-color: {{primary.lighten(25).desaturate(25)}};
}
.menu li a:hover {
text-decoration: none;
color: {{primary.darken(90).desaturate(50)}};
}
.section {
padding: 8px;
background-color: var(--ui-background);
box-shadow: var(--shadow);
}
.shadow {
box-shadow: 0 4px 4px 0 var(--shadow-color), 3px 0 4px 0 var(--shadow-color);
}
.message {
line-height: 2em;
display: block;
}
/* # this is kinda hacky and needs to be replaced */
.tooltip:hover::after {
position: relative;
padding: 8px;
bottom: 35px;
border-radius: 5px;
white-space: nowrap;
border: 1px solid var(--text);
color: var(--text);
background-color: {{primary.desaturate(50).darken(75)}};
box-shadow: var(--shadow);
/*z-index: -1;*/
}
/* ids */
#title {
font-size: 36px;
font-weight: bold;
text-align: center;
}
#message, #error {
padding: 10px;
color: var(--background);
margin-bottom: var(--gap);
text-align: center;
}
#message {
background-color: var(--message);
}
#error {
background-color: var(--error);
}
#body {
width: 790px;
margin: 0 auto;
}
#header {
display: flex;
margin-bottom: var(--gap);
text-align: center;
font-size: 2em;
line-height: 40px;
font-weight: bold;
}
#header > div {
/*display: inline-block;*/
height: 40px;
}
#header .page-title {
text-align: {% if menu_left %}right{% else %}left{% endif %};
white-space: nowrap;
overflow: hidden;
width: 100%;
}
#content-body .title {
text-align: center;
font-size: 2em;
font-weight: bold;
color: var(--primary);
}
#footer {
margin-top: var(--gap);
display: flex;
grid-gap: 5px;
font-size: 0.80em;
line-height: 20px;
}
#footer > div {
height: 20px;
}
#footer .avatar img {
margin: 0 auto;
}
#footer .user {
white-space: nowrap;
overflow: hidden;
width: 100%;
}
#footer .source {
white-space: nowrap;
}
#logreg input, textarea {
display: block;
margin: 8px auto;
}
#logreg textarea, input:not([type='submit']) {
width: 50%;
}
/* Main menu */
#btn {
cursor: pointer;
transition: left 500ms var(--easing);
}
#btn {
transition: background-color var(--trans-speed);
width: 55px;
margin-left: var(--gap);
background-image: url('/framework/static/menu.svg');
background-size: 50px;
background-position: center center;
background-repeat: no-repeat;
}
#btn div {
transition: transform var(--trans-speed) ease, opacity var(--trans-speed), background-color var(--trans-speed);
}
#btn.active {
margin-left: 0;
position: fixed;
z-index: 5;
top: 12px;
{% if menu_left %}right: calc(100% - 250px + 12px){% else %}right: 12px{% endif %};
background-color: {{primary.darken(75)}};
color: {{background}};
}
/*#btn.active div {
width: 35px;
height: 2px;
margin-bottom: 8px;
}*/
#btn.active:parent {
grid-template-columns: auto;
}
#menu {
position: fixed;
z-index: 4;
overflow: auto;
top: 0px;
opacity: 0;
padding: 20px 0px;
width: 250px;
height: 100%;
transition: all var(--trans-speed) ease;
{% if menu_left %}left{% else %}right{% endif %}: -250px;
}
#menu.active {
{% if menu_left %}left{% else %}right{% endif %}: 0;
opacity: 1;
}
#menu #items {
/*margin-top: 50px;*/
margin-bottom: 30px;
}
#menu a:hover {
text-decoration: none;
}
#menu {
font-weight: bold;
}
#menu .item {
display: block;
position: relative;
font-size: 2em;
transition: all var(--trans-speed);
padding-left: 20px;
}
#menu .title-item {
color: var(--primary);
}
#items .sub-item {
padding-left: 40px;
}
#items .item:not(.title-item):hover {
padding-left: 40px;
}
#items .sub-item:hover {
padding-left: 60px !important;
}
/*#menu details .item:hover {
padding-left: 60px;
}*/
#items summary {
cursor: pointer;
color: var(--primary);
}
#items details[open]>.item:not(details) {
animation-name: fadeInDown;
animation-duration: var(--trans-speed);
}
#items summary::-webkit-details-marker {
display: none;
}
#items details summary:after {
content: " +";
}
#items details[open] summary:after {
content: " -";
}
#btn, #btn * {
will-change: transform;
}
#menu {
will-change: transform, opacity;
}
@keyframes fadeInDown {
0% {
opacity: 0;
transform: translateY(-1.25em);
}
100% {
opacity: 1;
transform: translateY(0);
}
}
{% for name in cssfiles %}
{% include 'style/' + name + '.css' %}
{% endfor %}
/* responsive design */
@media (max-width: 810px) {
body {
margin: 0;
}
#body {
width: auto;
}
}
@media (max-width: 610px) {
.settings .grid-container {
grid-template-columns: auto;
}
.settings .label {
text-align: center;
}
#logreg textarea, input:not([type='submit']) {
width: calc(100% - 16px);
}
}
/* scrollbar */
body {scrollbar-width: 15px; scrollbar-color: var(--primary) {{background.darken(10)}};}
::-webkit-scrollbar {width: 15px;}
::-webkit-scrollbar-track {background: {{background.darken(10)}};}
/*::-webkit-scrollbar-button {background: var(--primary);}
::-webkit-scrollbar-button:hover {background: var(--text);}*/
::-webkit-scrollbar-thumb {background: var(--primary);}
::-webkit-scrollbar-thumb:hover {background: {{primary.lighten(25)}};}
/* page font */
@font-face {
font-family: 'sans undertale';
src: local('Nunito Sans Bold'),
url('/framework/static/nunito/NunitoSans-SemiBold.woff2') format('woff2'),
url('/framework/static/nunito/NunitoSans-SemiBold.ttf') format('ttf');
font-weight: bold;
font-style: normal;
}
@font-face {
font-family: 'sans undertale';
src: local('Nunito Sans Light Italic'),
url('/framework/static/nunito/NunitoSans-ExtraLightItalic.woff2') format('woff2'),
url('/framework/static/nunito/NunitoSans-ExtraLightItalic.ttf') format('ttf');
font-weight: normal;
font-style: italic;
}
@font-face {
font-family: 'sans undertale';
src: local('Nunito Sans Bold Italic'),
url('/framework/static/nunito/NunitoSans-Italic.woff2') format('woff2'),
url('/framework/static/nunito/NunitoSans-Italic.ttf') format('ttf');
font-weight: bold;
font-style: italic;
}
@font-face {
font-family: 'sans undertale';
src: local('Nunito Sans Light'),
url('/framework/static/nunito/NunitoSans-Light.woff2') format('woff2'),
url('/framework/static/nunito/NunitoSans-Light.ttf') format('ttf');
font-weight: normal;
font-style: normal;
}

View file

@@ -1,51 +0,0 @@
<!DOCTYPE html>
%html
%head
%title << {{cfg.title}}: {{page}}
%link(rel='shortcut icon', type='image/png', href='{{cfg.tpl_favicon_path}}')
%link(rel='stylesheet' type='text/css' href='/framework/style.css')
%link(rel='manifest' href='/framework/manifest.json')
%meta(charset='UTF-8')
%meta(name='viewport' content='width=device-width, initial-scale=1')
-block head
%body
#body
#header.flex-container
-if menu_left
#btn.section
.page-title.section -> %a.title(href='/') << {{cfg.title}}
-else
.page-title.section -> %a.title(href='/') << {{cfg.title}}
#btn.section
-if message
#message.section << {{message}}
-if error
#error.section << {{error}}
#menu.section
.title-item.item << Menu
#items
-if not len(cfg.menu):
-include 'menu.haml'
-else:
-for label, path_data in cfg.menu.items()
-if path_data[1] == 1000 and request.user_level == 0:
.item -> %a(href='{{path_data[0]}}') << {{label}}
-elif request.user_level >= path_data[1]
.item -> %a(href='{{path_data[0]}}') << {{label}}
#content-body.section
-block content
#footer.grid-container.section
.avatar
.user
.source
%a(href='{{cfg.git_repo}}' target='_new') << {{cfg.name}}/{{cfg.version}}
%script(type='application/javascript' src='/framework/static/menu.js')

View file

@@ -1,8 +0,0 @@
-extends 'base.haml'
-set page = 'Error'
-block content
%center
%font size='8'
HTTP {{response.status}}
%br
=error_message

View file

@@ -1 +0,0 @@
.item -> %a(href='/') << Home

Binary file not shown (before: 64 KiB).

Binary file not shown (before: 3.8 KiB).

View file

@@ -1,29 +0,0 @@
const sidebarBox = document.querySelector('#menu'),
sidebarBtn = document.querySelector('#btn'),
pageWrapper = document.querySelector('html'),
header = document.querySelector('#header');
sidebarBtn.addEventListener('click', event => {
sidebarBtn.classList.toggle('active');
sidebarBox.classList.toggle('active');
});
pageWrapper.addEventListener('click', event => {
itemId = event.srcElement.id
itemClass = event.srcElement.className
indexId = ['menu', 'btn', 'items'].indexOf(itemId)
indexClass = ['item', 'item name', 'items'].indexOf(itemClass)
if (sidebarBox.classList.contains('active') && (indexId == -1 && indexClass == -1)) {
sidebarBtn.classList.remove('active');
sidebarBox.classList.remove('active');
}
});
window.addEventListener('keydown', event => {
if (sidebarBox.classList.contains('active') && event.keyCode === 27) {
sidebarBtn.classList.remove('active');
sidebarBox.classList.remove('active');
}
});

View file

@@ -1,84 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
sodipodi:docname="menu.svg"
inkscape:version="1.0.1 (3bc2e813f5, 2020-09-07)"
id="svg8"
version="1.1"
viewBox="0 0 132.29167 79.375002"
height="300"
width="500">
<defs
id="defs2" />
<sodipodi:namedview
inkscape:window-maximized="1"
inkscape:window-y="36"
inkscape:window-x="36"
inkscape:window-height="990"
inkscape:window-width="1644"
units="px"
showgrid="true"
inkscape:document-rotation="0"
inkscape:current-layer="layer2"
inkscape:document-units="mm"
inkscape:cy="151.34478"
inkscape:cx="232.18877"
inkscape:zoom="1.4"
inkscape:pageshadow="2"
inkscape:pageopacity="0.0"
borderopacity="1.0"
bordercolor="#666666"
pagecolor="#ffffff"
id="base"
inkscape:snap-text-baseline="true"
inkscape:snap-intersection-paths="true"
inkscape:snap-bbox="true"
inkscape:bbox-nodes="true"
fit-margin-top="0"
fit-margin-left="0"
fit-margin-right="0"
fit-margin-bottom="0">
<inkscape:grid
dotted="true"
id="grid1402"
type="xygrid"
originx="-7.9375001"
originy="-27.781234" />
</sodipodi:namedview>
<metadata
id="metadata5">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
<dc:title></dc:title>
</cc:Work>
</rdf:RDF>
</metadata>
<g
inkscape:label="Layer 1"
id="layer2"
inkscape:groupmode="layer"
transform="translate(-7.9374999,-27.781233)">
<path
style="fill:none;fill-opacity:1;stroke:#cfcfcf;stroke-width:13.2292;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="m 15.875,67.468765 c 116.41667,0 116.41667,0 116.41667,0 z"
id="path1590" />
<path
style="fill:none;fill-opacity:1;stroke:#cfcfcf;stroke-width:13.2292;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="m 15.875,35.718766 c 116.41667,0 116.41667,0 116.41667,0 z"
id="path1590-7" />
<path
style="fill:none;fill-opacity:1;stroke:#cfcfcf;stroke-width:13.2292;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="m 15.875,99.218766 c 116.41667,0 116.41667,0 116.41667,0 z"
id="path1590-7-8" />
</g>
</svg>


View file

@@ -1,44 +0,0 @@
Copyright 2016 The Nunito Project Authors (contact@sansoxygen.com),
This Font Software is licensed under the SIL Open Font License, Version 1.1.
This license is copied below, and is also available with a FAQ at: http://scripts.sil.org/OFL
-----------------------------------------------------------
SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007
-----------------------------------------------------------
PREAMBLE
The goals of the Open Font License (OFL) are to stimulate worldwide development of collaborative font projects, to support the font creation efforts of academic and linguistic communities, and to provide a free and open framework in which fonts may be shared and improved in partnership with others.
The OFL allows the licensed fonts to be used, studied, modified and redistributed freely as long as they are not sold by themselves. The fonts, including any derivative works, can be bundled, embedded, redistributed and/or sold with any software provided that any reserved names are not used by derivative works. The fonts and derivatives, however, cannot be released under any other type of license. The requirement for fonts to remain under this license does not apply to any document created using the fonts or their derivatives.
DEFINITIONS
"Font Software" refers to the set of files released by the Copyright Holder(s) under this license and clearly marked as such. This may include source files, build scripts and documentation.
"Reserved Font Name" refers to any names specified as such after the copyright statement(s).
"Original Version" refers to the collection of Font Software components as distributed by the Copyright Holder(s).
"Modified Version" refers to any derivative made by adding to, deleting, or substituting -- in part or in whole -- any of the components of the Original Version, by changing formats or by porting the Font Software to a new environment.
"Author" refers to any designer, engineer, programmer, technical writer or other person who contributed to the Font Software.
PERMISSION & CONDITIONS
Permission is hereby granted, free of charge, to any person obtaining a copy of the Font Software, to use, study, copy, merge, embed, modify, redistribute, and sell modified and unmodified copies of the Font Software, subject to the following conditions:
1) Neither the Font Software nor any of its individual components, in Original or Modified Versions, may be sold by itself.
2) Original or Modified Versions of the Font Software may be bundled, redistributed and/or sold with any software, provided that each copy contains the above copyright notice and this license. These can be included either as stand-alone text files, human-readable headers or in the appropriate machine-readable metadata fields within text or binary files as long as those fields can be easily viewed by the user.
3) No Modified Version of the Font Software may use the Reserved Font Name(s) unless explicit written permission is granted by the corresponding Copyright Holder. This restriction only applies to the primary font name as presented to the users.
4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font Software shall not be used to promote, endorse or advertise any Modified Version, except to acknowledge the contribution(s) of the Copyright Holder(s) and the Author(s) or with their explicit written permission.
5) The Font Software, modified or unmodified, in part or in whole, must be distributed entirely under this license, and must not be distributed under any other license. The requirement for fonts to remain under this license does not apply to any document created using the Font Software.
TERMINATION
This license becomes null and void if any of the above conditions are not met.
DISCLAIMER
THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE FONT SOFTWARE.

View file

@@ -1,22 +0,0 @@
http_methods = ['CONNECT', 'DELETE', 'GET', 'HEAD', 'OPTIONS', 'PATCH', 'POST', 'PUT', 'TRACE']
applications = {}
def get_app(name='default'):
return applications[name]
def set_app(app):
applications[app.name] = app
return app
def create_app(appname, **kwargs):
return set_app(Application(appname=appname, **kwargs))
from .application import Application, Blueprint
from .middleware import MediaCacheControl
from .request import Request
from .response import Response
from .view import View, Static

View file

@@ -1,399 +0,0 @@
import asyncio, signal, socket, sys, time, traceback
from functools import partial
from http_router import Router, MethodNotAllowed, NotFound
from . import http_methods, error
from .config import Config
from .response import Response
#from .router import Router
from .view import Static, Manifest, Robots, Style
from .transport import Transport
from .. import logging
from ..dotdict import DotDict
from ..exceptions import MethodNotHandledException, NoBlueprintForPath
from ..misc import signal_handler
from ..path import Path
try:
from ..sql2 import Database
except ImportError:
Database = NotImplementedError('Failed to import SQL database class')
try:
from jinja2.exceptions import TemplateNotFound
from ..template import Template
except ImportError:
TemplateNotFound = NotImplementedError('heck')
Template = NotImplementedError('Failed to import HTML Template class')
frontend = Path(__file__).resolve().parent.parent.join('http_frontend')
class ApplicationBase:
ctx = DotDict()
def __init__(self, appname='default', views=[], middleware=[], dbtype=None, dbargs={}, dbclass=Database, **kwargs):
self.name = appname
self.cfg = Config(**kwargs)
self.db = None
self.router = Router(trim_last_slash=True)
self.middleware = DotDict({'request': [], 'response': []})
self.routes = {}
for view in views:
self.add_view(view)
for mw in middleware:
self.add_middleware(mw)
if dbtype or dbargs:
if isinstance(Database, Exception):
raise Database from None
self.db = dbclass(dbtype, **dbargs, app=self)
def __getitem__(self, key):
return self.ctx[key]
def __setitem__(self, key, value):
self.ctx[key] = value
def get_route(self, path, method='GET'):
return self.router(path, method.upper())
def add_route(self, handler, path, method='GET'):
self.router.bind(handler, path, methods=[method.upper()])
self.routes[f'{method.upper()}:{path}'] = handler
def compare_routes(self, route, path, method='GET'):
try:
return self.get_route(path, method) == self.routes[f'{method.upper()}:{route}']
except:
return False
async def run_handler(self, request, response, path, method=None, **kwargs):
handler = self.get_route(path, method or request.method)
return await handler.target(request, response, **kwargs)
def add_view(self, view):
paths = view.__path__ if isinstance(view.__path__, list) else [view.__path__]
view_class = view(self)
for path in paths:
for method in http_methods:
try:
self.add_route(view_class.get_handler(method), path, method)
except MethodNotHandledException:
pass
def add_static(self, path, src):
if Path(src).isdir:
path = Path(path).join('{path:.*}')
self.add_route(Static(src), path)
def add_middleware(self, handler, attach=None):
if not asyncio.iscoroutinefunction(handler):
raise TypeError('Middleware handler must be a coroutine function or method')
if not attach:
try:
arg_len = handler.__code__.co_argcount
if arg_len == 1:
attach = 'request'
elif arg_len == 2:
attach = 'response'
else:
raise TypeError(f'Middleware handler must have 1 (request) or 2 (response) arguments, not {arg_len}')
except Exception as e:
raise e from None
assert attach in ['request', 'response']
mwlist = self.middleware[attach]
if handler in mwlist:
return logging.error(f'Middleware handler already added to {attach}: {handler}')
mwlist.append(handler)
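## Note: the attach point is inferred from the handler's signature above; an async
## handler taking only (request) runs before the route handler, while one taking
## (request, response) runs after it returns (see handle_request below).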
async def handle_request(self, request, response, path=None):
if request.host not in self.cfg.hosts and not request.path.startswith('/framework'):
raise error.BadRequest(f'Host not handled on this server: {request.host}')
handler = self.get_route(path or request.path, request.method)
request._params = handler.params
await self.handle_middleware(request)
if handler.params:
handler_response = await handler.target(request, response, **handler.params)
else:
handler_response = await handler.target(request, response)
if isinstance(handler_response, dict):
response = self.cfg.response_class(**handler_response)
elif isinstance(handler_response, Response):
response = handler_response
elif not handler_response:
pass
else:
raise error.InternalServerError()
await self.handle_middleware(request, response)
return response
async def handle_middleware(self, request, response=None):
for middleware in self.middleware['response' if response else 'request']:
if response:
await middleware(request, response)
else:
await middleware(request)
class Application(ApplicationBase):
def __init__(self, loop=None, **kwargs):
super().__init__(**kwargs)
if loop:
self.loop = loop
else:
try:
self.loop = asyncio.get_running_loop()
except RuntimeError:
self.loop = asyncio.new_event_loop()
self._blueprints = {}
self._server = None
self._tasks = []
if self.cfg.tpl_default:
if type(Template) == NotImplementedError:
raise Template
self.template = Template(
self.cfg.tpl_search,
self.cfg.tpl_globals,
self.cfg.tpl_context,
self.cfg.tpl_autoescape
)
self.template.add_env('app', self)
self.template.add_env('cfg', self.cfg)
self.template.add_env('len', len)
self.template.add_search_path(frontend)
self.add_view(Manifest)
#self.add_view(Robots)
self.add_view(Style)
self.add_static('/framework/static/', frontend.join('static'))
else:
self.template = None
def add_blueprint(self, bp):
assert bp.prefix not in self._blueprints.values()
self._blueprints[bp.prefix] = bp
def get_blueprint_for_path(self, path):
for bppath, bp in self._blueprints.items():
if path.startswith(bppath):
return bp
raise NoBlueprintForPath(path)
def render(self, *args, **kwargs):
return self.template.render(*args, **kwargs)
async def create_task(self, log=True):
if self.cfg.socket:
if log:
logging.info(f'Starting server on {self.cfg.socket}')
return await asyncio.start_unix_server(
self.handle_client,
path = self.cfg.socket
)
else:
if log:
logging.info(f'Starting server on {self.cfg.listen}:{self.cfg.port}')
return await asyncio.start_server(
self.handle_client,
host = self.cfg.listen,
port = self.cfg.port,
family = socket.AF_INET,
reuse_address = True,
reuse_port = True
)
def stop(self, *_):
if not self._server:
print('server not running')
return
self._server.close()
for task in self._tasks:
task.cancel()
self._tasks.clear()
signal_handler(None)
def start(self, *tasks, log=True):
if self._server:
return
if self.cfg.socket:
if log:
logging.info(f'Starting server on {self.cfg.socket}')
server = asyncio.start_unix_server(
self.handle_client,
path = self.cfg.socket
)
else:
if log:
logging.info(f'Starting server on {self.cfg.listen}:{self.cfg.port}')
server = asyncio.start_server(
self.handle_client,
host = self.cfg.listen,
port = self.cfg.port,
family = socket.AF_INET,
reuse_address = True,
reuse_port = True
)
signal_handler(self.stop)
self._server = self.loop.run_until_complete(server)
for task in tasks:
asyncio.ensure_future(task, loop=self.loop)
self.loop.run_until_complete(self.handle_run_server())
async def handle_run_server(self):
while self._server.is_serving():
await asyncio.sleep(0.1)
await self._server.wait_closed()
self._server = None
logging.info('Server stopped')
async def handle_client(self, reader, writer):
transport = Transport(self, reader, writer)
request = None
response = None
try:
request = self.cfg.request_class(self, transport)
response = self.cfg.response_class(request=request)
try:
await request.parse_headers()
except asyncio.exceptions.IncompleteReadError as e:
request = None
raise e from None
try:
# this doesn't work all the time for some reason
blueprint = self.get_blueprint_for_path(request.path)
response = await blueprint.handle_request(request, response, blueprint.prefix)
except NoBlueprintForPath:
response = await self.handle_request(request, response)
#except Exception as e:
#traceback.print_exc()
except NotFound:
response = self.cfg.response_class(request=request).set_error('Not Found', 404)
except MethodNotAllowed:
response = self.cfg.response_class(request=request).set_error('Method Not Allowed', 405)
except error.RedirError as e:
response = self.cfg.response_class.new_redir(e.path, e.status)
except error.HttpError as e:
response = self.cfg.response_class(request=request).set_error(e.message, e.status)
except TemplateNotFound as e:
response = self.cfg.response_class(request=request).set_error(f'Template not found: {e}', 500)
except:
traceback.print_exc()
if not response.streaming:
## Don't use a custom response class here just in case it caused the error
response = Response(request=request).set_error('Server Error', 500)
if not response.streaming:
try:
response.headers.update(self.cfg.default_headers)
await transport.write(response.compile())
if request and request.log and not request.path.startswith('/framework'):
logging.info(f'{request.remote} {request.method} {request.path} {response.status} {len(response.body)} {request.agent}')
except:
traceback.print_exc()
await transport.close()
class Blueprint(ApplicationBase):
def __init__(self, prefix, **kwargs):
super().__init__(**kwargs)
self.prefix = prefix
## might keep this
def set_response(request, resp_class, func, *args, **kwargs):
try:
return getattr(resp_class, func)(*args, **kwargs)
except:
traceback.print_exc()
return Response(request=request).set_error('Server Error', 500)

View file

@ -1,91 +0,0 @@
from .request import Request
from .response import Response
from .. import __version__
from ..config import BaseConfig
from ..misc import boolean
from ..path import Path
class Config(BaseConfig):
_startup = True
def __init__(self, **kwargs):
super().__init__(
name = 'IzzyLib Http Server',
title = None,
version = '0.0.1',
git_repo = 'https://git.barkshark.xyz/izaliamae/izzylib',
socket = None,
listen = 'localhost',
host = None,
web_host = None,
alt_hosts = [],
port = 8080,
proto = 'http',
access_log = True,
timeout = 60,
default_headers = {},
request_class = Request,
response_class = Response,
sig_handler = None,
sig_handler_args = [],
sig_handler_kwargs = {},
tpl_search = [],
tpl_globals = {},
tpl_context = None,
tpl_autoescape = True,
tpl_default = True,
tpl_favicon_path = '/framework/static/icon64.png'
)
self._startup = False
self.set_data(kwargs)
self.default_headers['server'] = f'{self.name}/{__version__}'
@property
def hosts(self):
return (f'{self.listen}:{self.port}', self.host, self.web_host, *self.alt_hosts)
def parse_value(self, key, value):
if self._startup:
return value
if key == 'listen':
if not self.host:
self.host = value
if not self.web_host:
self.web_host = value
elif key == 'host':
if not self.web_host or self.web_host == self.listen:
self.web_host = value
elif key == 'port' and not isinstance(value, int):
raise TypeError(f'{key} must be an integer')
elif key == 'socket':
value = Path(value)
elif key in ['access_log', 'tpl_autoescape', 'tpl_default'] and not isinstance(value, bool):
raise TypeError(f'{key} must be a boolean')
elif key in ['alt_hosts', 'sig_handler_args', 'tpl_search'] and not isinstance(value, list):
raise TypeError(f'{key} must be a list')
elif key in ['sig_handler_kwargs', 'tpl_globals'] and not isinstance(value, dict):
raise TypeError(f'{key} must be a dict')
elif key == 'tpl_context' and not getattr(value, '__call__', None):
raise TypeError(f'{key} must be a callable')
elif key == 'request_class' and not issubclass(value, Request):
raise TypeError(f'{key} must be a subclass of izzylib.http_server_async.Request')
elif key == 'response_class' and not issubclass(value, Response):
raise TypeError(f'{key} must be a subclass of izzylib.http_server_async.Response')
return value

View file

@ -1,65 +0,0 @@
from functools import partial
class HttpError(Exception):
def __init__(self, message, status=500):
super().__init__(f'HTTP Error {status}: {message}')
self.status = status
self.message = message
class RedirError(Exception):
def __init__(self, path, status=301):
super().__init__(f'HTTP Error {status}: {path}')
self.status = status
self.path = path
## 200 Errors
Ok = partial(HttpError, status=200)
Created = partial(HttpError, status=201)
Accepted = partial(HttpError, status=202)
NoContent = partial(HttpError, status=204)
ResetContent = partial(HttpError, status=205)
PartialContent = partial(HttpError, status=206)
## 300 Errors
NotModified = partial(HttpError, status=304)
## 400 Errors
BadRequest = partial(HttpError, status=400)
Unauthorized = partial(HttpError, status=401)
Forbidden = partial(HttpError, status=403)
NotFound = partial(HttpError, status=404)
MethodNotAllowed = partial(HttpError, status=405)
RequestTimeout = partial(HttpError, status=408)
Gone = partial(HttpError, status=410)
LengthRequired = partial(HttpError, status=411)
PreconditionFailed = partial(HttpError, status=412)
PayloadTooLarge = partial(HttpError, status=413)
UriTooLong = partial(HttpError, status=414)
UnsupportedMediaType = partial(HttpError, status=415)
RangeNotSatisfiable = partial(HttpError, status=416)
Teapot = partial(HttpError, status=418)
UpgradeRequired = partial(HttpError, status=426)
TooManyRequests = partial(HttpError, status=429)
RequestHeaderFieldsTooLarge = partial(HttpError, status=431)
UnavailableForLegalReasons = partial(HttpError, status=451)
## 500 Errors
InternalServerError = partial(HttpError, status=500)
NotImplemented = partial(HttpError, status=501)
BadGateway = partial(HttpError, status=502)
ServiceUnavailable = partial(HttpError, status=503)
GatewayTimeout = partial(HttpError, status=504)
HttpVersionNotSuported = partial(HttpError, status=505)
NetworkAuthenticationRequired = partial(HttpError, status=511)
## Redirects
MovedPermanently = partial(RedirError, status=301)
Found = partial(RedirError, status=302)
SeeOther = partial(RedirError, status=303)
TemporaryRedirect = partial(RedirError, status=307)
PermanentRedirect = partial(RedirError, status=308)
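## Usage sketch (handler and paths are hypothetical): raising one of these partials
## inside a view short-circuits the request, e.g. `raise error.NotFound('no such page')`
## or `raise error.Found('/new-location')`; Application.handle_client converts
## HttpError and RedirError instances into the matching responses.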

View file

@ -1,14 +0,0 @@
media_types = [
'application/octet-stream'
]
media_main_types = [
'audio',
'font',
'image',
'video'
]
async def MediaCacheControl(request, response):
if response.content_type in media_types or any(map(response.content_type.startswith, media_main_types)):
response.headers['Cache-Control'] = 'public,max-age=2628000,immutable'

View file

@ -1,343 +0,0 @@
from datetime import datetime, timezone, timedelta
from .. import logging, boolean
from ..dotdict import DotDict
from ..path import Path
UtcTime = timezone.utc
LocalTime = datetime.now(UtcTime).astimezone().tzinfo
cookie_params = {
'Expires': 'expires',
'Max-Age': 'maxage',
'Domain': 'domain',
'Path': 'path',
'SameSite': 'samesite',
'Secure': 'secure',
'HttpOnly': 'httponly'
}
request_methods = [
'GET', 'HEAD', 'POST', 'PUT', 'DELETE',
'CONNECT', 'OPTIONS', 'TRACE', 'PATCH'
]
class Cookies(DotDict):
def __setitem__(self, key, value):
if type(value) != CookieItem:
value = CookieItem(key, value)
super().__setitem__(key, value)
class Headers(DotDict):
def __getattr__(self, key):
return self[key.replace('_', '-')]
def __setattr__(self, key, value):
self[key.replace('_', '-')] = value
def __getitem__(self, key):
return super().__getitem__(key.title())
def __setitem__(self, key, value):
key = key.title()
if key in ['Cookie', 'Set-Cookie']:
logging.warning('Do not set the "Cookie" or "Set-Cookie" headers')
return
elif key == 'Date':
value = DateItem(value)
try:
self[key].append(value)
except KeyError:
super().__setitem__(key, HeaderItem(key, value))
def get(self, key, default=None):
return super().get(key.title(), default)
def as_dict(self):
data = {}
for k,v in self.items():
data[k] = str(v)
return data
def getone(self, key, default=None):
try:
return self[key].one()
except (KeyError, IndexError):
return default
def setall(self, key, value):
self[key].set(value)
#def update(self, data):
#for k,v in data.items():
#self.__setitem__(k,v)
class CookieItem:
def __init__(self, key, value, **kwargs):
self.key = key
self.value = value
self.args = DotDict()
for k,v in kwargs.items():
if k not in cookie_params.values():
raise AttributeError(f'Not a valid cookie parameter: {k}')
setattr(self, k, v)
def __str__(self):
text = f'{self.key}={self.value}'
if self.expires:
text += f'; Expires={self.expires.strftime("%a, %d %b %Y %H:%M:%S GMT")}'
if self.maxage != None:
text += f'; Max-Age={self.maxage}'
if self.domain:
text += f'; Domain={self.domain}'
if self.path:
text += f'; Path={self.path}'
if self.samesite:
text += f'; SameSite={self.samesite}'
if self.secure:
text += f'; Secure'
if self.httponly:
text += f'; HttpOnly'
return text
@classmethod
def from_string(cls, data):
kwargs = {}
for idx, pairs in enumerate(data.split(';')):
try:
k, v = pairs.split('=', 1)
v = v.strip()
except:
k, v = pairs, True
k = k.replace(' ', '')
if isinstance(v, str) and v.startswith('"') and v.endswith('"'):
v = v[1:-1]
if idx == 0:
key = k
value = v
elif k in cookie_params.keys():
kwargs[cookie_params[k]] = v
else:
logging.info(f'Invalid key/value pair for cookie: {k} = {v}')
return cls(key, value, **kwargs)
@property
def expires(self):
return self.args.get('Expires')
@expires.setter
def expires(self, data):
if isinstance(data, str):
data = datetime.strptime(data, '%a, %d %b %Y %H:%M:%S GMT').replace(tzinfo=UtcTime)
elif isinstance(data, int) or isinstance(data, float):
data = datetime.fromtimestamp(data).replace(tzinfo=UtcTime)
elif isinstance(data, datetime):
if not data.tzinfo:
data = data.replace(tzinfo=UtcTime)
elif isinstance(data, timedelta):
data = datetime.now(UtcTime) + data
else:
raise TypeError(f'Expires must be a http date string, timestamp, or datetime object, not {data.__class__.__name__}')
self.args['Expires'] = data
@property
def maxage(self):
return self.args.get('Max-Age')
@maxage.setter
def maxage(self, data):
if isinstance(data, int):
pass
elif isinstance(data, timedelta):
data = data.seconds
elif isinstance(data, datetime):
data = (data - datetime.now()).seconds
else:
raise TypeError(f'Max-Age must be an integer, timedelta object, or datetime object, not {data.__class__.__name__}')
self.args['Max-Age'] = data
@property
def domain(self):
return self.args.get('Domain')
@domain.setter
def domain(self, data):
if not isinstance(data, str):
raise ValueError(f'Domain must be a string, not {data.__class__.__name__}')
self.args['Domain'] = data
@property
def path(self):
return self.args.get('Path')
@path.setter
def path(self, data):
if not isinstance(data, str):
raise ValueError(f'Path must be a string or izzylib.Path object, not {data.__class__.__name__}')
self.args['Path'] = Path(data)
@property
def secure(self):
return self.args.get('Secure')
@secure.setter
def secure(self, data):
self.args['Secure'] = boolean(data)
@property
def httponly(self):
return self.args.get('HttpOnly')
@httponly.setter
def httponly(self, data):
self.args['HttpOnly'] = boolean(data)
@property
def samesite(self):
return self.args.get('SameSite')
@samesite.setter
def samesite(self, data):
if isinstance(data, bool):
data = 'Strict' if data else 'None'
elif isinstance(data, str) and data.title() in ['Strict', 'Lax', 'None']:
data = data.title()
else:
raise TypeError(f'SameSite must be a boolean or one of Strict, Lax, or None, not {data.__class__.__name__}')
self.args['SameSite'] = data
self.args['Secure'] = True
def as_dict(self):
data = DotDict({self.key: self.value})
data.update(self.args)
return data
def set_defaults(self):
for key in list(self.args.keys()):
del self.args[key]
def set_delete(self):
self.args.pop('Expires', None)
self.maxage = 0
return self
class HeaderItem(list):
def __init__(self, key, values):
super().__init__()
self.update(values)
self.key = key
def __str__(self):
return ','.join(str(v) for v in self)
def set(self, *values):
self.clear()
for value in values:
self.append(value)
def one(self):
return self[0]
def update(self, *items):
for item in items:
self.append(item)
class DateItem(str):
_datetime = None
def __new__(cls, date):
new_date = str.__new__(cls, date)
new_date._datetime = datetime.strptime(date, '%a, %d %b %Y %H:%M:%S GMT').replace(tzinfo=UtcTime)
return new_date
@property
def utc(self):
return self._datetime.astimezone(UtcTime)
@property
def local(self):
return self._datetime.astimezone(LocalTime)

View file

@ -1,224 +0,0 @@
import asyncio, email, traceback
from datetime import datetime, timezone
from urllib.parse import unquote_plus
from . import get_app
from .misc import Cookies, Headers, CookieItem
from .. import logging
from ..dotdict import DotDict, MultiDotDict
try: from ..http_signatures import verify_headers
except ImportError: verify_headers = None
UtcTime = timezone.utc
LocalTime = datetime.now(UtcTime).astimezone().tzinfo
class Request:
__slots__ = [
'_body', '_form', '_method', '_app', '_params',
'address', 'path', 'version', 'headers', 'cookies',
'query', 'raw_query', 'transport', 'log'
]
ctx = DotDict()
def __init__(self, app, transport):
super().__init__()
self._app = app
self._body = b''
self._form = DotDict()
self._method = None
self._params = None
self.transport = transport
self.headers = Headers()
self.cookies = Cookies()
self.query = DotDict()
self.address = transport.client_address
self.path = None
self.version = None
self.raw_query = None
self.log = True
self.verified = False
def __getitem__(self, key):
return self.ctx[key]
def __setitem__(self, key, value):
self.ctx[key] = value
def __getattr__(self, key):
if key in self.__slots__:
return super().__getattribute__(key)
try:
return self.ctx[key]
except:
raise AttributeError(key)
def __setattr__(self, key, value):
try:
super().__setattr__(key, value)
except:
self.ctx[key] = value
@property
def app(self):
return self._app or get_app()
@app.setter
def app(self, app):
self._app = app
@property
def agent(self):
return self.headers.getone('User-Agent', 'no agent')
@property
def accept(self):
return self.headers.getone('Accept', '')
@property
def content_type(self):
return self.headers.getone('Content-Type', '')
@property
def date(self):
date_str = self.headers.getone('Date')
if date_str:
date = datetime.strptime(date_str, '%a, %d %b %Y %H:%M:%S GMT')
date = date.replace(tzinfo=UtcTime)
return date.astimezone(LocalTime)
# not sure if this should stay
return datetime.now(LocalTime)
@property
def host(self):
return self.headers.getone('Host')
@property
def length(self):
return int(self.headers.getone('Content-Length', 0))
@property
def remote(self):
return self.headers.getone('X-Real-Ip', self.headers.getone('X-Forwarded-For', self.address))
@property
def method(self):
return self._method
@method.setter
def method(self, data):
self._method = data.upper()
@property
def params(self):
return self._params
async def body(self):
if not self._body and self.length:
self._body = await self.transport.read(self.length)
return self._body
async def text(self):
return (await self.body()).decode('utf-8')
async def dict(self):
logging.warning('Request.dict will be removed in a future update')
return DotDict(await self.body())
async def json(self):
return DotDict(await self.body())
async def form(self):
if not self._form and 'application/x-www-form-urlencoded' in self.content_type:
for line in unquote_plus(await self.text()).split('&'):
try: key, value = line.split('=', 1)
except: key, value = line, None
self._form[key] = value
return self._form
async def parse_headers(self):
data = (await self.transport.readuntil(b'\r\n\r\n')).decode('utf-8')
for idx, line in enumerate(data.splitlines()):
if idx == 0:
self.method, path, self.version = line.split()
try: self.path, self.raw_query = path.split('?', 1)
except: self.path = path
if self.raw_query:
self.query.from_query(self.raw_query)
else:
try: key, value = line.split(': ', 1)
except: continue
if key.lower() == 'cookie':
for cookie in value.split(';'):
try:
item = CookieItem.from_string(cookie)
except:
traceback.print_exc()
continue
self.cookies[item.key] = item
continue
self.headers[key] = value
def new_response(self, *args, **kwargs):
return self.app.cfg.response_class(*args, **kwargs)
async def verify_signature(self, actor):
if not verify_headers:
raise ImportError('Failed to import verify_headers from izzylib.http_signatures.')
self.verified = verify_headers(
headers = {k: self.headers.getone(k) for k in self.headers.keys()},
method = self.method,
path = self.path,
actor = actor,
body = await self.body()
)
return self.verified

View file

@ -1,236 +0,0 @@
import json, traceback
from datetime import datetime
from . import get_app
from .misc import Cookies, Headers, CookieItem
from ..dotdict import MultiDotDict
class Response:
__slots__ = ['_app', '_body', 'headers', 'cookies', 'status', 'request']
def __init__(self, body=b'', status=200, headers={}, cookies={}, content_type='text/plain', request=None):
self._app = None
self._body = b''
self.headers = Headers(headers)
self.cookies = Cookies(cookies)
self.body = body
self.status = status
self.content_type = content_type
self.request = request
@property
def app(self):
return self._app or get_app()
@app.setter
def app(self, app):
self._app = app
@property
def body(self):
return self._body
@body.setter
def body(self, data):
self._body = self._parse_body_data(data)
@property
def content_type(self):
return self.headers.getone('Content-Type')
@content_type.setter
def content_type(self, data):
try:
self.headers['Content-Type'].set(data)
except KeyError:
self.headers['Content-Type'] = data
@property
def content_length(self):
return len(self.body)
@property
def streaming(self):
return self.headers.getone('Transfer-Encoding') == 'chunked'
def append(self, data):
self._body += self._parse_body_data(data)
def delete_cookie(self, cookie):
cookie.set_delete()
self.cookies[cookie.key] = cookie
@classmethod
def new_html(cls, body, **kwargs):
response = cls(**kwargs)
response.set_html(body)
return response
@classmethod
def new_json(cls, body, activity=False, **kwargs):
response = cls(**kwargs)
response.set_json(body, activity)
return response
@classmethod
def new_error(cls, message, status=500, **kwargs):
response = cls(**kwargs)
response.set_error(message, status)
return response
@classmethod
def new_redir(cls, path, status=302, **kwargs):
response = cls(**kwargs)
response.set_redir(path, status)
return response
def set_text(self, body=b'', status=None):
self.body = body
if status:
self.status = status
return self
def set_html(self, body=b'', status=None):
self.content_type = 'text/html'
self.body = body
if status:
self.status = status
return self
def set_template(self, template, context={}, content_type='text/html', status=None, request=None):
if not request:
request = self.request
if status:
self.status = status
self.body = request.app.render(template, context_data=context, request=request)
self.content_type = content_type
return self
def set_json(self, body={}, status=None, activity=False):
self.content_type = 'application/activity+json' if activity else 'application/json'
self.body = body
if status:
self.status = status
return self
def set_redir(self, path, status=302):
self.headers['Location'] = path
self.status = status
return self
def set_error(self, message, status=500):
try:
if self.request and 'json' in self.request.headers.getone('accept', ''):
return self.set_json({'error': message, 'code': status}, status=status)
return self.set_template('error.haml',
context = {
'error_message': message,
'response': self
},
status = status
)
except AttributeError:
pass
except Exception:
traceback.print_exc()
self.body = f'HTTP Error {status}: {message}'
self.status = status
return self
async def set_streaming(self, transport, headers={}):
self.headers.update(headers)
self.headers.update(transport.app.cfg.default_headers)
self.headers.setall('Transfer-encoding', 'chunked')
transport.write(self._compile_headers())
def set_cookie(self, key, value, **kwargs):
self.cookies[key] = CookieItem(key, value, **kwargs)
def compile(self):
return self._compile_headers() + self.body
def _compile_headers(self):
data = bytes(f'HTTP/1.1 {self.status}', 'utf-8')
for k,v in self.headers.items():
if k == 'Content-Length':
continue
for value in v:
data += bytes(f'\r\n{k}: {value}', 'utf-8')
for cookie in self.cookies.values():
data += bytes(f'\r\nSet-Cookie: {cookie}', 'utf-8')
if not self.headers.get('Date'):
data += bytes(f'\r\nDate: {datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S GMT")}', 'utf-8')
data += bytes(f'\r\nContent-Length: {len(self.body)}', 'utf-8')
data += b'\r\n\r\n'
return data
def _parse_body_data(self, data):
if isinstance(data, str):
data = data.encode('utf-8')
elif isinstance(data, bytearray):
data = bytes(data)
elif isinstance(data, (dict, list, tuple)):
data = json.dumps(data).encode('utf-8')
elif not isinstance(data, bytes):
data = str(data).encode('utf-8')
return data
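## Body values are coerced in order: str -> utf-8 bytes, bytearray -> bytes,
## dict/list/tuple -> JSON bytes, anything else -> str() then utf-8 bytes.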

View file

@ -1,65 +0,0 @@
import asyncio, json
from ..dotdict import DotDict
class Transport:
def __init__(self, app, reader, writer):
self.app = app
self.reader = reader
self.writer = writer
@property
def client_address(self):
return self.writer.get_extra_info('peername')[0]
@property
def client_port(self):
return self.writer.get_extra_info('peername')[1]
@property
def closed(self):
return self.writer.is_closing()
async def read(self, length=2048, timeout=None):
return await asyncio.wait_for(self.reader.read(length), timeout or self.app.cfg.timeout)
async def readuntil(self, bytes, timeout=None):
return await asyncio.wait_for(self.reader.readuntil(bytes), timeout or self.app.cfg.timeout)
async def write(self, data):
if isinstance(data, DotDict):
data = data.to_json()
elif isinstance(data, (dict, list, tuple)):
data = json.dumps(data)
# not sure if there's a better type to use, but this should be fine for now
elif isinstance(data, (float, int)):
data = str(data)
elif isinstance(data, bytearray):
data = bytes(data)
elif not isinstance(data, (bytes, str)):
raise TypeError('Data must be a str, bytes, bytearray, float, int, dict, list, or tuple')
if isinstance(data, str):
data = data.encode('utf-8')
self.writer.write(data)
await self.writer.drain()
async def close(self):
if self.closed:
return
self.writer.close()
await self.writer.wait_closed()

View file

@ -1,135 +0,0 @@
import mimetypes
from . import http_methods, error
from ..dotdict import DotDict
from ..path import Path
from ..exceptions import (
InvalidMethodException,
MethodNotHandledException
)
try: import magic
except ImportError: magic = None
try:
from ..template import Color
default_theme = DotDict(
primary = Color('#e7a'),
secondary = Color('#a7e'),
background = Color('#191919'),
positive = Color('#aea'),
negative = Color('#e99'),
white = Color('#eee'),
black = Color('#111'),
speed = 250
)
except ModuleNotFoundError:
pass
class View:
__path__ = ''
def __init__(self, app):
self.app = app
def get_handler(self, method):
if method.upper() not in http_methods:
raise InvalidMethodException(method)
try:
return getattr(self, method.lower())
except AttributeError:
raise MethodNotHandledException(method)
#def get(self, request):
#pass
def Static(src):
src = Path(src)
async def StaticHandler(request, response, path=None):
src_path = src if not path else src.join(path)
try:
with open(src_path, 'rb') as fd:
data = fd.read()
if magic:
mime = mimetypes.guess_type(path)[0] or magic.from_buffer(data[:2048], mime=True)
else:
mime = mimetypes.guess_type(path)[0]
except FileNotFoundError:
raise error.NotFound('Static file not found')
except IsADirectoryError:
index = src_path.join('index.html')
if not index.isfile:
raise error.NotFound('Static file not found')
with open(index, 'rb') as fd:
data = fd.read()
mime = 'text/html'
response.body = data
response.content_type = mime
return StaticHandler
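## Static() is a handler factory: the returned coroutine serves files under `src`,
## guessing the MIME type with mimetypes (and python-magic when available) and
## falling back to index.html when the requested path is a directory.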
### Frontend Template Views ###
class Manifest(View):
__path__ = '/framework/manifest.json'
async def get(self, request, response):
data = {
'name': self.app.cfg.name,
'short_name': self.app.cfg.name.replace(' ', ''),
'description': 'UvU',
'icons': [
{
'src': "/framework/static/icon512.png",
'sizes': '512x512',
'type': 'image/png'
},
{
'src': "/framework/static/icon64.png",
'sizes': '64x64',
'type': 'image/png'
}
],
'theme_color': str(default_theme.primary),
'background_color': str(default_theme.background),
'display': 'standalone',
'start_url': '/',
'scope': f'{self.app.cfg.proto}://{self.app.cfg.web_host}'
}
response.set_json(data)
class Robots(View):
__path__ = '/robots.txt'
async def get(self, request, response):
data = '# Disallow all\nUser-agent: *\nDisallow: /'
response.body = data
class Style(View):
__path__ = '/framework/style.css'
async def get(self, request, response):
response.body = self.app.render('base.css', default_theme)
response.content_type = 'text/css'

View file

@ -1,218 +0,0 @@
import json, sys
from Crypto.Hash import SHA256
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5
from base64 import b64decode, b64encode
from datetime import datetime
from functools import lru_cache
from tldextract import extract
from . import izzylog
from .dotdict import DefaultDotDict, DotDict
from .misc import Url
def generate_rsa_key():
privkey = RSA.generate(2048)
key = DotDict({'PRIVKEY': privkey, 'PUBKEY': privkey.publickey()})
key.update({'privkey': key.PRIVKEY.export_key().decode(), 'pubkey': key.PUBKEY.export_key().decode()})
return key
def parse_signature(signature: str):
return Signature(signature)
def verify_headers(headers: dict, method: str, path: str, actor: dict, body=None):
'''Verify a header signature
headers: A dictionary containing all the headers from a request
method: The HTTP method of the request
path: The path of the HTTP request
actor (optional): A dictionary containing the activitypub actor and the link to the pubkey used for verification
body (optional): The body of the request. Only needed if the signature includes the digest header
'''
headers = {k.lower(): headers[k] for k in headers}
headers['(request-target)'] = f'{method.lower()} {path}'
signature = Signature(headers.get('signature'))
digest = headers.get('digest')
missing_headers = [k for k in ['date', 'host'] if headers.get(k) == None]
if not signature:
raise AssertionError('Missing signature')
## Add digest header to missing headers list if it doesn't exist
if method.lower() == 'post' and not digest:
missing_headers.append('digest')
## Fail if missing date, host or digest (if POST) headers
if missing_headers:
raise AssertionError(f'Missing headers: {missing_headers}')
## Fail if body verification fails
if digest:
digest_hash = parse_body_digest(headers.get('digest'))
if not verify_string(body, digest_hash.sig, digest_hash.alg):
raise AssertionError('Failed body digest verification')
pubkey = actor.publicKey['publicKeyPem']
return sign_pkcs_headers(pubkey, {k:v for k,v in headers.items() if k in signature.headers}, sig=signature)
def verify_request(request, actor: dict):
'''Verify a header signature from a SimpleASGI request
request: The request with the headers to verify
actor: A dictionary containing the activitypub actor and the link to the pubkey used for verification
'''
return verify_headers(
headers = request.headers,
method = request.method,
path = request.path,
actor = actor,
body = request.body
)
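## Rough usage (actor fetching is up to the caller): fetch the remote actor document,
## then call verify_request(request, actor); a truthy return means the signature
## matched the actor's published RSA public key.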
### Helper functions that shouldn't be used directly ###
def parse_body_digest(digest):
if not digest:
raise AssertionError('Empty digest')
parsed = DotDict()
alg, sig = digest.split('=', 1)
parsed.sig = sig
parsed.alg = alg.replace('-', '')
return parsed
def sign_pkcs_headers(key: str, headers: dict, sig=None):
if sig:
head_items = [f'{item}: {headers[item]}' for item in sig.headers]
else:
head_items = [f'{k.lower()}: {v}' for k,v in headers.items()]
head_string = '\n'.join(head_items)
head_bytes = head_string.encode('UTF-8')
KEY = RSA.importKey(key)
pkcs = PKCS1_v1_5.new(KEY)
h = SHA256.new(head_bytes)
if sig:
try:
return pkcs.verify(h, b64decode(sig.signature))
except ValueError:
return False
else:
return pkcs.sign(h)
def sign_request(request, privkey, keyid):
assert isinstance(request.body, bytes)
request.set_header('(request-target)', f'{request.method.lower()} {request.url.path}')
request.set_header('host', request.url.host)
request.set_header('date', datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT'))
if request.body:
body_hash = b64encode(SHA256.new(request.body).digest()).decode("UTF-8")
request.set_header('digest', f'SHA-256={body_hash}')
request.set_header('content-length', str(len(request.body)))
sig = {
'keyId': keyid,
'algorithm': 'rsa-sha256',
'headers': ' '.join([k.lower() for k in request.headers.keys()]),
'signature': b64encode(sign_pkcs_headers(privkey, request.headers)).decode('UTF-8')
}
sig_items = [f'{k}="{v}"' for k,v in sig.items()]
sig_string = ','.join(sig_items)
request.set_header('signature', sig_string)
request.unset_header('(request-target)')
request.unset_header('host')
return request
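## sign_request() adds the (request-target), host and date headers (plus digest and
## content-length when a body is present), builds the Signature header from them,
## then removes the (request-target) and host entries before returning the request.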
def verify_string(string, enc_string, alg='SHA256', fail=False):
if type(string) != bytes:
string = string.encode('UTF-8')
body_hash = b64encode(SHA256.new(string).digest()).decode('UTF-8')
if body_hash == enc_string:
return True
if fail:
raise AssertionError('String failed validation')
else:
return False
class Signature(str):
__parts = {}
def __init__(self, signature: str):
if not signature:
raise AssertionError('Missing signature header')
split_sig = signature.split(',')
for part in split_sig:
key, value = part.split('=', 1)
value = value.replace('"', '')
self.__parts[key.lower()] = value.split() if key == 'headers' else value
def __new__(cls, signature: str):
return str.__new__(cls, signature)
def __new2__(cls, signature: str):
data = str.__new__(cls, signature)
data.__init__(signature)
return
def __getattr__(self, key):
return self.__parts[key]
@property
def sig(self):
return self.__parts['signature']
@property
def actor(self):
return Url(self.keyid.split('#')[0])
@property
def domain(self):
return self.actor.host
@property
def top_domain(self):
return '.'.join(extract(self.domain)[1:])
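## Signature parses a raw header of the form (values are illustrative):
## keyId="https://example.com/actor#main-key",algorithm="rsa-sha256",headers="(request-target) host date",signature="..."
## and exposes keyid, algorithm, headers, sig, actor and domain from it.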

349
izzylib/http_utils.py Normal file
View file

@ -0,0 +1,349 @@
from datetime import datetime, timedelta, timezone
from . import izzylog
from .dotdict import DotDict
from .misc import DateString, boolean
cookie_fields = {
'expires': 'Expires',
'max_age': 'Max-Age',
'domain': 'Domain',
'path': 'Path',
'secure': 'Secure',
'httponly': 'HttpOnly',
'samesite': 'SameSite'
}
samesite_values = {
'Strict',
'Lax',
'None'
}
def parse_header_key_name(key):
return key.replace('_', '-').title()
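## e.g. parse_header_key_name('content_type') and parse_header_key_name('content-type')
## both normalize to 'Content-Type'.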
def parse_cookie_key_name(key):
key = cookie_fields.get(key.replace('-', '_').lower(), key)
if key not in cookie_fields.values():
raise KeyError(f'Invalid cookie key: {key}')
return key
class Headers(DotDict):
__readonly = False
def __init__(self, data=(), readonly=False):
super().__init__()
for key, value in data:
self[key] = value
self.__readonly = readonly
def __getitem__(self, key):
return super().__getitem__(parse_header_key_name(key))
def __setitem__(self, key, value):
if key.startswith('_'):
super().__setattr__(key, value)
return
if self.readonly:
raise AssertionError('Headers are read-only')
key = parse_header_key_name(key)
if key in ['Cookie', 'Set-Cookie']:
izzylog.warning('Do not set the "Cookie" or "Set-Cookie" headers')
return
elif key == 'Date':
value = DateString(value, 'http')
try:
super().__getitem__(key).update(value)
except KeyError:
super().__setitem__(key, HeaderItem(key, value))
def __delitem__(self, key):
if self.readonly:
raise AssertionError('Headers are read-only')
super().__delitem__(parse_header_key_name(key))
@property
def readonly(self):
return self.__readonly
def get_one(self, key):
return self[key].one()
def set(self, key, value):
try:
self[key].set(value)
except:
self[key] = value
def update(self, data={}, **kwargs):
kwargs.update(data)
for key, value in kwargs.items():
self[key] = value
class Cookies(DotDict):
__readonly = False
def __init__(self, cookies=[], readonly=False):
super().__init__()
for cookie in cookies:
self.new_from_string(cookie)
self.__readonly = readonly
def __setattr__(self, key, value):
if key.startswith('_'):
super().__setattr__(key, value)
return
if self.readonly:
raise AssertionError('Cookies are read-only')
if isinstance(value, str):
value = CookieItem.from_string(value)
elif not isinstance(value, CookieItem):
raise TypeError(f'Cookie must be a str or CookieItem, not a {type(value).__name__}')
super().__setattr__(key, value)
def __delattr__(self, key):
if self.readonly:
raise AssertionError('Cookies are read-only')
super().__delattr__(key)
@property
def readonly(self):
return self.__readonly
def new_from_string(self, string):
cookie = CookieItem.from_string(string)
self[cookie.key] = cookie
return cookie
def update(self, data={}, **kwargs):
kwargs.update(data)
for key, value in kwargs.items():
self[key] = value
class HeaderItem(list):
def __init__(self, key, *values):
super().__init__(values)
self.key = key
def __str__(self):
return ','.join(str(v) for v in self)
def set(self, *values):
self.clear()
for value in values:
self.append(value)
def one(self):
return self[0]
def update(self, *items):
for item in items:
self.append(item)
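## HeaderItem stores every value seen for one header name and renders them as a
## single comma-joined string, so repeated headers survive round-tripping.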
class CookieItem(DotDict):
def __init__(self, key, value, **kwargs):
super().__init__(kwargs)
try:
parse_cookie_key_name(key)
raise ValueError(f'The key name for a cookie cannot be {key}')
except KeyError:
pass
self.__key = key
self.__value = value
def __getitem__(self, key):
return super().__getitem__(parse_cookie_key_name(key))
def __setitem__(self, key, value):
key = parse_cookie_key_name(key)
if key == 'Expires':
value = self._parse_expires(value)
elif key == 'Max-Age':
value = self._parse_max_age(value)
elif key == 'SameSite':
value = self._parse_samesite(value)
elif key in ['Secure', 'HttpOnly']:
value = boolean(value)
super().__setitem__(key, value)
def __delitem__(self, key):
super().__delitem__(parse_cookie_key_name(key))
def __str__(self):
text = f'{self.key}={self.value}'
try: text += f'; Expires={self.expires}'
except KeyError: pass
try: text += f'; Max-Age={self.maxage}'
except KeyError: pass
try: text += f'; Domain={self.domain}'
except KeyError: pass
try: text += f'; Path={self.path}'
except KeyError: pass
try: text += f'; SameSite={self.samesite}'
except KeyError: pass
if self.get('secure'):
text += f'; Secure'
if self.get('httponly'):
text += f'; HttpOnly'
return text
def _parse_expires(self, data):
if type(data) == str:
data = DateString.new_http(data)
elif type(data) in [int, float]:
data = DateString.from_timestamp(data, 'http')
elif type(data) == datetime:
data = DateString.from_datetime(data, 'http')
elif type(data) == timedelta:
data = DateString.from_datetime(datetime.now(timezone.utc) + data, 'http')
elif type(data) != DateString:
raise TypeError(f'Expires must be a http date string, timestamp, datetime, or DateString object, not {type(data).__name__}')
return data
def _parse_max_age(self, data):
if isinstance(data, float):
data = int(data)
elif isinstance(data, timedelta):
data = data.seconds
elif isinstance(data, datetime):
data = (data - datetime.now()).seconds
elif not isinstance(data, int):
raise TypeError(f'Max-Age must be an integer, timedelta object, or datetime object, not {data.__class__.__name__}')
return data
def _parse_samesite(self, data):
if isinstance(data, bool):
data = 'Strict' if data else 'None'
elif isinstance(data, str):
if data.title() not in samesite_values:
raise ValueError(f'Valid SameSite values are Strict, Lax, or None, not {data}')
else:
raise TypeError(f'SameSite must be a boolean or string, not {data.__class__.__name__}')
return data.title()
@classmethod
def from_string(cls, data):
kwargs = {}
for idx, pairs in enumerate(data.split(';')):
try:
k, v = pairs.split('=', 1)
v = v.strip()
except:
k, v = pairs, True
k = k.replace(' ', '')
if isinstance(v, str) and v.startswith('"') and v.endswith('"'):
v = v[1:-1]
if idx == 0:
key = k
value = v
elif k.replace('-', '_').lower() in cookie_fields:
kwargs[k] = v
else:
izzylog.info(f'Invalid key/value pair for cookie: {k} = {v}')
return cls(key, value, **kwargs)
def as_dict(self):
return DotDict({self.key: self.value}, **self)
def set_defaults(self):
for key in list(self.keys()):
del self[key]
def set_delete(self):
self.pop('Expires', None)
self['max_age'] = 0
return self

View file

@ -43,10 +43,18 @@ class Log:
def parse_level(self, level):
try:
return Levels(int(level))
except ValueError:
return Levels[level.upper()]
try: return Levels(int(level))
except ValueError: return Levels[level.upper()]
def set_level_from_env(self, env_name):
level = env.get(env_name)
if not level:
self.verbose('Environment variable not set:', env_name)
return
self.set_config('level', level)
def update_config(self, **data):
@ -121,6 +129,7 @@ merp = DefaultLog.merp
log = DefaultLog.log
'''aliases for the default logger's config functions'''
set_level_from_env = DefaultLog.set_level_from_env
update_config = DefaultLog.update_config
set_config = DefaultLog.set_config
get_config = DefaultLog.get_config

View file

@ -1,7 +1,338 @@
import json
from . import __version__
from .config import BaseConfig
from .dotdict import DotDict
from .exceptions import HttpError
from .http_client_async import HttpClient
from .misc import DateString, Url
class MastodonApi:
def __init__(self, domain, **kwargs):
self.cfg = BaseConfig(
appname = kwargs.get('appname', 'MastoAPI Client'),
domain = domain,
token = kwargs.get('token'),
vapid_key = kwargs.get('vapid_key'),
redirect = kwargs.get('redirect', 'urn:ietf:wg:oauth:2.0:oob'),
scopes = Scopes(*kwargs.get('scopes', [])),
website = kwargs.get('website')
)
self._client = HttpClient(appagent=self.cfg.appname)
@property
def scopes(self):
return self.cfg.scopes
def __send_request(self, endpoint, data=None, query={}, token=None, method='GET'):
if method.upper() not in ['GET', 'POST']:
raise ValueError(f'Method must be a GET or POST, not a {method}')
url = Url.new(self.cfg.domain, path=endpoint, query=query)
headers = {}
if token:
headers['Authorization'] = f'Bearer {token}'
if method.upper() in ['POST']:
headers['Content-Type'] = 'application/json'
request = self._client.create_request(url, data, headers, method=method)
response = self._client.run_request(request)
body = self._client.read_body(response)
print(body)
if response.status != 200:
print(type(response.status), response.status)
raise HttpError(response.status, body)
try:
return DotDict(body)
except:
try:
message = DotDict(body).error
except:
message = body
raise HttpError(response.status, message)
def api(self, endpoint, *args, **kwargs):
return self.__send_request(f'/api/v1/{endpoint}', *args, **kwargs)
def oauth(self, endpoint, *args, **kwargs):
return self.__send_request(f'/oauth/{endpoint}', *args, **kwargs)
def me(self):
return Account(self.api('accounts/verify_credentials', token=self.cfg.token))
def create_app(self):
data = DotDict(
client_name = self.cfg.appname,
redirect_uris = self.cfg.redirect
)
if self.cfg.scopes:
data.scopes = self.cfg.scopes.compile()
if self.cfg.website:
data.website = Url(self.cfg.website)
return self.api('apps', data=data, method='POST')
def create_token(self, type, id, secret, auth_code=None, username=None, password=None):
data = DotDict(
grant_type = type,
client_id = id,
client_secret = secret,
redirect_uri = self.cfg.redirect,
scope = self.cfg.scopes.compile()
)
if auth_code:
data.code = auth_code
if username and password:
data.username = username
data.password = password
return self.oauth('token', data, method='POST')
def login_client(self, id, secret):
return self.create_token('client_credentials', id, secret)
def login_password(self, id, secret, username, password):
#auth = self.oauth('authorize', data=dict(
#response_type = 'code',
#client_id = id,
#redirect_uri = self.cfg.redirect,
#scope = self.cfg.scopes.compile(),
#username = username,
#password = password
#))
#print(auth)
return self.create_token('password', id, secret, username=username, password=password)
## Does not work atm for some reason (returns 406 Not Acceptable)
def authorize(self, id, force_login=False):
data = DotDict(
response_type = 'code',
client_id = id,
redirect_uri = self.cfg.redirect,
scope = self.cfg.scopes.compile()
)
if force_login:
data.force_login = True
return self.oauth('authorize', query=data)
def register(self, username, email, password, locale='en-us', reason=None):
app = self.create_app()
data = DotDict(
username = username,
email = email,
password = password,
locale = locale,
agreement = True
)
if reason:
data.reason = reason
token = self.login_client(app.client_id, app.client_secret)
return self.api('accounts', data, token=token.access_token, method='POST')
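## Typical flow based on the methods above (identifiers are placeholders):
## app = api.create_app(); token = api.login_password(app.client_id, app.client_secret, user, pw)
## then pass token.access_token to later api() calls via the token argument.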
class Scopes:
def __init__(self, *scopes):
self.__scopes = []
for scope in scopes:
self.add_string(scope)
def __str__(self):
return self.compile()
def __iter__(self):
for scope in self.__scopes:
yield scope
def __getitem__(self, key):
return self.__scopes[key]
def __delitem__(self, key):
del self.__scopes[key]
def __len__(self):
return len(self.__scopes)
def add(self, type, name=None, admin=False):
if self.index(type, name, admin) != None or self.index(type, admin=admin) != None:
return
if not name:
rem_scopes = []
for scope in self:
if scope.type == type and scope.admin == admin:
rem_scopes.append(scope)
for scope in rem_scopes:
self.remove(scope)
scope = Scope(type, name, admin)
self.__scopes.append(scope)
return scope
def add_list(self, *scopes):
for scope in scopes:
self.add(*scope)
def add_string(self, string):
item = Scope.new_from_string(string)
self.add(item.type, item.name, item.admin)
def index(self, type, name=None, admin=False):
for idx, scope in enumerate(self):
if scope.type == type and scope.name == name and scope.admin == admin:
return idx
def delete(self, type, name=None, admin=False):
index = self.index(type, name, admin)
if index == None:
return
del self[index]
def remove(self, item):
self.__scopes.remove(item)
def compile(self):
return ' '.join(scope.compile() for scope in self)
class Scope:
BASE = {'accounts', 'blocks', 'bookmarks', 'favourites', 'filters', 'follows', 'lists', 'mutes', 'notifications', 'statuses'}
READ = {'search', *BASE}
WRITE = {'conversations', 'media', *BASE}
ADMIN = {'accounts', 'reports'}
def __init__(self, type, name=None, admin=False):
self.type = type
self.name = name
self.admin = bool(admin)
def __str__(self):
return f'Scope(type={self.type}, name={self.name}, admin={self.admin})'
@classmethod
def new_from_string(cls, string):
split = string.split(':')
if len(split) == 1:
type = split[0]
name = None
admin = False
elif len(split) == 2:
type = split[0]
name = split[1]
admin = False
elif len(split) == 3:
type = split[1]
name = split[2]
admin = True
else:
raise TypeError(f'Invalid scope: {string}')
if type.lower() not in ['read', 'write']:
raise ValueError(f'Invalid scope type: {type}')
if admin:
if name not in cls.ADMIN:
raise ValueError(f'Invalid admin scope name: {name}')
elif name and name not in getattr(cls, type.upper()):
raise ValueError(f'Invalid {type} scope name: {name}')
return cls(type, name, admin)
def compile(self):
string = ''
if self.admin:
string += 'admin:'
string += self.type
if self.name:
string += f':{self.name}'
return string
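## Scope strings follow the pattern parsed and produced above: 'read',
## 'write:statuses', 'admin:read:accounts' -> Scope(type, name, admin) and back
## via compile().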
class Account(DotDict):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.created_at = DateString.new_activitypub(self.created_at)
self.last_status_at = DateString(self.last_status_at, 'activitypub-date')
@property
def created(self):
return self.created_at
@property
def last_status(self):
return self.last_status_at
@property
def info_table(self):
return DotDict()
class InfoTable(list):
def __init__(self, account, table):
super().__init__(table)
@ -77,26 +408,3 @@ class InfoTable(list):
def index(self, key):
return self._index.index(key)
class Account(DotDict):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.created_at = DateString.new_activitypub(self.created_at)
self.last_status_at = DateString(self.last_status_at, 'activitypub-date')
@property
def created(self):
return self.created_at
@property
def last_status(self):
return self.last_status_at
@property
def info_table(self):
return DotDict()

View file

@ -16,7 +16,7 @@ from datetime import datetime, timezone
from getpass import getpass, getuser
from importlib import util
from subprocess import Popen, PIPE
from urllib.parse import urlparse
from urllib.parse import urlparse, quote, ParseResult
from . import izzylog
from .dotdict import DotDict
@ -44,6 +44,8 @@ __all__ = [
'timestamp',
'var_name',
'ArgParser',
'Argument',
'Boolean',
'DateString',
'Url'
]
@ -129,6 +131,14 @@ def catch_kb_interrupt(function, *args, **kwargs):
izzylog.verbose('Bye! UvU')
def class_name(cls):
try:
return cls.__name__
except AttributeError:
return type(cls).__name__
def get_current_user_info():
data = DotDict({
'name': getuser(),
@ -213,8 +223,8 @@ def import_from_path(mod_path):
mod_path = Path(mod_path)
if mod_path.is_dir():
path = mod_path.joinpath('__init__.py')
if mod_path.is_dir:
path = mod_path.join('__init__.py')
name = path.name
else:
@ -600,6 +610,15 @@ class Argument(DotDict):
self.callback = lambda *cliargs: callback(*cliargs, *args, **kwargs)
class Boolean(str):
def __new__(cls, bool_obj):
return str.__new__(cls, 'true' if boolean(bool_obj) else 'false')
def __bool__(self):
return True if self == 'true' else False
class DateString(str):
tz_utc = timezone.utc
tz_local = datetime.now(tz_utc).astimezone().tzinfo
@ -623,6 +642,10 @@ class DateString(str):
return getattr(self.dt, key)
def __repr__(self):
return f'DateString({self}, format={self.format})'
@classmethod
def new_activitypub(cls, date):
return cls(date, 'activitypub')
@ -684,13 +707,17 @@ class Url(str):
}
def __init__(self, url):
parsed = urlparse(url)
if isinstance(url, str):
parsed = urlparse(url)
else:
parsed = url
if not all([parsed.scheme, parsed.netloc]):
raise ValueError('Not a valid url')
raise TypeError('Not a valid url')
self._parsed = parsed
self.proto = parsed.scheme
self.host = parsed.netloc
self.port = parsed.port if parsed.port else self.protocols.get(self.proto)
self.path = parsed.path
self.query_string = parsed.query
@ -699,6 +726,30 @@ class Url(str):
self.password = parsed.password
self.anchor = parsed.fragment
try:
self.host = parsed.netloc.split('@')[1]
except:
self.host = parsed.netloc
@property
def path_full(self):
string = self.path
if self.query_string:
string += f'?{self.query_string}'
return string
def join(self, new_path):
data = self.dict
host = data.pop('host')
data['path'] = Path(data['path']).join(new_path)
return self.new(host, **data)
@property
def dict(self):
@ -707,8 +758,37 @@ class Url(str):
host = self.host,
port = self.port,
path = self.path,
query_string = self.query_string,
query = self.query,
username = self.username,
password = self.password
password = self.password,
anchor = self.anchor
)
@classmethod
def new(cls, host, path='/', proto='https', port=None, query=None, username=None, password=None, anchor=None):
if port == cls.protocols.get(proto):
port = None
url = f'{proto}://'
if username and password:
url += f'{username}:{password}@'
elif username:
url += f'{username}@'
url += host
if port:
url += f':{port}'
url += '/' + path if not path.startswith('/') else path
if query:
url += '?' + '&'.join(f'{quote(key)}={quote(value)}' for key,value in query.items())
if anchor:
url += f'#{anchor}'
return cls(url)
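## e.g. (hypothetical host) Url.new('example.com', path='/api/v1/apps', query={'q': 'test'})
## returns Url('https://example.com/api/v1/apps?q=test'); default ports are omitted.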

View file

@ -5,6 +5,14 @@ from functools import cached_property
from pathlib import Path as PyPath
linux_prefix = dict(
bin = '.local/bin',
share = '.local/share',
cache = '.cache',
config = '.config'
)
class PathMeta(type):
@property
def home(cls):
@ -24,13 +32,27 @@ class PathMeta(type):
return Path(path).parent
@property
def cache(cls):
return cls.home.join('.cache')
def unix(cls, prefix, author, software):
try:
return cls.home.join(linux_prefix[prefix], author, software)
except KeyError:
raise KeyError(f'Not a valid Linux user directory prefix: {prefix}')
def windows(cls, prefix, author, software):
pass
class Path(str, metaclass=PathMeta):
def __init__(self, path=os.getcwd(), exist=True, missing=True, parents=True):
#if str(path).startswith('~'):
#str.__new__(Path, os.path.expanduser(path))
#else:
#str.__new__(Path, path)
if not (parents or exist):
self.__check_dir(path)
## todo: move these to direct properties of Path
self.config = {
@ -39,6 +61,7 @@ class Path(str, metaclass=PathMeta):
'exist': exist
}
def __enter__(self):
self.fd = self.open('r')
return self.fd
@ -52,7 +75,7 @@ class Path(str, metaclass=PathMeta):
return self.join(key)
def __new__(cls, path):
def __new__(cls, path, *args, **kwargs):
if str(path).startswith('~'):
return str.__new__(cls, os.path.expanduser(path))
@ -62,10 +85,10 @@ class Path(str, metaclass=PathMeta):
def __check_dir(self, path=None):
target = self if not path else Path(path)
if not self.parents and not target.parent.exists:
if not self.parents and not target.parent.exists():
raise FileNotFoundError('Parent directories do not exist:', target)
if not self.exist and target.exists:
if not self.exist and target.exists():
raise FileExistsError('File or directory already exists:', target)
@ -168,8 +191,11 @@ class Path(str, metaclass=PathMeta):
def delete(self):
if not self.exists() and not self.exist:
raise FileNotFoundError(self)
if not self.exists():
if not self.exist:
raise FileNotFoundError(self)
return
if self.isdir:
shutil.rmtree(self, ignore_errors=True)
@ -190,11 +216,13 @@ class Path(str, metaclass=PathMeta):
def glob(self, pattern='*', recursive=True):
paths = PyPath(self).rglob(pattern) if recursive else PyPath(self).glob(pattern)
return tuple(sorted(self.join(path) for path in paths))
for path in paths:
yield self.join(path)
def join(self, *paths):
return Path(os.path.join(self, *paths))
def join(self, *paths, **kwargs):
return Path(os.path.join(self, *paths), **kwargs)
def json_load(self):

View file

@ -1,14 +0,0 @@
## Normal SQL client
from .database import Database, OperationalError, ProgrammingError
from .session import Session
from .table import Column, Table, Tables
from .rows import Row, RowClasses
## Sqlite server
#from .sqlite_server import SqliteClient, SqliteColumn, SqliteServer, SqliteSession
## Compat
SqlDatabase = Database
SqlSession = Session
SqlColumn = Column

View file

@ -1,196 +0,0 @@
import json, pkgutil, sys, threading, time
from contextlib import contextmanager
from datetime import datetime
from sqlalchemy import Table, create_engine, text
from sqlalchemy.exc import OperationalError, ProgrammingError
from sqlalchemy.engine import URL
from sqlalchemy.schema import MetaData
from .rows import RowClasses
from .session import Session
from .. import izzylog
from ..cache import LruCache
from ..dotdict import DotDict
from ..misc import nfs_check
from ..path import Path
modules = dict(
postgresql = ['pygresql', 'pg8000', 'psycopg2', 'psycopg3']
)
class Database:
def __init__(self, dbtype='sqlite', open_now=True, **kwargs):
self._connect_args = [dbtype, kwargs]
self.db = None
self.cache = None
self.config = DotDict()
self.meta = MetaData()
self.classes = RowClasses(**kwargs.get('row_classes', {}))
self.events = {}
self.session_class = kwargs.get('session_class', Session)
self.sessions = {}
if open_now:
self.open()
def _setup_cache(self):
self.cache = DotDict({table: LruCache() for table in self.get_tables()})
@property
def session(self):
return self.session_class(self)
@property
def dbtype(self):
return self.db.url.get_backend_name()
@property
def table(self):
return self.meta.tables
def connect(self, signal, callback, *args, **kwargs):
pass
def get_tables(self):
return list(self.table.keys())
def get_columns(self, table):
return list(col.name for col in self.table[table].columns)
def new_session(self, trans=True):
return self.session_class(self, trans=trans)
## Leaving link to example code for read-only sqlite for later use
## https://github.com/pudo/dataset/issues/136#issuecomment-128693122
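## Untested sketch of that approach (kept commented out until it is actually needed): hand
## create_engine a creator that opens sqlite in read-only URI mode (file:...?mode=ro).
## def readonly_sqlite_engine(path):
## import sqlite3
## return create_engine('sqlite://', creator=lambda: sqlite3.connect(f'file:{path}?mode=ro', uri=True), future=True)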
def open(self):
dbtype, kwargs = self._connect_args
engine_kwargs = {
'future': True,
#'maxconnections': 25
}
if not kwargs.get('name'):
raise KeyError('Database "name" is not set')
if dbtype == 'sqlite':
database = kwargs['name']
if nfs_check(database):
izzylog.warning('Database file is on an NFS share which does not support locking. Any writes to the database will fail')
engine_kwargs['connect_args'] = {'check_same_thread': False}
elif dbtype == 'postgresql':
ssl_context = kwargs.get('ssl')
if ssl_context:
engine_kwargs['ssl_context'] = ssl_context
if not kwargs.get('host'):
kwargs['unix_socket'] = '/var/run/postgresql'
if kwargs.get('host') and Path(kwargs['host']).exists():
kwargs['unix_socket'] = kwargs.pop('host')
else:
raise TypeError(f'Unsupported database type: {dbtype}')
self.config.update(kwargs)
if dbtype == 'sqlite':
url = URL.create(
drivername='sqlite',
database=kwargs.get('name')
)
else:
try:
for module in modules[dbtype]:
if pkgutil.get_loader(module):
dbtype = f'{dbtype}+{module}'
except KeyError:
pass
url = URL.create(
drivername = dbtype,
username = kwargs.get('user', None),
password = kwargs.get('password', None),
host = kwargs.get('host', None),
port = kwargs.get('port', None),
database = kwargs.get('name'),
)
self.db = create_engine(url, **engine_kwargs)
self.meta = MetaData()
self.meta.reflect(bind=self.db, resolve_fks=True, views=True)
self._setup_cache()
def close(self):
for sid in list(self.sessions):
self.sessions[sid].commit()
self.sessions[sid].close()
self.config = DotDict()
self.cache = DotDict()
self.meta = None
self.db = None
def load_tables(self, **tables):
self.meta = MetaData()
for name, columns in tables.items():
Table(name, self.meta, *columns)
self._setup_cache()
def create_database(self, tables={}):
if tables:
self.load_tables(**tables)
if self.db.url.get_backend_name() == 'postgresql':
predb = create_engine(str(self.db.url).replace(self.config.name, 'postgres', -1), future=True)
conn = predb.connect()
try:
conn.execute(text(f'CREATE DATABASE {self.config.name}'))
except ProgrammingError:
'The database already exists, so just move along'
except Exception as e:
conn.close()
raise e from None
conn.close()
self.meta.create_all(bind=self.db)
def drop_tables(self, *tables):
if not tables:
raise ValueError('No tables specified')
self.meta.drop_all(bind=self.db, tables=tables)
def execute(self, string, **kwargs):
with self.session as s:
s.execute(string, **kwargs)
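A hypothetical usage sketch of the Database/Session API defined above; the 'users' table and its columns are invented and assumed to be present in the reflected schema.

db = Database(dbtype='sqlite', name='app.sqlite3')

with db.session as s:
    user = s.fetch('users', username='izzy')
    total = s.count('users')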

View file

@@ -1,97 +0,0 @@
from .. import izzylog
from ..dotdict import DotDict
class RowClasses(DotDict):
def __init__(self, **classes):
super().__init__()
for k,v in classes.items():
self[k] = v
def get_class(self, name):
return self.get(name, Row)
class Row(DotDict):
def __init__(self, table, row, session):
super().__init__()
if row:
try:
self._update(row._asdict())
except:
self._update(row)
self.__session = session
self.__db = session.db
self.__table_name = table
self.__run__(session)
@property
def db(self):
return self.__db
@property
def table(self):
return self.__table_name
@property
def session(self):
return self.__session
@property
def columns(self):
return self.keys()
## Subclass Row and redefine this function
def __run__(self, s):
pass
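## Hypothetical example of that pattern (names invented): a subclass can derive extra
## fields from the row data when it is constructed, e.g.
##
## class UserRow(Row):
## def __run__(self, s):
## self.display_name = self.get('nick') or self.get('username')
##
## and is registered with Database(row_classes={'users': UserRow}) so fetch('users', ...)
## returns UserRow objects.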
def _update(self, *args, **kwargs):
super().update(*args, **kwargs)
def delete(self, s=None):
izzylog.warning('deprecated function: Row.delete')
if s:
return self.delete_session(s)
with self.db.session as s:
return self.delete_session(s)
def delete_session(self, s):
izzylog.warning('deprecated function: Row.delete_session')
return s.remove(table=self.table, row=self)
def update(self, dict_data={}, s=None, **data):
izzylog.warning('deprecated function: Row.update')
dict_data.update(data)
self._update(dict_data)
if s:
return self.update_session(s, **self)
with self.db.session as s:
s.update(row=self, **self)
def update_session(self, s, dict_data={}, **data):
izzylog.warning('deprecated function: Row.update_session')
dict_data.update(data)
self._update(dict_data)
return s.update(table=self.table, row=self, **dict_data)

View file

@@ -1,189 +0,0 @@
from datetime import datetime
from sqlalchemy import text, or_
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm.session import Session as sqlalchemy_session
from .. import izzylog
from ..dotdict import DotDict
from ..misc import random_gen
class Session(sqlalchemy_session):
def __init__(self, db, trans=False):
super().__init__(bind=db.db, future=True)
self.closed = False
self.trans = trans
self.database = db
self.classes = db.classes
self.cache = db.cache
self.sessionid = random_gen(10)
db.sessions[self.sessionid] = self
# remove in the future
self.db = db
self._setup()
def __enter__(self):
if self.trans:
self.begin()
return self
def __exit__(self, exctype, value, tb):
if self.in_transaction():
if tb:
self.rollback()
self.commit()
self.close()
def _setup(self):
pass
@property
def table(self):
return self.db.table
def commit(self):
if not self.in_transaction():
return
super().commit()
def close(self):
super().close()
self.closed = True
del self.db.sessions[self.sessionid]
self.sessionid = None
def run(self, expression, **kwargs):
result = self.execute(text(expression), params=kwargs)
try:
return result.mappings().all()
except Exception as e:
izzylog.verbose(f'Session.run: {e.__class__.__name__}: {e}')
return result
def count(self, table_name, **kwargs):
if kwargs:
return self.query(self.table[table_name]).filter_by(**kwargs).count()
return self.query(self.table[table_name]).count()
def fetch(self, table, single=True, orderby=None, orderdir='asc', **kwargs):
RowClass = self.classes.get_class(table.lower())
query = self.query(self.table[table]).filter_by(**kwargs)
if orderby:
if orderdir == 'asc':
query = query.order_by(getattr(self.table[table].c, orderby).asc())
elif orderdir == 'desc':
query = query.order_by(getattr(self.table[table].c, orderby).desc())
else:
raise ValueError(f'Unsupported order direction: {orderdir}')
if single:
row = query.first()
return RowClass(table, row, self) if row else None
return [RowClass(table, row, self) for row in query.all()]
def search(self, *args, **kwargs):
kwargs.pop('single', None)
return self.fetch(*args, single=False, **kwargs)
# not finished
def like(self, table, orderby=None, orderdir='asc', **kwargs):
assert len(kwargs)
query = self.query(self.table[table]).filter(or_(self.table[table].filename.like(f'')))
def insert(self, table, return_row=False, **kwargs):
row = self.fetch(table, **kwargs)
if row:
row.update_session(self, **kwargs)
return
if 'timestamp' in self.table[table].c and not kwargs.get('timestamp'):
kwargs['timestamp'] = datetime.now()
cursor = self.execute(self.table[table].insert().values(**kwargs))
if return_row:
return self.fetch(table, id=cursor.inserted_primary_key[0])
def update(self, table=None, rowid=None, row=None, return_row=False, **kwargs):
if row:
rowid = row.id
table = row.table
if not rowid or not table:
raise ValueError('Missing row ID or table')
row = self.execute(self.table[table].update().where(self.table[table].c.id == rowid).values(**kwargs))
if return_row:
return self.fetch(table, id=rowid)
def remove(self, table=None, rowid=None, row=None):
if row:
rowid = row.id
table = row.table
if not rowid or not table:
raise ValueError('Missing row ID or table')
self.run(f'DELETE FROM {table} WHERE id=:id', id=rowid)
def append_column(self, table, column):
if column.name in self.db.get_columns(table):
izzylog.warning(f'Table "{table}" already has column "{column.name}"')
return
self.run(f'ALTER TABLE {table} ADD COLUMN {column.compile()}')
def remove_column(self, tbl, col):
table = self.table[tbl]
column = getattr(table, col, None)
columns = self.db.get_columns(tbl)
if col not in columns:
izzylog.info(f'Column "{col}" does not exist')
return
columns.remove(col)
coltext = ','.join(columns)
self.run(f'CREATE TABLE {tbl}_temp AS SELECT {coltext} FROM {tbl}')
self.run(f'DROP TABLE {tbl}')
self.run(f'ALTER TABLE {tbl}_temp RENAME TO {tbl}')
def clear_table(self, table):
self.run(f'DELETE FROM {table}')

View file

@@ -1,377 +0,0 @@
import asyncio, json, socket, sqlite3, ssl, time, traceback
from .database import Database
from .rows import RowClasses
from .. import izzylog
from ..dotdict import DotDict, JsonEncoder
from ..path import Path
commands = [
'insert', 'update', 'remove', 'query', 'execute', 'dirty', 'count',
'DropTables', 'GetTables', 'AppendColumn', 'RemoveColumn'
]
class SqliteClient(object):
def __init__(self, database: str='metadata', host: str='localhost', port: int=3926, password: str=None, session_class=None):
self.ssl = None
self.data = DotDict({
'host': host,
'port': int(port),
'password': password,
'database': database
})
self.session_class = session_class or SqliteSession
self.classes = RowClasses()
self._setup()
@property
def session(self):
return self.session_class(self)
def setup_ssl(self, certfile, keyfile, password=None):
self.ssl = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
self.ssl.load_cert_chain(certfile, keyfile, password)
def switch_database(self, database):
self.data.database = database
def _setup(self):
pass
class SqliteSession(socket.socket):
def __init__(self, client):
super().__init__(socket.AF_INET, socket.SOCK_STREAM)
self.connected = False
self.client = client
self.classes = client.classes
self.data = client.data
self.begin = lambda: self.send('begin')
self.commit = lambda: self.send('commit')
self.rollback = lambda: self.send('rollback')
for cmd in commands:
self.setup_command(cmd)
def __enter__(self):
self.open()
return self
def __exit__(self, exctype, value, tb):
if tb:
self.rollback()
self.commit()
self.close()
def fetch(self, table, *args, **kwargs):
RowClass = self.classes.get(table.capitalize())
data = self.send('fetch', table, *args, **kwargs)
if isinstance(data, dict):
return RowClass(table, data, self)
elif isinstance(data, list):
return [RowClass(table, row, self) for row in data]
def search(self, *args, **kwargs):
return self.fetch(*args, **kwargs, single=False)
def setup_command(self, name):
setattr(self, name, lambda *args, **kwargs: self.send(name, *args, **kwargs))
def send(self, command, *args, **kwargs):
self.sendall(json.dumps({'database': self.data.database, 'command': command, 'args': list(args), 'kwargs': dict(kwargs)}).encode('utf8'))
data = self.recv(8*1024*1024).decode()
try:
data = DotDict(data)
except ValueError:
data = json.loads(data)
if isinstance(data, dict) and data.get('error'):
raise ServerError(data.get('error'))
return data
def open(self):
try:
self.connect((self.data.host, self.data.port))
except ConnectionRefusedError:
time.sleep(2)
self.connect((self.data.host, self.data.port))
if self.data.password:
login = self.send('login', self.data.password)
if not login.get('message') == 'OK':
izzylog.error('Server error:', login.error)
return
self.connected = True
def close(self):
self.send('close')
super().close()
self.connected = False
def is_transaction(self):
self.send('trans_state')
def is_connected(self):
return self.connected
def _setup(self):
pass
def SqliteColumn(*args, **kwargs):
return {'args': list(args), 'kwargs': dict(kwargs)}
class SqliteServer(DotDict):
def __init__(self, path, host='localhost', port=3926, password=None):
self.server = None
self.database = DotDict()
self.path = Path(path).resolve()
self.ssl = None
self.password = password
self.host = host
self.port = int(port)
self.metadata_layout = {
'databases': [
SqliteColumn('id'),
SqliteColumn('name', 'text', nullable=False),
SqliteColumn('layout', 'text', nullable=False)
]
}
if not self.path.exists():
raise FileNotFoundError('Database directory not found')
if not self.path.isdir():
raise NotADirectoryError('Database directory is a file')
try:
self.open('metadata')
except:
self.setup_metadata()
for path in self.path.listdir(False):
if path.endswith('.sqlite3') and path.stem != 'metadata':
self.open(path.stem)
def open(self, database, new=False):
db = Database(dbtype='sqlite', database=self.path.join(database + '.sqlite3'))
if database != 'metadata' and not new:
with self.get_database('metadata').session() as s:
row = s.fetch('databases', name=database)
if not row:
izzylog.error('Database not found:', database)
return
db.SetupTables(row.layout)
else:
db.SetupTables(self.metadata_layout)
setattr(db, 'name', database)
self[database] = db
return db
def close(self, database):
del self[database]
def delete(self, database):
self.close(database)
self.path.join(database + '.sqlite3').unlink()
def get_database(self, database):
return self[database]
def asyncio_run(self):
self.server = asyncio.start_server(self.handle_connection, self.host, self.port, ssl=self.ssl)
return self.server
def run(self):
loop = asyncio.get_event_loop()
loop.run_until_complete(self.asyncio_run())
try:
izzylog.info('Starting Sqlite Server')
loop.run_forever()
except KeyboardInterrupt:
print()
izzylog.info('Closing...')
return
def setup_metadata(self):
meta = self.open('metadata')
tables = {
'databases': [
SqliteColumn('id'),
SqliteColumn('name', 'text', nullable=False),
SqliteColumn('layout', 'text', nullable=False)
]
}
db = self.open('metadata')
db.SetupTables(tables)
db.CreateDatabase()
def setup_ssl(self, certfile, keyfile, password=None):
self.ssl = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
self.ssl.load_cert_chain(certfile, keyfile, password)
async def handle_connection(self, reader, writer):
session = None
database = None
valid = None
close = False
try:
while not close:
raw_data = await asyncio.wait_for(reader.read(8*1024*1024), timeout=60)
if not raw_data:
break
try:
data = DotDict(raw_data)
if self.password:
if valid == None and data.command == 'login':
valid = self.login(*data.get('args'))
if not valid:
response = {'error': 'Missing or invalid password'}
elif data.command in ['session']:
response = {'error': 'Invalid command'}
else:
if not database:
database = data.database
if data.command == 'close' and session:
session.commit()
else:
if not session:
session = self[database].session()
session.open()
response = self.run_command(session, database, data.command, *data.get('args'), **data.get('kwargs'))
except Exception as e:
traceback.print_exc()
response = {'error': f'{e.__class__.__name__}: {str(e)}'}
writer.write(json.dumps(response or {'message': 'OK'}, cls=JsonEncoder).encode('utf8'))
await writer.drain()
izzylog.info(f'{writer.get_extra_info("peername")[0]}: [{database}] {data.command} {data.args} {data.kwargs}')
if data.command == 'delete':
writer.close()
break
except ConnectionResetError:
pass
if session:
session.close()
writer.close()
def login(self, password):
return self.password == password
def run_command(self, session, database, command, *args, **kwargs):
if command == 'update':
return self.cmd_update(*args, **kwargs)
if command == 'dropdb':
return self.cmd_delete(session, database)
elif command == 'createdb':
return self.cmd_createdb(session, database, *args)
elif command == 'test':
return
elif command == 'trans_state':
return {'trans_state': session.dirty}
cmd = getattr(session, command, None)
if not cmd:
return {'error': f'Command not found: {command}'}
return cmd(*args, **kwargs)
def cmd_delete(self, session, database):
session.rollback()
session.close()
self.delete(database)
def cmd_createdb(self, session, database, name, tables):
if session.fetch('databases', name=name):
raise ValueError('Database already exists:', database)
session.insert('databases', name=name, layout=json.dumps(tables))
db = self.open(name, new=True)
db.SetupTables(tables)
db.CreateDatabase()
self[name] = db
def cmd_update(self, table=None, rowid=None, row=None, **data):
if row:
row = DotDict(row)
return self.update(table, rowid, row, **data)
class ServerError(Exception):
pass

View file

@@ -1,138 +0,0 @@
import json
from sqlalchemy import ForeignKey
from sqlalchemy import (
Column as sqlalchemy_column,
types as Types
)
from ..dotdict import DotDict
ptype = type
SqlTypes = {t.lower(): getattr(Types, t) for t in dir(Types) if not t.startswith('_')}
default_types = {
dict: json.dumps,
list: json.dumps,
tuple: json.dumps,
bool: lambda data: "1" if data else "0"
}
class Column(sqlalchemy_column):
def __init__(self, name, type=None, fkey=None, **kwargs):
if not type and not kwargs:
if name == 'id':
type = 'integer'
kwargs['primary_key'] = True
kwargs['autoincrement'] = True
elif name in ['timestamp', 'created', 'modified', 'accessed']:
type = 'datetime'
else:
raise ValueError('Missing column type and options')
type = (type.lower() if ptype(type) == str else type) or 'string'
if ptype(type) == str:
try:
type = SqlTypes[type.lower()]
except KeyError:
raise KeyError(f'Invalid SQL data type: {type}')
if 'default' in kwargs.keys():
value = kwargs.pop('default')
kwargs['server_default'] = default_types.get(ptype(value), str)(value)
kwargs['nullable'] = False
options = [name, type]
if fkey:
options.append(ForeignKey(fkey))
super().__init__(*options, **kwargs)
def compile(self):
sql = f'{self.name} {self.type}'
if not self.nullable:
sql += ' NOT NULL'
if self.primary_key:
sql += ' PRIMARY KEY'
if self.unique:
sql += ' UNIQUE'
return sql
class Table(list):
__table_name__ = None
def __init__(self, name, *columns, **kwcolumns):
super().__init__()
self.__table_name__ = name
self.columns = []
self.new('id')
for column in columns:
self.add(column)
def new(self, name, *args, **kwargs):
self.add(Column(name, *args, **kwargs))
def add(self, column: Column):
if column.name == 'id':
self.remove('id')
elif column.name in self.columns:
# This needs to be a custom exception. Probably ColumnExistsError
raise ValueError(f'Column already exists: {column.name}')
self.append(column)
self.columns.append(column.name)
def remove(self, name):
for col in self:
if col.name == name:
super().remove(col)
self.columns.remove(name)
class Tables(DotDict):
def __init__(self, *tables, **kwtables):
super().__init__()
for table in tables:
self.add(table)
self[table.__table_name__] = table
for name, table in kwtables.items():
if isinstance(table, list):
self.new(name, *table)
elif isinstance(table, Table):
self.add(table)
else:
raise TypeError(f'Invalid table type for {name}: {type(table).__name__}')
def new(self, name, *columns):
self[name] = Table(name, *columns)
def add(self, table: Table):
self[table.__table_name__] = table
def remove(self, name):
del self[name]

View file

@@ -1,6 +0,0 @@
from .database import Database, Connection
from .result import Result
from .row import Row
from .session import Session
from .statements import Comparison, Statement, Select, Insert, Update, Delete, Count
from .table import Column

View file

@@ -1,102 +0,0 @@
import sqlite3
from getpass import getuser
from importlib import import_module
from .result import Result
from .row import Row
from .session import Session
from ..config import BaseConfig
from ..path import Path
class Config(BaseConfig):
def __init__(self, **kwargs):
super().__init__(
appname = 'IzzyLib SQL Client',
type = 'sqlite',
module = None,
module_name = None,
tables = {},
row_classes = {},
session_class = Session,
result_class = Result,
host = 'localhost',
port = 0,
database = None,
username = getuser(),
password = None,
minconnections = 4,
maxconnections = 25,
engine_args = {},
auto_trans = True,
connect_function = None,
autocommit = False
)
for k, v in kwargs.items():
self[k] = v
if not self.database:
if self.type == 'sqlite':
self.database = ':memory:'
else:
raise ValueError('Missing database name')
if not self.port:
if self.type == 'postgresql':
self.port = 5432
elif self.type == 'mysql':
self.port = 3306
if not self.module and not self.connect_function:
if self.type == 'sqlite':
self.module = sqlite3
self.module_name = 'sqlite3'
elif self.type == 'postgresql':
for mod in ['pg8000.dbapi', 'pgdb', 'psycopg2']:
try:
self.module = import_module(mod)
self.module_name = mod
break
except ImportError:
pass
elif self.type == 'mysql':
try:
self.module = import_module('mysql.connector')
self.module_name = 'mysql.connector'
except ImportError:
pass
if not self.module:
raise ImportError(f'Cannot find module for "{self.type}"')
self.module.paramstyle = 'qmark'
@property
def dbargs(self):
return {key: self[key] for key in ['host', 'port', 'database', 'username', 'password']}
def parse_value(self, key, value):
if key == 'type':
if value not in ['sqlite', 'postgresql', 'mysql', 'mssql']:
raise ValueError(f'Invalid database type: {value}')
if key == 'port':
if not isinstance(value, int):
raise TypeError('Port is not an integer')
if key == 'row_classes':
for row_class in value.values():
if not issubclass(row_class, Row):
raise TypeError(f'Row classes must be izzylib.sql2.row.Row, not {row_class.__name__}')
return value

View file

@@ -1,244 +0,0 @@
import itertools
from .config import Config
from .row import Row
from .table import DbTables
from .types import Types
from .. import izzylog
from ..dotdict import DotDict
from ..exceptions import MaxConnectionsError, NoTableLayoutError, NoConnectionError
from ..path import Path
class Database:
def __init__(self, autoconnect=True, app=None, **kwargs):
tables = kwargs.pop('tables', None)
self.cfg = Config(**kwargs)
self.tables = DbTables(self)
self.types = Types(self)
self.connections = []
self.app = app
if tables:
self.load_tables(tables)
if autoconnect:
self.connect()
def connect(self):
for _ in itertools.repeat(None, self.cfg.minconnections):
self.get_connection()
def disconnect(self):
for conn in self.connections:
conn.disconnect()
self.connections = []
@property
def session(self):
return self.get_connection().session
def new_connection(self):
if len(self.connections) >= self.cfg.maxconnections:
raise MaxConnectionsError('Too many connections')
conn = Connection(self)
conn.connect()
self.connections.append(conn)
return conn
def close_connection(self, conn):
print('close connection')
conn.close_sessions()
conn.disconnect()
if not conn.conn:
try: self.connections.remove(conn)
except: pass
def get_connection(self):
if not len(self.connections):
return self.new_connection()
if len(self.connections) < self.cfg.minconnections:
return self.new_connection()
for conn in self.connections:
if not len(conn.sessions):
return conn
if len(self.connections) < self.cfg.maxconnections:
return self.new_connection()
conns = {(conn, len(conn.sessions)) for conn in self.connections}
return min(conns, key=lambda x: x[1])[0]
def new_predb(self, database='postgres'):
dbconfig = Config(**self.cfg)
dbconfig['database'] = database
dbconfig['autocommit'] = True
return Database(**dbconfig)
def set_row_class(self, name, row_class):
if not issubclass(row_class, Row):
raise TypeError(f'Row classes must be izzylib.sql2.row.Row, not {row_class.__name__}')
self.cfg.row_classes[name] = row_class
def get_row_class(self, name):
return self.cfg.row_classes.get(name, Row)
def load_tables(self, tables=None):
if tables:
self.tables.load_tables(tables)
else:
with self.session as s:
self.tables.load_tables(s.table_layout())
def create_tables(self):
if self.tables.empty:
raise NoTableLayoutError('Table layout not loaded yet')
with self.session as s:
for table in self.tables.names:
s.execute(self.tables.compile_table(table, self.cfg.type))
def create_database(self):
if self.cfg.type == 'postgresql':
with self.new_predb().session as s:
if not s.raw_execute('SELECT datname FROM pg_database WHERE datname = ?', [self.cfg.database]).fetchone():
s.raw_execute(f'CREATE DATABASE {self.cfg.database}')
elif self.cfg.type != 'sqlite':
raise NotImplementedError(f'Database type not supported yet: {self.cfg.type}')
self.create_tables()
def drop_database(self, database):
if self.cfg.type == 'sqlite':
izzylog.verbose('drop_database not needed for SQLite')
return
with self.session as s:
if self.cfg.type == 'postgresql':
s.raw_execute(f'DROP DATABASE {database}')
else:
raise NotImplementedError(f'Database type not supported yet: {self.cfg.type}')
class Connection:
def __init__(self, db):
self.db = db
self.cfg = db.cfg
self.sessions = []
self.conn = None
self.connect()
if db.tables.empty:
with self.session as s:
db.load_tables(s.table_layout())
@property
def autocommit(self):
return self.conn.autocommit
@property
def session(self):
return self.cfg.session_class(self)
def connect(self):
if self.conn:
return
dbconfig = self.cfg.dbargs
if self.cfg.type == 'sqlite':
if self.cfg.autocommit:
self.conn = self.cfg.module.connect(dbconfig['database'], isolation_level=None)
else:
self.conn = self.cfg.module.connect(dbconfig['database'])
elif self.cfg.type == 'postgresql':
if Path(self.cfg.host).exists():
dbconfig['unix_sock'] = dbconfig.pop('host')
dbconfig['user'] = dbconfig.pop('username')
dbconfig['application_name'] = self.cfg.appname
self.conn = self.cfg.module.connect(**dbconfig)
else:
self.conn = self.cfg.module.connect(**self.cfg.dbargs)
try:
self.conn.autocommit = self.cfg.autocommit
except AttributeError:
if self.cfg.module_name not in ['sqlite']:
izzylog.verbose('Module does not support autocommit:', self.cfg.module_name)
return self.conn
def disconnect(self):
if not self.conn:
return
self.close_sessions()
self.conn.close()
self.conn = None
def close_sessions(self):
for session in self.sessions:
self.close_session(session)
def close_session(self, session):
try: self.sessions.remove(session)
except: pass
session.close()
if not len(self.sessions) and len(self.db.connections) > self.cfg.minconnections:
self.disconnect()
def cursor(self):
if not self.conn:
raise NoConnectionError('Not connected to the database')
return self.conn.cursor()
def dump_database(self, path='database.sql'):
if self.cfg.type == 'sqlite':
path = Path(path)
with path.open('w') as fd:
fd.write('\n\n'.join(list(self.conn.iterdump())[1:-1]))
else:
raise NotImplementedError('Only SQLite supported atm :/')

View file

@@ -1,68 +0,0 @@
from .row import Row
class Result:
def __init__(self, session):
self.table = None
self.session = session
self.cursor = session.cursor
try:
self.keys = [desc[0] for desc in session.cursor.description]
except TypeError:
self.keys = []
def __iter__(self):
yield from self.all_iter()
@property
def row_class(self):
return self.session.db.get_row_class(self.table)
@property
def last_row_id(self):
if self.session.cfg.type == 'postgresql':
try:
return self.one().id
except:
return None
return self.cursor.lastrowid
@property
def row_count(self):
return self.cursor.rowcount
def set_table(self, table):
self.table = table
def one(self):
data = self.cursor.fetchone()
if not data:
return
return self.row_class(
self.session,
self.table,
{self.keys[idx]: value for idx, value in enumerate(data)},
)
def all(self):
return [row for row in self.all_iter()]
def all_iter(self):
for row in self.cursor:
yield self.row_class(self.session, self.table,
{self.keys[idx]: value for idx, value in enumerate(row)}
)

View file

@@ -1,29 +0,0 @@
from ..dotdict import DotDict
class Row(DotDict):
def __init__(self, session, table, data):
super().__init__(session._parse_data('serialize', table, data))
self._table = table
self._session = session
self.__run__(session)
def __run__(self, session):
pass
@property
def table(self):
return self._table
@property
def rowid(self):
return self.id
@property
def rowid2(self):
return self.get('rowid', self.id)

View file

@@ -1,346 +0,0 @@
import json
from pathlib import Path as PyPath
from .result import Result
from .row import Row
from .statements import Select, Insert, Delete, Count, Update, Statement
from .table import SessionTables
from .. import izzylog
from ..dotdict import DotDict
from ..exceptions import NoTableLayoutError, NoTransactionError, UpdateAllRowsError
from ..misc import boolean, random_gen
from ..path import Path
class Session:
def __init__(self, conn):
self.db = conn.db
self.cfg = conn.db.cfg
self.conn = conn
self.sid = random_gen()
self.tables = SessionTables(self)
self.cursor = conn.cursor()
self.trans = False
self.__setup__()
def __enter__(self):
return self
def __exit__(self, exctype, excvalue, traceback):
if traceback:
self.rollback()
else:
self.commit()
def __setup__(self):
pass
def close(self):
if not self.cursor:
return
self.conn.close_session(self)
def _parse_data(self, action, table, kwargs):
data = {}
if self.db.tables:
for key, value in kwargs.items():
try:
coltype = self.db.tables[table][key].type
except KeyError:
data[key] = value
continue
parser = self.db.types.get_type(coltype)
try:
data[key] = parser(action, self.cfg.type, value)
except Exception as e:
izzylog.error(f'Failed to parse data from the table "{table}": {key} = {value}')
izzylog.debug(f'Parser: {parser}, Type: {coltype}')
raise e from None
else:
data = kwargs
return data
def dump_database(self, path):
import sqlparse
path = Path(path)
with path.open('w') as fd:
line = '\n\n'.join(list(self.conn.conn.iterdump())[1:-1])
fd.write(sqlparse.format(line,
reindent = False,
keyword_case = 'upper',
))
def dump_database2(self, path):
path = Path(path)
with path.open('w') as fd:
fd.write('\n\n'.join(list(self.conn.conn.iterdump())[1:-1]))
def begin(self):
if self.trans or self.cfg.autocommit:
return
self.execute('BEGIN')
self.trans = True
def commit(self):
if not self.trans:
return
self.execute('COMMIT')
self.trans = False
def rollback(self):
if not self.trans:
return
self.execute('ROLLBACK')
self.trans = False
def raw_execute(self, string, values=None):
if type(string) == Path:
string = string.read()
elif type(string) == PyPath:
with string.open() as fd:
string = fd.read()
if values:
self.cursor.execute(string, values)
else:
self.cursor.execute(string)
return self.cursor
def execute(self, string, *values):
if isinstance(string, Statement):
raise TypeError('String must be a str not a Statement')
action = string.split()[0].upper()
if not self.trans and action in ['CREATE', 'INSERT', 'UPDATE', 'UPSERT', 'DROP', 'DELETE', 'ALTER']:
if self.cfg.auto_trans:
self.begin()
else:
raise NoTransactionError(f'Command not supported outside a transaction: {action}')
try:
self.raw_execute(string, values)
except Exception as e:
if type(e).__name__ in ['DatabaseError', 'OperationalError']:
print(string, values)
raise e from None
return Result(self)
def run(self, query):
result = self.execute(query.compile(self.cfg.type), *query.values)
if type(query) == Count:
return list(result.one().values())[0]
result.set_table(query.table)
return result
def run_count(self, query):
return list(self.run(query).one().values())[0]
def count(self, table, **kwargs):
if self.db.tables and table not in self.db.tables:
raise KeyError(f'Table does not exist: {table}')
query = Count(table, **kwargs)
return self.run_count(query)
def fetch(self, table, orderby=None, orderdir='ASC', limit=None, offset=None, **kwargs):
if self.db.tables and table not in self.db.tables:
raise KeyError(f'Table does not exist: {table}')
query = Select(table, **kwargs)
if orderby:
query.order(orderby, orderdir)
if limit:
query.limit(limit)
if offset:
query.offset(offset)
return self.run(query)
def insert(self, table, return_row=False, **kwargs):
if self.db.tables and table not in self.db.tables:
raise KeyError(f'Table does not exist: {table}')
result = self.run(Insert(table, **self._parse_data('deserialize', table, kwargs)))
if return_row:
return self.fetch(table, id=result.last_row_id).one()
return result.last_row_id
def update(self, table, data, return_row=False, **kwargs):
query = Update(table, **data)
for pair in kwargs.items():
query.where(*pair)
if not query._where:
raise UpdateAllRowsError(f'Refusing to update all rows in table: {table}')
result = self.run(query)
if return_row:
return self.fetch(table, id=result.last_row_id).one()
else:
return result
def update_row(self, row, return_row=False, **kwargs):
return self.update(row.table, kwargs, id=row.id, return_row=return_row)
def remove(self, table, **kwargs):
if self.db.tables and table not in self.db.tables:
raise KeyError(f'Table does not exist: {table}')
self.run(Delete(table, self._parse_data('deserialize', table, kwargs)))
def remove_row(self, row):
if not row.table:
raise ValueError('Row not associated with a table')
self.remove(row.table, id=row.id)
def create_tables(self, tables=None):
if tables:
self.db.load_tables(tables)
if self.db.tables.empty:
raise NoTableLayoutError('No table layout available')
for name in self.db.tables.names:
self.execute(self.db.tables.compile_table(name, self.cfg.type))
def table_layout(self):
tables = {}
if self.cfg.type == 'sqlite':
rows = self.execute("SELECT name, sql FROM sqlite_master WHERE type = 'table' AND name NOT LIKE 'sqlite_%'")
for row in rows:
name = row.name
tables[name] = {}
fkeys = {fkey['from']: f'{fkey.table}.{fkey["to"]}' for fkey in self.execute(f'PRAGMA foreign_key_list({name})')}
columns = [col for col in self.execute(f'PRAGMA table_info({name})')]
unique_list = parse_unique(row.sql)
for column in columns:
tables[name][column.name] = dict(
type = column.type.upper(),
nullable = not column.notnull,
default = parse_default(column.dflt_value),
primary_key = bool(column.pk),
foreign_key = fkeys.get(column.name),
unique = column.name in unique_list
)
elif self.cfg.type == 'postgresql':
for row in self.execute("SELECT * FROM information_schema.columns WHERE table_schema not in ('information_schema', 'pg_catalog') ORDER BY table_schema, table_name, ordinal_position"):
table = row.table_name
column = row.column_name
if not tables.get(table):
tables[table] = {}
if not tables[table].get(column):
tables[table][column] = {}
tables[table][column] = dict(
type = row.data_type.upper(),
nullable = boolean(row.is_nullable),
default = row.column_default if row.column_default and not row.column_default.startswith('nextval') else None,
primary_key = None,
foreign_key = None,
unique = None
)
return tables
def parse_unique(sql):
unique_list = []
try:
for raw_line in sql.splitlines():
if 'UNIQUE' not in raw_line:
continue
for line in raw_line.replace('UNIQUE', '').replace('(', '').replace(')', '').split(','):
line = line.strip()
if line:
unique_list.append(line)
except IndexError:
pass
return unique_list
def parse_default(value):
if value == None:
return
if value.startswith("'") and value.endswith("'"):
value = value[1:-1]
else:
try:
value = int(value)
except ValueError:
pass
return value
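A hypothetical end-to-end sketch of the Session above; it assumes a Database from izzylib.sql2 with a 'users' table that has a 'username' column.

with db.session as s:
    s.insert('users', username='izzy')
    row = s.fetch('users', username='izzy').one()
    s.update_row(row, username='izalia')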

View file

@@ -1,185 +0,0 @@
from ..dotdict import DotDict
Comparison = DotDict(
LESS = lambda key: f'{key} < ?',
GREATER = lambda key: f'{key} > ?',
LESS_EQUAL = lambda key: f'{key} <= ?',
GREATER_EQUAL = lambda key: f'{key} >= ?',
EQUAL = lambda key: f'{key} = ?',
NOT_EQUAL = lambda key: f'{key} != ?',
IN = lambda key: f'{key} IN (?)',
NOT_IN = lambda key: f'{key} NOT IN (?)',
LIKE = lambda key: f'{key} LIKE ?',
NOT_LIKE = lambda key: f'{key} NOT LIKE ?'
)
class Statement:
def __init__(self, table):
self.table = table
self.values = []
self._where = ''
self._order = None
self._limit = None
self._offset = None
def __str__(self):
return self.compile('sqlite')
def where(self, key, value, comparison='equal', operator='and'):
try:
comp = Comparison[comparison.upper().replace('-', '_')]
except KeyError:
raise KeyError(f'Invalid comparison: {comparison}')
prefix = f' {operator} ' if self._where else ' '
self._where += f'{prefix}{comp(key)}'
self.values.append(value)
return self
def order(self, column, direction='ASC'):
direction = direction.upper()
assert direction in ['ASC', 'DESC']
self._order = (column, direction)
return self
def limit(self, limit_num):
self._limit = int(limit_num)
return self
def offset(self, offset_num):
self._offset = int(offset_num)
return self
def compile(self, dbtype):
raise NotImplementedError('Do not use the Statement class directly.')
class Select(Statement):
def __init__(self, table, *columns, **kwargs):
super().__init__(table)
self.columns = columns
for key, value in kwargs.items():
self.where(key, value)
def compile(self, dbtype):
data = f'SELECT'
if self.columns:
columns = ','.join(self.columns)
else:
columns = '*'
data += f' {columns} FROM {self.table}'
if self._where:
data += f' WHERE {self._where}'
if self._order:
col, direc = self._order
data += f' ORDER BY {col} {direc}'
if self._limit:
data += f' LIMIT {self._limit}'
if self._offset:
data += f' OFFSET {self._offset}'
return data
class Insert(Statement):
def __init__(self, table, **kwargs):
super().__init__(table)
self.keys = []
for pair in kwargs.items():
self.add_data(*pair)
def add_data(self, key, value):
self.keys.append(key)
self.values.append(value)
def remove_data(self, key):
index = self.keys.index(key)
del self.keys[index]
del self.values[index]
def compile(self, dbtype):
keys = ','.join(self.keys)
values = ','.join('?' for value in self.values)
data = f'INSERT INTO {self.table} ({keys}) VALUES ({values})'
if dbtype == 'postgresql':
data += f' RETURNING id'
return data
class Update(Statement):
def __init__(self, table, **kwargs):
super().__init__(table)
self.keys = []
for key, value in kwargs.items():
self.keys.append(key)
self.values.append(value)
def compile(self, dbtype):
pairs = ','.join(f'{key} = ?' for key in self.keys)
data = f'UPDATE {self.table} SET {pairs} WHERE {self._where}'
if dbtype == 'postgresql':
data += f' RETURNING id'
return data
class Delete(Statement):
def __init__(self, table, **kwargs):
super().__init__(table)
for key, value in kwargs.items():
self.where(key, value)
def compile(self, dbtype):
return f'DELETE FROM {self.table} WHERE {self._where}'
class Count(Statement):
def __init__(self, table, **kwargs):
super().__init__(table)
for key, value in kwargs.items():
self.where(key, value)
def compile(self, dbtype):
data = f'SELECT COUNT(*) FROM {self.table}'
if self._where:
data += f' WHERE {self._where}'
return data
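A quick illustration of how these statement builders compose (a sketch using made-up table and column names):

stmt = Select('users', 'id', 'name').where('age', 18, 'greater_equal').order('name').limit(10)
sql = stmt.compile('sqlite')   # roughly: SELECT id,name FROM users WHERE age >= ? ORDER BY name ASC LIMIT 10
values = stmt.values           # [18], bound to the placeholder by Session.execute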

View file

@@ -1,244 +0,0 @@
from ..dotdict import DotDict
class SessionTables:
def __init__(self, session):
self._session = session
self._db = session.db
self._tables = session.db.tables
def __getattr__(self, key):
return SessionTable(self._session, key, self._tables[key])
def names(self):
return tuple(self._tables.keys())
class SessionTable(DotDict):
def __init__(self, session, name, columns):
super().__init__(columns)
self._name = name
self._session = session
self._db = session.db
@property
def name(self):
return self._name
@property
def columns(self):
return tuple(self.keys())
def fetch(self, **kwargs):
self._check_columns(**kwargs)
return self._session.fetch(self.name, **kwargs)
def insert(self, **kwargs):
self._check_columns(**kwargs)
return self._session.insert(self.name, **kwargs)
def remove(self, **kwargs):
self._check_columns(**kwargs)
return self._session.remove(self.name, **kwargs)
def _check_columns(self, **kwargs):
for key in kwargs.keys():
if key not in self.columns:
raise KeyError(f'Not a column for table "{self.name}": {key}')
class DbTables(DotDict):
def __init__(self, db):
super().__init__()
self._db = db
self._cfg = db.cfg
@property
def empty(self):
return not len(self.keys())
@property
def names(self):
return tuple(self.keys())
def load_tables(self, tables):
for name, columns in tables.items():
self.add_table(name, columns)
def unload_tables(self):
for key in self.names:
del self[key]
def add_table(self, name, columns):
self[name] = {}
if type(columns) == list:
self[name] = {col.name: col for col in columns}
elif isinstance(columns, dict):
for column, data in columns.items():
self[name][column] = DbColumn(self._cfg.type, column, **data)
else:
raise TypeError('Columns must be a list of Column objects or a dict')
def remove_table(self, name):
return self.pop(name)
def get_columns(self, name):
return tuple(self[name].values())
def compile_table(self, table_name, dbtype):
table = self[table_name]
columns = []
foreign_keys = []
for column in self.get_columns(table_name):
columns.append(column.compile(dbtype))
if column.foreign_key:
fkey_table, fkey_col = column.foreign_key
foreign_keys.append(f'FOREIGN KEY ({column.name}) REFERENCES {fkey_table} ({fkey_col})')
return f'CREATE TABLE IF NOT EXISTS {table_name} ({",".join(columns + foreign_keys)})'
def compile_all(self, dbtype):
return [self.compile_table(name, dbtype) for name in self.keys()]
class DbColumn(DotDict):
def __init__(self, dbtype, name, type=None, default=None, primary_key=False, unique=False, nullable=True, autoincrement=False, foreign_key=None):
super().__init__(
name = name,
type = type,
default = default,
primary_key = primary_key,
unique = unique,
nullable = nullable,
autoincrement = autoincrement,
foreign_key = foreign_key
)
if self.name == 'id':
if dbtype == 'sqlite':
self.type = 'INTEGER'
self.autoincrement = True
elif dbtype == 'postgresql':
self.type = 'SERIAL'
self.autoincrement = False
self.primary_key = True
self.unique = False
self.nullable = False
self.default = None
self.foreign_key = None
elif self.name in ['created', 'modified', 'accessed'] and not self.type:
self.type = 'DATETIME'
if not self.type:
raise ValueError(f'Must provide a column type for column: {name}')
try:
self.fkey
except ValueError:
raise ValueError(f'Invalid foreign_key format. Must be "table.column"')
@property
def fkey(self):
try:
return self.foreign_key.split('.')
except AttributeError:
return
def compile(self, dbtype):
line = f'{self.name} {self.type}'
if self.primary_key:
line += ' PRIMARY KEY'
if not self.nullable:
line += ' NOT NULL'
if self.unique:
line += ' UNIQUE'
if self.autoincrement and dbtype != 'postgresql':
line += ' AUTOINCREMENT'
if self.default:
line += f" DEFAULT {parse_default(self.default)}"
return line
class Column(DotDict):
def __init__(self, name, type=None, default=None, primary_key=False, unique=False, nullable=True, autoincrement=False, foreign_key=None):
super().__init__(
name = name,
type = type.upper() if type else None,
default = default,
primary_key = primary_key,
unique = unique,
nullable = nullable,
autoincrement = autoincrement,
foreign_key = foreign_key
)
if self.name == 'id':
self.type = 'SERIAL'
elif self.name in ['created', 'modified', 'accessed'] and not self.type:
self.type = 'DATETIME'
if not self.type:
raise ValueError(f'Must provide a column type for column: {name}')
try:
self.fkey
except ValueError:
raise ValueError(f'Invalid foreign_key format. Must be "table.column"')
@property
def fkey(self):
try:
return self.foreign_key.split('.')
except AttributeError:
return
def parse_default(default):
if isinstance(default, dict) or isinstance(default, list):
default = json.dumps(default)
if type(default) == str:
default = f"'{default}'"
return default
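For reference, a hypothetical table layout in the dict form that DbTables.load_tables accepts (the column options mirror the DbColumn keyword arguments above; names are invented):

tables = {
    'users': {
        'id': {'type': 'INTEGER', 'primary_key': True, 'nullable': False},
        'username': {'type': 'TEXT', 'unique': True, 'nullable': False},
        'created': {'type': 'DATETIME'}
    }
}

# e.g. Database(tables=tables) or db.load_tables(tables)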

View file

@@ -1,219 +0,0 @@
from datetime import date, time, datetime
from .. import izzylog
from ..dotdict import DotDict, LowerDotDict
Standard = {
'INTEGER',
'INT',
'TINYINT',
'SMALLINT',
'MEDIUMINT',
'BIGINT',
'UNSIGNED BIG INT',
'INT2',
'INT8',
'TEXT',
'CHARACTER',
'CHAR',
'VARCHAR',
'BLOB',
'CLOB',
'REAL',
'DOUBLE',
'DOUBLE PRECISION',
'FLOAT',
'NUMERIC',
'DEC',
'DECIMAL',
'BOOLEAN',
'DATE',
'TIME',
'JSON'
}
Sqlite = {
*Standard,
'DATETIME'
}
Postgresql = {
*Standard,
'SMALLSERIAL',
'SERIAL',
'BIGSERIAL',
'VARYING',
'BYTEA',
'TIMESTAMP',
'INTERVAL',
'POINT',
'LINE',
'LSEG',
'BOX',
'PATH',
'POLYGON',
'CIRCLE',
}
Mysql = {
*Standard,
'FIXED',
'BIT',
'YEAR',
'VARBINARY',
'ENUM',
'SET'
}
class Type:
sqlite = None
postgresql = None
mysql = None
def __getitem__(self, key):
if key in ['sqlite', 'postgresql', 'mysql']:
return getattr(self, key)
raise KeyError(f'Invalid database type: {key}')
def __call__(self, action, dbtype, value):
return getattr(self, action)(dbtype, value)
def name(self, dbtype='sqlite'):
return self[dbtype]
def serialize(self, dbtype, value):
return value
def deserialize(self, dbtype, value):
return value
class Json(Type):
sqlite = 'JSON'
postgresql = 'JSON'
mysql = 'JSON'
def serialize(self, dbtype, value):
izzylog.debug(f'serialize {type(self).__name__}: {type(value).__name__}', value)
if type(value) == str:
return DotDict(value)
return value
def deserialize(self, dbtype, value):
izzylog.debug(f'deserialize {type(self).__name__}: {type(value).__name__}', value)
return DotDict(value).to_json()
class Datetime(Type):
sqlite = 'DATETIME'
postgresql = 'TIMESTAMP'
mysql = 'DATETIME'
def serialize(self, dbtype, value):
izzylog.debug(f'serialize {type(self).__name__}: {type(value).__name__}', value)
if type(value) == str:
return datetime.fromisoformat(value)
elif type(value) == int:
return datetime.fromtimestamp(value)
return value
def deserialize(self, dbtype, value):
izzylog.debug(f'deserialize {type(self).__name__}: {type(value).__name__}', value)
if dbtype == 'sqlite':
return value.isoformat()
return value
class Date(Type):
sqlite = 'DATE'
postgresql = 'DATE'
mysql = 'DATE'
def serialize(self, dbtype, value):
izzylog.debug(f'serialize {type(self).__name__}: {type(value).__name__}', value)
if type(value) == str:
return date.fromisoformat(value)
elif type(value) == int:
return date.fromtimestamp(value)
return value
def deserialize(self, dbtype, value):
izzylog.debug(f'deserialize {type(self).__name__}: {type(value).__name__}', value)
if dbtype == 'sqlite':
return value.isoformat()
return value
class Time(Type):
sqlite = 'TIME'
postgresql = 'TIME'
mysql = 'TIME'
def serialize(self, dbtype, value):
izzylog.debug(f'serialize {type(self).__name__}: {type(value).__name__}', value)
if type(value) == str:
return time.fromisoformat(value)
elif type(value) == int:
return time.fromtimestamp(value)
return value
def deserialize(self, dbtype, value):
izzylog.debug(f'deserialize {type(self).__name__}: {type(value).__name__}', value)
if dbtype == 'sqlite':
return value.isoformat()
return value
class Types(DotDict):
def __init__(self, db):
self._db = db
self.set_type(Json, Date, Time, Datetime)
def get_type(self, name):
return self.get(name.upper(), Type())
def set_type(self, *types):
for type_object in types:
typeclass = type_object()
self[typeclass.name(self._db.cfg.type)] = typeclass

191
izzylib/steam.py Normal file
View file

@@ -0,0 +1,191 @@
import platform, vdf
from datetime import datetime
from . import izzylog
from .dotdict import DotDict
from .path import Path
data_paths = DotDict(
Linux = [
'~/.steam',
'~/.local/share/Steam',
'~/.local/share/steam'
],
Windows = [
'C:/Program Files (x86)/Steam',
'C:/Program Files/Steam'
],
Darwin = [
'~/Library/Application Support/Steam'
]
)
size_formats = {
'K': 1024,
'M': 1024**2,
'G': 1024**3
}
def get_data_path():
try: library_paths = data_paths[platform.system()]
except KeyError: raise OSError(f'Unsupported operating system: {platform.system()}')
for path in library_paths:
for cfg_path in Path(path).resolve().glob('config/config.vdf'):
return cfg_path.parent.parent
raise FileNotFoundError('Cannot find Steam data path')
def get_libraries(datapath=None):
path = Path(datapath or get_data_path()).join('config', 'libraryfolders.vdf')
if not path.exists():
raise FileNotFoundError(f'Cannot find libraryfolders.vdf: {path}')
steam_dirs = []
with path.open() as fd:
for _, data in vdf.load(fd)['libraryfolders'].items():
try: steam_dirs.append(Library(data['path']))
except TypeError: pass
return steam_dirs
class Library(Path):
def __new__(cls, path):
for subpath in ['steamapps', 'SteamApps']:
try:
return Path.__new__(cls, Path(path).join(subpath, exist=False))
except FileNotFoundError:
pass
print('heck')
raise FileNotFoundError(f'Cannot find steam library')
def __getitem__(self, key):
game = self.game(key)
if not game:
raise KeyError(f'Cannot find game with ID: {key}')
return game
def __iter__(self):
return self.games()
def games(self):
for manifest_file in self.glob('*.acf', False):
yield Game(self, manifest_file)
def games_sorted(self):
return sorted(self.games(), key=lambda x:x.name)
def game(self, gid):
for game in self.games():
if game.id == gid:
return game
class Game(DotDict):
def __init__(self, library, manifest_path):
with manifest_path.open() as fd:
manifest = vdf.load(fd)['AppState']
super().__init__({
'id': int(manifest['appid']),
'name': manifest['name'],
'directory': library.join('common', manifest['installdir']),
'updated': datetime.fromtimestamp(int(manifest['LastUpdated'])),
'install_size': int(manifest['SizeOnDisk']),
'compat': library.join('compatdata', manifest['appid'])
})
def __str__(self):
return f'{self.name} ({self.id})'
@property
def path(self):
return self.directory
@property
def prefix(self):
if self.compat.exists():
return ProtonPrefix(self.compat)
def size(self, format=None):
if format:
calculated_size = self.install_size / size_formats[format]
else:
calculated_size = self.install_size / size_formats['G']
format = 'G'
if calculated_size < 1:
calculated_size = self.install_size / size_formats['M']
format = 'M'
if calculated_size < 1:
calculated_size = self.install_size / size_formats['K']
format = 'K'
return f'{round(calculated_size, 2)} {format}iB'
class ProtonPrefix(Path):
def __new__(cls, path):
if not Path(path).pfx.exists():
raise FileNotFoundError(f'Not a proton prefix: {path}')
return Path.__new__(cls, path)
@property
def version(self):
try:
with self.join('version') as fd:
return fd.read().strip()
except FileNotFoundError:
return False
class CompatibilityTools(Path):
def __new__(cls, path):
if not Path(path).endswith('compatibilitytools.d'):
raise FileNotFoundError(f'Not a compatibility tools directory: {path}')
return Path.__new__(cls, path)
def versions(self):
for path in self.listdir(recursive=False):
with path.version as fd:
yield fd.read().strip()
class Proton(Path):
def __init__(self, path):
with self.join('version') as fd:
self.version = fd.read().strip()
def __new__(cls, path):
if not Path(path).version.exists():
raise FileNotFoundError(f'Not a Proton directory: {path}')
return Path.__new__(cls, path)
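A small usage sketch of the new steam module (it requires the third-party vdf package and a Steam install that get_data_path() can locate):

from izzylib import steam

for library in steam.get_libraries():
    for game in library.games_sorted():
        print(game, game.size(), game.prefix or 'no proton prefix')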

View file

@@ -1,232 +0,0 @@
import codecs, traceback, os, json, xml.etree.ElementTree
from colour import Color as Colour
from functools import partial
from hamlish_jinja import HamlishExtension
from jinja2 import Environment, FileSystemLoader, ChoiceLoader, select_autoescape, Markup
from os import listdir, makedirs
from os.path import isfile, isdir, getmtime, abspath
from xml.dom import minidom
from . import izzylog
from .dotdict import DotDict
from .path import Path
try:
from sanic import response as Response
except ModuleNotFoundError:
Response = None
class Template(Environment):
def __init__(self, search=[], global_vars={}, context=None, autoescape=True):
self.search = FileSystemLoader([])
super().__init__(
loader=self.search,
extensions=[HamlishExtension],
lstrip_blocks=True,
trim_blocks=True
)
self.autoescape = autoescape
self.func_context = context
self.hamlish_file_extensions=('.haml',)
self.hamlish_enable_div_shortcut=True
self.hamlish_mode = 'indented'
for path in search:
self.add_search_path(Path(path))
self.globals.update({
'markup': Markup,
'cleanhtml': lambda text: ''.join(xml.etree.ElementTree.fromstring(text).itertext()),
'color': Color,
'lighten': partial(color_func, 'lighten'),
'darken': partial(color_func, 'darken'),
'saturate': partial(color_func, 'saturate'),
'desaturate': partial(color_func, 'desaturate'),
'rgba': partial(color_func, 'rgba')
})
self.globals.update(global_vars)
def add_search_path(self, path, index=None):
if not path.exists:
raise FileNotFoundError(f'Cannot find search path: {path}')
if path not in self.search.searchpath:
loader = os.fspath(path)
if index != None:
self.search.searchpath.insert(index, loader)
else:
self.search.searchpath.append(loader)
def set_context(self, context):
if not hasattr(context, '__call__'):
izzylog.error('Context is not callable')
return
if not isinstance(context({}), dict):
izzylog.error('Context does not return a dict or dict-like object')
return
self.func_context = context
def add_env(self, k, v):
self.globals[k] = v
def del_env(self, var):
if not self.globals.get(var):
raise ValueError(f'"{var}" not in global variables')
del self.globals[var]
def update_env(self, data):
if not isinstance(data, dict):
raise ValueError(f'Environment data not a dict')
self.globals.update(data)
def add_filter(self, funct, name=None):
name = funct.__name__ if not name else name
self.filters[name] = funct
def del_filter(self, name):
if not self.filters.get(name):
raise ValueError(f'"{name}" not in global filters')
del self.filters[name]
def update_filter(self, data):
if not isinstance(data, dict):
raise ValueError(f'Filter data not a dict')
self.filters.update(data)
def render(self, tplfile, context_data={}, headers={}, cookies={}, request=None, pprint=False):
if not isinstance(context_data, dict):
raise TypeError(f'context for {tplfile} not a dict: {type(context_data)} {context_data}')
context = DotDict(self.globals)
context.update(context_data)
context['request'] = request if request else {'headers': headers, 'cookies': cookies}
if self.func_context:
# Backwards compat
try:
context = self.func_context(context)
except TypeError:
context = self.func_context(context, {})
if context == None:
izzylog.warning('Template context was set to "None"')
context = {}
result = self.get_template(tplfile).render(context)
if pprint and any(map(tplfile.endswith, ['haml', 'html', 'xml'])):
return minidom.parseString(result).toprettyxml(indent=" ")
else:
return result
def response(self, request, tpl, ctype='text/html', status=200, **kwargs):
if not Response:
raise ModuleNotFoundError('Sanic is not installed')
html = self.render(tpl, request=request, **kwargs)
return Response.HTTPResponse(body=html, status=status, content_type=ctype, headers=kwargs.get('headers', {}))
class Color(Colour):
def __init__(self, color):
if isinstance(color, str):
super().__init__(f'#{str(color)}' if not color.startswith('#') else color)
elif isinstance(color, Colour):
super().__init__(str(color))
else:
raise TypeError(f'Color has to be a string or Color class, not {type(color)}')
def __repr__(self):
return self.__str__()
def __str__(self):
return self.hex_l
def lighten(self, multiplier):
return self.alter('lighten', multiplier)
def darken(self, multiplier):
return self.alter('darken', multiplier)
def saturate(self, multiplier):
return self.alter('saturate', multiplier)
def desaturate(self, multiplier):
return self.alter('desaturate', multiplier)
def rgba(self, multiplier):
return self.alter('rgba', multiplier)
def multi(self, multiplier):
if multiplier >= 100:
return 100
elif multiplier <= 0:
return 0
return multiplier / 100
def alter(self, action, multiplier):
new_color = Color(self)
if action == 'lighten':
new_color.luminance += ((1 - self.luminance) * self.multi(multiplier))
elif action == 'darken':
new_color.luminance -= (self.luminance * self.multi(multiplier))
elif action == 'saturate':
new_color.saturation += ((1 - self.saturation) * self.multi(multiplier))
elif action == 'desaturate':
new_color.saturation -= (self.saturation * self.multi(multiplier))
elif action == 'rgba':
red = self.red*255
green = self.green*255
blue = self.blue*255
trans = self.multi(multiplier)
return f'rgba({red:0.2f}, {green:0.2f}, {blue:0.2f}, {trans})'
return new_color
def color_func(action, color, multi):
return Color(color).alter(action, multi)
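A minimal usage sketch of the Template wrapper above (the search path and template file are made up and assumed to exist):

tpl = Template(search=['templates'], global_vars={'sitename': 'Example'})
html = tpl.render('index.haml', {'title': 'Home'})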

90
pyvenv.json Normal file
View file

@@ -0,0 +1,90 @@
{
"name": "izzylib",
"modules": {
"argon2-cffi": {
"version": "21.1.0",
"options": [],
"url": null
},
"dasbus": {
"version": "1.6",
"options": [],
"url": null
},
"colour": {
"version": "0.1.5",
"options": [],
"url": null
},
"envbash": {
"version": "1.2.0",
"options": [],
"url": null
},
"hamlish-jinja": {
"version": "0.3.3",
"options": [],
"url": null
},
"http-router": {
"version": "2.6.4",
"options": [],
"url": null
},
"jinja2": {
"version": "3.0.1",
"options": [],
"url": null
},
"markdown": {
"version": "3.3.4",
"options": [],
"url": null
},
"pillow": {
"version": "8.3.2",
"options": [],
"url": null
},
"python-magic": {
"version": "0.4.24",
"options": [],
"url": null
},
"pycryptodome": {
"version": "3.10.1",
"options": [],
"url": null
},
"sanic": {
"version": "21.6.2",
"options": [],
"url": null
},
"sqlalchemy": {
"version": "1.4.23",
"options": [],
"url": null
},
"sqlalchemy-paginator": {
"version": "0.2",
"options": [],
"url": null
},
"sql-metadata": {
"version": "2.3.0",
"options": [],
"url": null
},
"tldextract": {
"version": "3.1.2",
"options": [],
"url": null
},
"urllib3": {
"version": "1.26.6",
"options": [],
"url": null
}
}
}