forgot to add IzzyLib
This commit is contained in:
parent
6d9e5401e0
commit
3bf362556c
11
uncia/Lib/IzzyLib/__init__.py
Normal file
11
uncia/Lib/IzzyLib/__init__.py
Normal file
|
@ -0,0 +1,11 @@
|
|||
'''
IzzyLib by Zoey Mae
Licensed under the CNPL: https://git.pixie.town/thufie/CNPL
https://git.barkshark.xyz/izaliamae/izzylib
'''

import sys

# Bail out early on unsupported interpreters; the library relies on
# f-strings and dict ordering, so anything older than 3.6 cannot run it.
# NOTE(review): `assert` is stripped under `python -O`; raising RuntimeError
# would be more robust -- confirm before changing.
assert sys.version_info >= (3, 6)


# Library version as a tuple; other modules join it into a dotted string.
__version__ = (0, 1, 1)
|
88
uncia/Lib/IzzyLib/cache.py
Normal file
88
uncia/Lib/IzzyLib/cache.py
Normal file
|
@ -0,0 +1,88 @@
|
|||
'''Simple caches that uses ordered dicts'''
|
||||
|
||||
import re
|
||||
|
||||
from datetime import datetime
|
||||
from collections import OrderedDict
|
||||
|
||||
|
||||
def parse_ttl(ttl):
	'''
	Convert a TTL string like "30s", "5m", "2h", "1d" or "1w" into seconds.

	ttl (str): a positive integer followed by one of
		's' (seconds), 'm' (minutes), 'h' (hours), 'd' (days), 'w' (weeks)

	Returns the length in seconds as an int.
	Raises ValueError when the string doesn't match the expected format.
	'''
	m = re.match(r'^(\d+)([smhdw]?)$', ttl)

	if not m:
		raise ValueError(f'Invalid TTL length: {ttl}')

	amount, unit = m.groups()

	if not unit:
		# bug fix: the digits are guaranteed by the regex when it matches;
		# the part that can be absent here is the time unit, so say so
		raise ValueError(f'Missing time unit in TTL: {ttl}')

	units = {
		's': 1,
		'm': 60,
		'h': 60 * 60,
		'd': 24 * 60 * 60,
		'w': 7 * 24 * 60 * 60
	}

	# the regex restricts `unit` to the keys above, so this lookup always
	# succeeds (the old "Invalid time unit" branch was unreachable)
	return units[unit] * int(amount)
|
||||
|
||||
|
||||
class TTLCache(OrderedDict):
	'''
	Ordered dict whose entries expire `ttl` seconds after their last access
	and whose oldest entries are evicted once `maxsize` is reached
	(maxsize == 0 disables the size limit).
	'''

	def __init__(self, maxsize=1024, ttl='1h'):
		# bug fix: OrderedDict.__init__ was never called
		super().__init__()
		self.ttl = parse_ttl(ttl)
		self.maxsize = maxsize

	def remove(self, key):
		'''Delete a key if present; a missing key is a no-op.'''
		# bug fix: membership test instead of truthiness, so entries holding
		# falsy values are removed too (and matches LRUCache.remove)
		if key in self:
			del self[key]

	def store(self, key, value):
		'''Insert or replace a value and stamp its expiry time.'''
		timestamp = int(datetime.timestamp(datetime.now()))

		# evict from the front (oldest) until there is room
		while len(self) >= self.maxsize and self.maxsize != 0:
			self.popitem(last=False)

		self[key] = {'data': value, 'timestamp': timestamp + self.ttl}
		self.move_to_end(key)

	def fetch(self, key):
		'''Return the stored value, refreshing its TTL, or None if missing/expired.'''
		item = self.get(key)
		timestamp = int(datetime.timestamp(datetime.now()))

		if not item:
			return

		if timestamp >= item['timestamp']:
			# entry expired; drop it and report a miss
			del self[key]
			return

		# sliding expiry: each read pushes the deadline forward
		item['timestamp'] = timestamp + self.ttl
		self.move_to_end(key)
		return item['data']
|
||||
|
||||
|
||||
class LRUCache(OrderedDict):
	'''Ordered dict that evicts its oldest entries once `maxsize` is reached.'''

	def __init__(self, maxsize=1024):
		# maxsize == 0 disables the size limit entirely
		self.maxsize = maxsize

	def remove(self, key):
		'''Drop `key` when present; unknown keys are ignored.'''
		if key not in self:
			return

		del self[key]

	def store(self, key, value):
		'''Insert `value` under `key`, evicting from the front as needed.'''
		if self.maxsize:
			while len(self) >= self.maxsize:
				self.popitem(last=False)

		self[key] = value
		self.move_to_end(key)

	def fetch(self, key):
		'''Return the cached value, or None when the key is unknown.'''
		return self.get(key)
|
56
uncia/Lib/IzzyLib/color.py
Normal file
56
uncia/Lib/IzzyLib/color.py
Normal file
|
@ -0,0 +1,56 @@
|
|||
'''functions to alter colors in hex format'''
|
||||
import re
|
||||
|
||||
from colour import Color
|
||||
|
||||
|
||||
def check(color):
	'''Build a Color, prefixing '#' when given a bare 3- or 6-digit hex string.'''
	if re.search(r'^(?:[0-9a-fA-F]{3}){1,2}$', color):
		return Color(f'#{str(color)}')

	return Color(color)
|
||||
|
||||
def _multi(multiplier):
|
||||
if multiplier >= 1:
|
||||
return 1
|
||||
|
||||
elif multiplier <= 0:
|
||||
return 0
|
||||
|
||||
return multiplier
|
||||
|
||||
def lighten(color, multiplier):
	'''Raise a color's luminance toward white by `multiplier` (clamped to [0, 1]).'''
	amount = _multi(multiplier)
	col = check(color)
	col.luminance += ((1 - col.luminance) * amount)

	return col.hex_l
|
||||
|
||||
def darken(color, multiplier):
	'''Lower a color's luminance toward black by `multiplier` (clamped to [0, 1]).'''
	amount = _multi(multiplier)
	col = check(color)
	col.luminance -= (col.luminance * amount)

	return col.hex_l
|
||||
|
||||
|
||||
def saturate(color, multiplier):
	'''Raise a color's saturation by `multiplier` (clamped to [0, 1]).'''
	amount = _multi(multiplier)
	col = check(color)
	col.saturation += ((1 - col.saturation) * amount)

	return col.hex_l
|
||||
|
||||
|
||||
def desaturate(color, multiplier):
	'''Lower a color's saturation by `multiplier` (clamped to [0, 1]).'''
	amount = _multi(multiplier)
	col = check(color)
	col.saturation -= (col.saturation * amount)

	return col.hex_l
|
||||
|
||||
|
||||
def rgba(color, transparency):
	'''Return a CSS rgba() string for `color` with the given alpha (clamped to [0, 1]).'''
	col = check(color)
	trans = _multi(transparency)

	# scale the 0..1 channel floats up to the usual 0..255 range
	red, green, blue = (col.red * 255, col.green * 255, col.blue * 255)

	return f'rgba({red:0.2f}, {green:0.2f}, {blue:0.2f}, {trans:0.2f})'
|
||||
|
||||
|
||||
# public helpers re-exported by `from IzzyLib.color import *`
__all__ = ['lighten', 'darken', 'saturate', 'desaturate', 'rgba']
|
204
uncia/Lib/IzzyLib/http.py
Normal file
204
uncia/Lib/IzzyLib/http.py
Normal file
|
@ -0,0 +1,204 @@
|
|||
import traceback, urllib3, json
|
||||
|
||||
from base64 import b64decode, b64encode
|
||||
from urllib.parse import urlparse
|
||||
from datetime import datetime
|
||||
|
||||
import httpsig
|
||||
|
||||
from Crypto.PublicKey import RSA
|
||||
#from Crypto.Hash import SHA, SHA256, SHA384, SHA512
|
||||
#from Crypto.Signature import PKCS1_v1_5
|
||||
|
||||
from . import logging, __version__
|
||||
from .cache import TTLCache, LRUCache
|
||||
from .misc import formatUTC
|
||||
|
||||
|
||||
version = '.'.join([str(num) for num in __version__])
|
||||
|
||||
|
||||
class Client(urllib3.PoolManager):
	'''
	Connection-pooled http client with an LRU response cache.

	pool (int): max number of connection pools
	timeout (int): request timeout in seconds
	headers (dict): extra headers to remember for the client
	agent (str): User-Agent header value
	'''

	def __init__(self, pool=100, timeout=30, headers=None, agent=f'IzzyLib/{version}'):
		# bug fix: pass the timeout through to the pool and store the
		# constructor arguments; the original read self.pool, self.timeout
		# and self.agent without ever assigning them (AttributeError)
		super().__init__(num_pools=pool, timeout=timeout)
		self.pool = pool
		self.timeout = timeout
		self.agent = agent
		self.cache = LRUCache()
		# bug fix: copy instead of keeping the shared mutable default
		self.headers = dict(headers) if headers else {}

		self.client = urllib3.PoolManager(num_pools=self.pool, timeout=self.timeout)
		self.headers['User-Agent'] = agent
		# NOTE(review): self.headers is stored but never merged into requests
		# by __fetch -- confirm whether that is intended


	def __fetch(self, url, headers=None, method='GET', data=None, cached=True):
		'''Perform a request, returning (and optionally caching) the raw response.'''
		cached_data = self.cache.fetch(url)

		if cached and cached_data:
			logging.debug(f'Returning cached data for {url}')
			return cached_data

		# bug fix: copy so neither the caller's dict nor a shared default
		# gets the User-Agent written into it
		headers = dict(headers) if headers else {}

		if not headers.get('User-Agent'):
			headers['User-Agent'] = self.agent

		logging.debug(f'Fetching new data for {url}')

		try:
			if data:
				# allow dict bodies by serializing them to json
				if isinstance(data, dict):
					data = json.dumps(data)

				resp = self.client.request(method, url, headers=headers, body=data)

			else:
				resp = self.client.request(method, url, headers=headers)

		except Exception as e:
			logging.debug(f'Failed to fetch url: {e}')
			return

		if cached:
			logging.debug(f'Caching {url}')
			self.cache.store(url, resp)

		return resp


	def raw(self, *args, **kwargs):
		'''
		Return a response object
		'''
		return self.__fetch(*args, **kwargs)


	def text(self, *args, **kwargs):
		'''
		Return the body as text
		'''
		resp = self.__fetch(*args, **kwargs)

		return resp.data.decode() if resp else None


	def json(self, *args, **kwargs):
		'''
		Return the body as a dict if it's json
		'''
		# bug fix: copy the caller's headers before adding Accept so the
		# caller's dict is not mutated
		kwargs['headers'] = dict(kwargs.get('headers') or {})
		kwargs['headers']['Accept'] = 'application/json'
		resp = self.__fetch(*args, **kwargs)

		try:
			data = json.loads(resp.data.decode())

		except Exception as e:
			logging.debug(f'Failed to load json: {e}')
			return

		return data
|
||||
|
||||
|
||||
def ParseSig(headers):
	'''
	Parse an http `signature` header into a dict of its key/value pairs.

	The quoted values are unquoted and the `headers` field is split into a
	list. Returns None when the header is absent or lacks a headers section.
	'''
	raw = headers.get('signature')

	if not raw:
		logging.verbose('Missing signature header')
		return

	signature = {}

	for chunk in raw.split(','):
		name, value = chunk.split('=', 1)
		signature[name.lower()] = value.replace('"', '')

	if not signature.get('headers'):
		logging.verbose('Missing headers section in signature')
		return

	signature['headers'] = signature['headers'].split()

	return signature
|
||||
|
||||
|
||||
def SignHeaders(headers, keyid, privkey, url, method='GET'):
	'''
	Signs headers and returns them with a signature header

	headers (dict): Headers to be signed
	keyid (str): Url to the public key used to verify the signature
	privkey (str): Private key used to sign the headers
	url (str): Url of the request for the signed headers
	method (str): Http method of the request for the signed headers
	'''
	rsa_key = RSA.import_key(privkey)
	# half the key size in bytes selects the sha variant (e.g. 512-byte key -> rsa-sha256)
	digest_size = int(rsa_key.size_in_bytes() / 2)
	logging.debug('Signing key size:', digest_size)

	target = urlparse(url)
	logging.debug(target)

	raw_headers = {
		'date': formatUTC(),
		'host': target.netloc,
		'(request-target)': ' '.join([method, target.path])
	}

	raw_headers.update(dict(headers))

	signer = httpsig.HeaderSigner(keyid, privkey, f'rsa-sha{digest_size}', headers=raw_headers.keys(), sign_header='signature')
	signed = signer.sign(raw_headers, target.netloc, method, target.path)
	logging.debug('Signed headers:', signed)

	# the pseudo-header is only part of the signing input, not a real header
	del signed['(request-target)']

	return dict(signed)
|
||||
|
||||
|
||||
def ValidateSignature(headers, method, path, client=None, agent=None):
	'''
	Validates the signature header.

	headers (dict): All of the headers to be used to check a signature. The signature header must be included too
	method (str): The http method used in relation to the headers
	path (str): The path of the request in relation to the headers
	client (pool object): Specify a http Client to use for fetching the actor. optional
	agent (str): User agent used for fetching actor data. optional

	Returns True when the signature verifies, otherwise None.
	'''
	# bug fix: the original referenced an undefined name `httpClient`;
	# the client class in this module is `Client`
	client = Client(agent=agent) if not client else client
	headers = {k.lower(): v for k, v in headers.items()}

	signature = ParseSig(headers)

	# bug fix: ParseSig returns None for a missing/malformed header; bail out
	# instead of raising a TypeError on the subscripts below
	if not signature:
		return

	actor_data = client.json(signature['keyid'])
	logging.debug(actor_data)

	try:
		pubkey = actor_data['publicKey']['publicKeyPem']

	except Exception as e:
		logging.verbose(f'Failed to get public key for actor {signature["keyid"]}')
		return

	valid = httpsig.HeaderVerifier(headers, pubkey, signature['headers'], method, path, sign_header='signature').verify()

	if not valid:
		if not isinstance(valid, tuple):
			logging.verbose('Signature validation failed for unknown actor')
			logging.verbose(valid)

		else:
			logging.verbose(f'Signature validation failed for actor: {valid[1]}')

		return

	else:
		return True
|
||||
|
||||
|
||||
def ValidateRequest(request, client=None, agent=None):
	'''
	Validates the headers in a Sanic or Aiohttp request (other frameworks may be supported)
	See ValidateSignature for 'client' and 'agent' usage
	'''
	args = (request.headers, request.method, request.path)

	return ValidateSignature(*args, client, agent)
|
209
uncia/Lib/IzzyLib/logging.py
Normal file
209
uncia/Lib/IzzyLib/logging.py
Normal file
|
@ -0,0 +1,209 @@
|
|||
'''Simple logging module'''
|
||||
|
||||
import sys
|
||||
|
||||
from os import environ as env
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
stdout = sys.stdout
|
||||
|
||||
|
||||
class Log():
	'''
	Console logger with numeric levels, an optional date prefix and optional
	system notifications.
	'''

	def __init__(self, config=None):
		'''setup the logger'''
		# bug fix: avoid the shared mutable default argument
		config = {} if config is None else config

		if not isinstance(config, dict):
			raise TypeError('config is not a dict')

		# numeric value for each short level name; higher == more severe
		self.levels = {
			'CRIT': 60,
			'ERROR': 50,
			'WARN': 40,
			'INFO': 30,
			'VERB': 20,
			'DEBUG': 10,
			'MERP': 0
		}

		# long spellings accepted by _lvlCheck, mapped to the short names
		self.long_levels = {
			'CRITICAL': 'CRIT',
			'ERROR': 'ERROR',
			'WARNING': 'WARN',
			'INFO': 'INFO',
			'VERBOSE': 'VERB',
			'DEBUG': 'DEBUG',
			'MERP': 'MERP'
		}

		# pythonw.exe has no usable stdout, so logging is disabled there
		self.config = {'windows': sys.executable.endswith('pythonw.exe')}
		self.setConfig(self._parseConfig(config))


	def _lvlCheck(self, level):
		'''make sure the minimum logging level is an int'''
		try:
			value = int(level)

		except ValueError:
			# accept long names ('WARNING') as well as short ones ('WARN')
			level = self.long_levels.get(level.upper(), level)
			value = self.levels.get(level)

		if value not in self.levels.values():
			raise InvalidLevel(f'Invalid logging level: {level}')

		return value


	def _getLevelName(self, level):
		'''map a numeric level back to its short name'''
		for name, num in self.levels.items():
			if level == num:
				return name

		raise InvalidLevel(f'Invalid logging level: {level}')


	def _parseConfig(self, config):
		'''parse the new config and update the old values'''
		date = config.get('date', self.config.get('date', True))
		# bug fix: the fallback key was misspelled as 'systemd,'
		systemd = config.get('systemd', self.config.get('systemd', True))
		windows = config.get('windows', self.config.get('windows', False))

		if not isinstance(date, bool):
			raise TypeError(f'value for "date" is not a boolean: {date}')

		if not isinstance(systemd, bool):
			# bug fix: report the offending value instead of `date`
			raise TypeError(f'value for "systemd" is not a boolean: {systemd}')

		level_num = self._lvlCheck(config.get('level', self.config.get('level', 'INFO')))

		newconfig = {
			'level': self._getLevelName(level_num),
			'levelnum': level_num,
			'datefmt': config.get('datefmt', self.config.get('datefmt', '%Y-%m-%d %H:%M:%S')),
			'date': date,
			'systemd': systemd,
			'windows': windows,
			'systemnotif': config.get('systemnotif', None)
		}

		return newconfig


	def setConfig(self, config):
		'''set the config'''
		self.config = self._parseConfig(config)


	def getConfig(self, key=None):
		'''return the current config, or a single option when `key` is given'''
		if key:
			# bug fix: membership test instead of truthiness, so falsy values
			# (e.g. date=False, levelnum=0) can be read
			if key in self.config:
				return self.config[key]

			raise ValueError(f'Invalid config option: {key}')

		return self.config


	def printConfig(self):
		'''write the current config to stdout, one option per line'''
		for k, v in self.config.items():
			stdout.write(f'{k}: {v}\n')

		stdout.flush()


	def setLevel(self, level):
		'''change the minimum level a message needs in order to be printed'''
		# bug fix: the original stored the value in an attribute nothing reads;
		# route it through the config so log() actually honors it
		self.setConfig({'level': level})


	def log(self, level, *msg):
		'''log a message to the console if `level` is at or above the minimum'''
		if self.config['windows']:
			return

		levelNum = self._lvlCheck(level)

		if type(level) == int:
			# bug fix: _getLevelName is a method; the original called it as a
			# bare name and raised NameError whenever an int level was passed
			level = self._getLevelName(level)

		if levelNum < self.config['levelnum']:
			return

		message = ' '.join([str(message) for message in msg])
		output = f'{level}: {message}\n'

		if self.config['systemnotif']:
			self.config['systemnotif'].New(level, message)

		# NOTE(review): the original comment says the date should only show
		# when *not* under systemd, yet the condition requires systemd=True
		# and no INVOCATION_ID -- confirm the intent before changing
		if self.config['date'] and (self.config['systemd'] and not env.get('INVOCATION_ID')):
			date = datetime.now().strftime(self.config['datefmt'])
			output = f'{date} {output}'

		stdout.write(output)
		stdout.flush()


	def critical(self, *msg):
		self.log('CRIT', *msg)

	def error(self, *msg):
		self.log('ERROR', *msg)

	def warning(self, *msg):
		self.log('WARN', *msg)

	def info(self, *msg):
		self.log('INFO', *msg)

	def verbose(self, *msg):
		self.log('VERB', *msg)

	def debug(self, *msg):
		self.log('DEBUG', *msg)

	def merp(self, *msg):
		self.log('MERP', *msg)
|
||||
|
||||
|
||||
def getLogger(loginst, config=None):
	'''get a logging instance and create one if it doesn't exist'''
	if not logger.get(loginst):
		if not config:
			raise InvalidLogger(f'logger "{loginst}" doesn\'t exist')

		logger[loginst] = Log(config)

	return logger[loginst]
|
||||
|
||||
class InvalidLevel(Exception):
	'''Raise when an invalid logging level was specified'''


class InvalidLogger(Exception):
	'''Raise when the specified logger doesn't exist'''
|
||||
|
||||
|
||||
'''create a default logger'''
# registry of named Log instances used by getLogger()
logger = {
	'default': Log()
}

DefaultLog = logger['default']


'''aliases for default logger's log output functions'''
critical = DefaultLog.critical
error = DefaultLog.error
warning = DefaultLog.warning
info = DefaultLog.info
verbose = DefaultLog.verbose
debug = DefaultLog.debug
merp = DefaultLog.merp

'''aliases for the default logger's config functions'''
setConfig = DefaultLog.setConfig
getConfig = DefaultLog.getConfig
setLevel = DefaultLog.setLevel
printConfig = DefaultLog.printConfig
|
44
uncia/Lib/IzzyLib/misc.py
Normal file
44
uncia/Lib/IzzyLib/misc.py
Normal file
|
@ -0,0 +1,44 @@
|
|||
'''Miscellaneous functions'''
|
||||
import os
import random
import string
import sys

from datetime import datetime
from os import environ as env
from os.path import abspath, basename, dirname, isdir, isfile
# bug fix: pathlib exports `Path`, not `path`; the lowercase name raised ImportError
from pathlib import Path

from . import logging
|
||||
|
||||
|
||||
def Boolean(v, return_value=False):
	'''
	Convert a string, bool, int, or None into a boolean.

	v: the value to convert
	return_value (bool): when True, return `v` itself (instead of True) for
		values that don't match a known truthy/falsy word

	Raises ValueError when `v` is not a str, bool, int, or None.
	'''
	if type(v) not in [str, bool, int, type(None)]:
		# bug fix: the original interpolated `value`, which is only assigned
		# below, so this raise produced a NameError instead of a ValueError
		raise ValueError(f'Value is not a string, boolean, int, or nonetype: {v}')

	'''make the value lowercase if it's a string'''
	value = v.lower() if isinstance(v, str) else v

	if value in [1, True, 'on', 'y', 'yes', 'true', 'enable']:
		'''convert string to True'''
		return True

	if value in [0, False, None, 'off', 'n', 'no', 'false', 'disable', '']:
		'''convert string to False'''
		return False

	if return_value:
		'''just return the value'''
		return v

	return True
|
||||
|
||||
|
||||
def RandomGen(chars=20):
	'''
	Generate a random alphanumeric string.

	chars (int): length of the generated string

	Raises TypeError when `chars` is not an integer.
	'''
	if not isinstance(chars, int):
		# bug fix: the original interpolated the undefined name `char`,
		# turning this raise into a NameError instead of a TypeError
		raise TypeError(f'Character length must be an integer, not a {type(chars)}')

	return ''.join(random.choices(string.ascii_letters + string.digits, k=chars))
|
||||
|
||||
|
||||
def FormatUtc(timestamp=None):
	'''
	Format a unix timestamp (or the current time when omitted) as an
	http-style date string, e.g. 'Thu, 01 Jan 1970 00:00:00 GMT'.
	'''
	from datetime import timezone

	# bug fix: the naive fromtimestamp() produced *local* time while the
	# format string claims GMT; interpret the timestamp as UTC instead.
	# Also test against None so timestamp=0 (the epoch) is honored.
	if timestamp is not None:
		date = datetime.fromtimestamp(timestamp, tz=timezone.utc)

	else:
		date = datetime.now(timezone.utc)

	return date.strftime('%a, %d %b %Y %H:%M:%S GMT')


# backward-compat alias: http.py imports this function as `formatUTC`
formatUTC = FormatUtc
|
190
uncia/Lib/IzzyLib/template.py
Normal file
190
uncia/Lib/IzzyLib/template.py
Normal file
|
@ -0,0 +1,190 @@
|
|||
'''functions for web template management and rendering'''
|
||||
import codecs, traceback, os, json, aiohttp, xml
|
||||
|
||||
from os import listdir, makedirs
|
||||
from os.path import isfile, isdir, getmtime, abspath
|
||||
|
||||
from jinja2 import Environment, FileSystemLoader, ChoiceLoader, select_autoescape, Markup
|
||||
from sanic import response as Response
|
||||
from hamlpy.hamlpy import Compiler
|
||||
from markdown import markdown
|
||||
from watchdog.observers import Observer
|
||||
from watchdog.events import FileSystemEventHandler
|
||||
|
||||
from . import logging
|
||||
from .color import *
|
||||
|
||||
|
||||
class Template(Environment):
	'''
	Jinja2 environment that can compile haml sources into html templates and
	optionally watch the source directories for changes.

	build (dict): source dir -> destination dir pairs for haml compilation
	search (list): extra template search paths
	global_vars (dict): extra variables exposed to every template
	autoescape (list): file extensions to autoescape (default: html, css)
	'''

	def __init__(self, build=None, search=None, global_vars=None, autoescape=None):
		# bug fix: avoid shared mutable defaults for build/search/global_vars
		build = {} if build is None else build
		search = [] if search is None else search
		global_vars = {} if global_vars is None else global_vars

		self.autoescape = ['html', 'css'] if not autoescape else autoescape
		self.search = []
		self.build = {}
		# created lazily by setupWatcher(); startWatcher/stopWatcher read it
		self.watcher = None

		for source, dest in build.items():
			self.__addBuildPath(source, dest)

		for path in search:
			self.__addSearchPath(path)

		# helpers available inside every template
		self.var = {
			'markdown': markdown,
			'markup': Markup,
			# bug fix: remove_tags is a method; the bare name raised NameError
			'cleanhtml': self.remove_tags,
			'lighten': lighten,
			'darken': darken,
			'saturate': saturate,
			'desaturate': desaturate,
			'rgba': rgba
		}

		self.var.update(global_vars)

		super().__init__(
			loader=ChoiceLoader([FileSystemLoader(path) for path in self.search]),
			autoescape=select_autoescape(self.autoescape),
			lstrip_blocks=True,
			trim_blocks=True
		)


	def __addSearchPath(self, path):
		'''add an absolute path to the template search list (deduplicated)'''
		tplPath = abspath(str(path))

		if tplPath not in self.search:
			self.search.append(tplPath)


	def __addBuildPath(self, source, destination):
		'''register a haml source dir and add its destination to the search path'''
		src = abspath(str(source))
		dest = abspath(str(destination))

		if not isdir(src):
			# bug fix: the message was a plain string, so {src} never expanded
			raise FileNotFoundError(f'Source path doesn\'t exist: {src}')

		self.build[src] = dest
		self.__addSearchPath(dest)


	def addEnv(self, k, v):
		'''expose a variable to every template'''
		self.var[k] = v


	def delEnv(self, var):
		'''remove a template variable; raises ValueError when it isn't set'''
		if not self.var.get(var):
			raise ValueError(f'"{var}" not in global variables')

		del self.var[var]


	def render(self, tplfile, context, request=None, headers={}, cookies={}, **kwargs):
		'''render a template file with the given context dict'''
		if not isinstance(context, dict):
			raise TypeError(f'context for {tplfile} not a dict')

		# bug fix: the original referenced the undefined names
		# `global_variables` and `env`; use this environment's own state
		data = self.var.copy()
		data['request'] = request if request else {'headers': headers, 'cookies': cookies}
		data.update(context)

		return self.get_template(tplfile).render(data)


	def response(self, *args, ctype='text/html', status=200, headers={}, **kwargs):
		'''render a template and wrap it in a sanic HTTPResponse'''
		html = self.render(*args, **kwargs)
		return Response.HTTPResponse(body=html, status=status, content_type=ctype, headers=headers)


	def buildTemplates(self, src=None):
		'''compile changed haml sources into html, tracking mtimes in times.json'''
		# bug fix: the source->dest pairs live in self.build (self.search is a list)
		paths = {src: self.build.get(src)} if src else self.build

		for src, dest in paths.items():
			timefile = f'{dest}/times.json'
			updated = False

			if not isdir(f'{dest}'):
				makedirs(f'{dest}')

			if isfile(timefile):
				try:
					times = json.load(open(timefile))

				except Exception:
					times = {}

			else:
				times = {}

			for filename in listdir(src):
				# NOTE(review): this path and the log messages below were
				# reconstructed -- the scrape replaced the interpolated
				# filename with a placeholder; confirm against upstream
				fullPath = f'{src}/{filename}'
				modtime = getmtime(fullPath)
				base, ext = filename.split('.', 1)

				if ext != 'haml':
					continue

				if base not in times or times.get(base) != modtime:
					updated = True
					logging.verbose(f"Template '{filename}' was changed. Building...")

					try:
						destination = f'{dest}/{base}.html'
						haml_lines = codecs.open(fullPath, 'r', encoding='utf-8').read().splitlines()

						compiler = Compiler()
						output = compiler.process_lines(haml_lines)
						outfile = codecs.open(destination, 'w', encoding='utf-8')
						outfile.write(output)

						logging.info(f"Template '{filename}' has been built")

					except Exception as e:
						'''I'm actually not sure what sort of errors can happen here, so generic catch-all for now'''
						traceback.print_exc()
						logging.error(f'Failed to build {filename}: {e}')

				times[base] = modtime

			if updated:
				with open(timefile, 'w') as filename:
					filename.write(json.dumps(times))


	def remove_tags(self, text):
		'''strip html/xml tags from a string, returning only the text content'''
		# bug fix: the module-level `import xml` doesn't expose the etree
		# submodule; import it explicitly
		from xml.etree import ElementTree
		return ''.join(ElementTree.fromstring(text).itertext())


	def setupWatcher(self):
		'''create (but don't start) a watchdog observer over the haml source dirs'''
		# bug fix: the original iterated the undefined name `build_path_pairs`;
		# the registered source dirs are the keys of self.build
		watchPaths = list(self.build)
		logging.info('Starting template watcher')
		observer = Observer()

		for tplpath in watchPaths:
			logging.debug(f'Watching template dir for changes: {tplpath}')
			# bug fix: the handler class is named TemplateWatchHandler
			observer.schedule(TemplateWatchHandler(), tplpath, recursive=True)

		self.watcher = observer


	def startWatcher(self):
		'''start the filesystem watcher, creating it first if needed'''
		if not self.watcher:
			self.setupWatcher()

		self.watcher.start()


	def stopWatcher(self, destroy=False):
		'''stop the watcher; destroy=True also discards it'''
		self.watcher.stop()

		if destroy:
			self.watcher = None
|
||||
|
||||
|
||||
class TemplateWatchHandler(FileSystemEventHandler):
	'''Watchdog handler that triggers a template rebuild when a haml source changes.'''

	def on_any_event(self, event):
		# split off the extension of the file that triggered the event
		filename, ext = os.path.splitext(os.path.relpath(event.src_path))

		if event.event_type in ['modified', 'created'] and ext[1:] == 'haml':
			logging.info('Rebuilding templates')
			# NOTE(review): `buildTemplates` is a method of Template; there is
			# no module-level function of that name, so this call raises
			# NameError when it fires. The handler needs a reference to its
			# Template instance -- confirm intended wiring before fixing.
			buildTemplates()
|
||||
|
||||
|
||||
# NOTE(review): several of these names don't exist at module level
# (addSearchPath/addBuildPath are private Template methods; setup,
# renderTemplate and templateWatcher are undefined) and 'delSearchPath'
# appears twice -- confirm the intended public API.
__all__ = ['addSearchPath', 'delSearchPath', 'addBuildPath', 'delSearchPath', 'addEnv', 'delEnv', 'setup', 'renderTemplate', 'aiohttp', 'buildTemplates', 'templateWatcher']
|
Loading…
Reference in a new issue