Add elwig-backend
341 src/elwig-backend Executable file
@@ -0,0 +1,341 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

from __future__ import annotations
from typing import Callable, Optional
from http.server import BaseHTTPRequestHandler, HTTPServer
import argparse
import datetime
import traceback
import re
import base64
import json
import sqlite3
import urllib.parse
import gzip

CNX: sqlite3.Connection
USER_FILE: str


class BadRequestError(Exception):
    pass

class Filter:
    def __init__(self, name: str, value: int | str | None = None):
        self.name = name
        self.value = value

    def is_int(self) -> bool:
        return type(self.value) is int

    def is_str(self) -> bool:
        return type(self.value) is str

    def is_single(self) -> bool:
        return self.value is None

    def __repr__(self) -> str:
        if self.is_single():
            return self.name
        elif self.name == 'kgnr':
            return f'kgnr={self.value:05}'
        return f'{self.name}={self.value}'

    def __str__(self) -> str:
        return self.__repr__()

    def __eq__(self, other) -> bool:
        return self.__repr__() == other.__repr__()

    @staticmethod
    def from_str(string: str) -> Filter:
        f = string.split('=', 1)
        if len(f) == 2:
            return Filter(f[0], int(f[1]) if f[1].isdigit() else f[1])
        return Filter(f[0])
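
# How Filter.from_str parses a filter string (example values are made up):
#   'year=2023'  -> Filter('year', 2023)    digit-only values become int
#   'sortid=GV'  -> Filter('sortid', 'GV')  anything else stays a str
#   'cancelled'  -> Filter('cancelled')     no '=' means a bare flag
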
def sqlite_regexp(pattern: str, value: Optional[str]) -> Optional[bool]:
    return re.match(pattern, value) is not None if value is not None else None


def kmw_to_oe(kmw: Optional[float]) -> Optional[float]:
    return kmw * (4.54 + 0.022 * kmw) if kmw is not None else None


def jdmp(value, is_bool: bool = False) -> str:
    if is_bool and value:
        return ' true'
    elif is_bool and not value:
        return 'false'
    return json.dumps(value, ensure_ascii=False)


def get_delivery_schedule_filter_clauses(filters: list[Filter]) -> list[str]:
    clauses = []
    for f in filters:
        if f.name == 'year' and f.is_int():
            clauses.append(f"s.year = {f.value}")
        elif f.name == 'sortid' and f.is_str() and len(f.value) == 2 and f.value.isalpha() and f.value.isupper():
            clauses.append(f"v.sortid = '{f.value}'")
        elif f.name == 'date' and f.is_str() and re.match(r'[0-9]{4}-[0-9]{2}-[0-9]{2}', f.value) is not None:
            clauses.append(f"s.date = '{f.value}'")
        else:
            raise BadRequestError(f"Invalid filter '{f}'")
    return clauses
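
# Example of the WHERE clauses the helper above produces (filter values are made up):
#   [Filter('year', 2023), Filter('sortid', 'GV')] -> ["s.year = 2023", "v.sortid = 'GV'"]
# Any other filter raises BadRequestError, which do_GET answers with HTTP 400.
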
class ElwigApi(BaseHTTPRequestHandler):
    def send(self, data: str, status_code: int = 200):
        raw = data.encode('utf-8')
        self.send_response(status_code)
        self.send_header('Access-Control-Allow-Origin', '*')
        self.send_header('Access-Control-Allow-Headers', 'Authorization')
        self.send_header('Access-Control-Allow-Methods', 'HEAD, GET, OPTIONS')
        if self.headers.get('Accept-Encoding') and len(data) > 64:
            accept_encoding = [e.strip() for e in self.headers.get('Accept-Encoding').split(',')]
            if 'gzip' in accept_encoding:
                raw = gzip.compress(raw)
                self.send_header('Content-Encoding', 'gzip')
        self.send_header('Content-Type', 'application/json; charset=UTF-8')
        self.send_header('Content-Length', str(len(raw)))
        self.end_headers()
        # do not send a body for HEAD and OPTIONS requests
        if self.command != 'HEAD' and self.command != 'OPTIONS':
            self.wfile.write(raw)

    def error(self, status_code: int, message: str = None):
        self.send(f'{{"message":{jdmp(message)}}}\n', status_code=status_code)

    def authorize(self) -> tuple[Optional[str], Optional[str]]:
        try:
            auth = self.headers.get('Authorization')
            if auth is None or not auth.startswith('Basic '):
                self.error(401, 'Unauthorized')
                return None, None
            auth = base64.b64decode(auth[6:]).split(b':', 1)
            if len(auth) != 2:
                self.error(401, 'Invalid Authorization header')
                return None, None
            username, password = auth[0].decode('utf-8'), auth[1].decode('utf-8')
        except Exception:
            self.error(401, 'Invalid Authorization header')
            return None, None
        # USER_FILE holds one "<username>:<role>:<password>" entry per line
        with open(USER_FILE, 'r') as file:
            for line in file:
                (u, r, p) = line.strip().split(':', 2)
                if u == username:
                    if p == password:
                        return u, r
                    else:
                        self.error(401, 'Unauthorized')
                        return None, None
        # no matching user found
        self.error(401, 'Unauthorized')
        return None, None
    def exec_collection(self, sql_query: str, fmt: Callable, filters: list[Filter],
                        offset: int = None, limit: int = None,
                        distinct: tuple[list[str], list[int]] = None, sub_fmt: Callable = None):
        # derive a COUNT query from the data query (strip WITH clause, LIMIT/OFFSET and GROUP BY)
        with_clause = re.findall(r'(WITH .*?\))[\s\n]*SELECT', sql_query, flags=re.DOTALL)
        if len(with_clause) > 0 and '.*' not in with_clause[0]:
            with_clause = with_clause[0]
            count_query = sql_query.replace(with_clause, '')
        else:
            with_clause = None
            count_query = sql_query
        count = f"""SELECT COUNT(DISTINCT {" || '|' || ".join(distinct[0])}) FROM""" if distinct else "SELECT COUNT(*) FROM"
        count_query = re.sub(r"SELECT [^*]+? FROM", count, count_query, count=1, flags=re.DOTALL)
        count_query = re.sub(r"(OFFSET|LIMIT) [0-9-]+", '', count_query)
        count_query = re.sub(r"GROUP BY .*", '', count_query)
        if with_clause:
            count_query = with_clause + ' ' + count_query
        count = CNX.execute(count_query).fetchone()
        count = count[0] if count is not None else 0

        if limit is not None:
            if "LIMIT " in sql_query:
                sql_query = re.sub(r"LIMIT [0-9-]+", f"LIMIT {limit}", sql_query)
            else:
                sql_query += f" LIMIT {limit}"
        if offset is not None:
            if "OFFSET " in sql_query:
                sql_query = re.sub(r"OFFSET [0-9-]+", f"OFFSET {offset}", sql_query)
            else:
                sql_query += f" OFFSET {offset}"
        rows = CNX.execute(sql_query)

        data = (f'''{{"filters":[{','.join(f'{{"filter":{jdmp(str(f))}}}' for f in filters)}],'''
                f'"total":{count},"offset":{offset or 0},"limit":{jdmp(limit)},'
                f'"data":[')
        first, first_, cur, last = True, True, None, None
        for r in rows or []:
            cur = tuple([r[i] for i in distinct[1]]) if distinct else None
            if not distinct or cur != last:
                # start a new top-level object whenever the distinct key changes
                first_ = True
                if first:
                    first = False
                else:
                    if distinct and sub_fmt:
                        data += '\n ]}'
                    data += ','
                data += f'\n ' + fmt(r)
            if distinct and sub_fmt:
                # append the per-row sub-entry to the current object's array
                if first_:
                    data += '['
                    first_ = False
                else:
                    data += ','
                data += f'\n ' + sub_fmt(r)
            last = cur
        if distinct and sub_fmt and not first:
            data += '\n ]}'
        data += '\n]}\n'
        self.send(data)
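
    # Rough shape of the JSON envelope exec_collection builds (values are illustrative):
    #   {"filters":[{"filter":"year=2023"}],"total":42,"offset":0,"limit":null,
    #    "data":[ ...one object per row, produced by fmt(r)... ]}
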
    def do_GET_delivery_schedules(self, filters: list[Filter], offset: int = None, limit: int = None, order: str = None):
        clauses = get_delivery_schedule_filter_clauses(filters)
        sql = f"""
            WITH announcements
                 AS (SELECT year, dsnr, SUM(weight) AS weight
                     FROM delivery_announcement
                     GROUP BY year, dsnr)
            SELECT s.year, s.dsnr, s.date, s.description, s.max_weight, s.cancelled,
                   COALESCE(a.weight, 0) AS announced_weight,
                   COALESCE(SUM(p.weight), 0) AS delivered_weight,
                   STRFTIME('%Y-%m-%dT%H:%M:%SZ', DATETIME(s.ancmt_from, 'unixepoch')),
                   STRFTIME('%Y-%m-%dT%H:%M:%SZ', DATETIME(s.ancmt_to, 'unixepoch')),
                   b.zwstid, b.name,
                   s.attrid, s.cultid
            FROM delivery_schedule s
                LEFT JOIN branch b ON b.zwstid = s.zwstid
                LEFT JOIN announcements a ON (a.year, a.dsnr) = (s.year, s.dsnr)
                LEFT JOIN delivery_schedule_wine_variety v ON (v.year, v.dsnr) = (s.year, s.dsnr)
                LEFT JOIN delivery d ON (d.date, d.zwstid) = (s.date, s.zwstid)
                LEFT JOIN delivery_part p ON (p.year, p.did) = (d.year, d.did) AND p.sortid = v.sortid
            """
        if len(clauses) > 0:
            sql += f" WHERE {' AND '.join(clauses)}"
        sql += " GROUP BY s.year, s.dsnr"
        sql += " ORDER BY s.year, s.date, s.zwstid, s.description, s.dsnr"

        # delivered weight per (date, zwstid, cultid) for days with exactly one schedule for that cultivation
        rows1 = CNX.execute("""
            SELECT date, zwstid, cultid, SUM(weight)
            FROM delivery d
                JOIN delivery_part p ON (p.year, p.did) = (d.year, d.did)
            WHERE (d.date, d.zwstid, COALESCE(p.cultid, '')) IN
                  (SELECT date, zwstid, COALESCE(cultid, '') FROM delivery_schedule GROUP BY date, zwstid, cultid HAVING COUNT(*) = 1)
            GROUP BY date, zwstid, cultid
            """)
        days1 = {(r[0], r[1], r[2]): r[3] for r in rows1}
        # delivered weight per (date, zwstid, attrid) for days with exactly one schedule for that attribute
        rows2 = CNX.execute("""
            SELECT date, zwstid, attrid, SUM(weight)
            FROM delivery d
                JOIN delivery_part p ON (p.year, p.did) = (d.year, d.did)
            WHERE (d.date, d.zwstid, COALESCE(p.attrid, '')) IN
                  (SELECT date, zwstid, COALESCE(attrid, '') FROM delivery_schedule GROUP BY date, zwstid, attrid HAVING COUNT(*) = 1)
            GROUP BY date, zwstid, attrid
            """)
        days2 = {(r[0], r[1], r[2]): r[3] for r in rows2}

        # delivered_weight prefers the unambiguous per-day cultivation/attribute totals over the per-variety sum
        self.exec_collection(
            sql,
            lambda r: f'{{"year":{r[0]:4},"dsnr":{r[1]:2},"date":"{r[2]}",'
                      f'"branch":{{"zwstid":{jdmp(r[10])},"name":{jdmp(r[11]):20}}},'
                      f'"description":{jdmp(r[3]):50},'
                      f'"max_weight":{jdmp(r[4]):6},'
                      f'"is_cancelled":{jdmp(r[5], is_bool=True)},'
                      f'"announced_weight":{r[6]:6},'
                      f'"delivered_weight":{days1.get((r[2], r[10], r[13]), days2.get((r[2], r[10], r[12]), r[7] or 0)):6},'
                      f'"announcement_from":{jdmp(r[8])},'
                      f'"announcement_to":{jdmp(r[9])}}}',
            filters, offset, limit, distinct=(['s.year', 's.dsnr'], [1, 2]))
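
    # Example request handled by do_GET_delivery_schedules above (values are made up):
    #   GET /delivery_schedules?filters=year=2023,date=2023-09-15&limit=20
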
    def do_HEAD(self):
        self.do_GET()

    def do_OPTIONS(self):
        self.send('')

    def do_GET(self):
        try:
            username, role = self.authorize()
            if not username or not role:
                return

            # split path and query string, decode query parameters
            parts = self.path.split('?', 1)
            if len(parts) == 1:
                path, query = parts[0], {}
            else:
                path, query = parts[0], {urllib.parse.unquote(s[0]): urllib.parse.unquote(s[-1])
                                         for s in [p.split('=', 1) for p in parts[1].split('&')]}
            filters = [Filter.from_str(f) for f in (query['filters'].split(',') if 'filters' in query else [])]
            offset = int(query['offset']) if 'offset' in query else None
            limit = int(query['limit']) if 'limit' in query else None
            order = query['order'] if 'order' in query else None

            if path == '/wine/varieties':
                self.exec_collection(
                    "SELECT sortid, type, name, comment FROM wine_variety",
                    lambda r: f'{{"sortid":{jdmp(r[0])},"type":{jdmp(r[1])},"name":{jdmp(r[2])},"comment":{jdmp(r[3])}}}',
                    filters)
            elif path == '/wine/quality_levels':
                self.exec_collection(
                    "SELECT qualid, name, min_kmw, predicate FROM wine_quality_level",
                    lambda r: f'{{"qualid":{jdmp(r[0])},"name":{jdmp(r[1])},"min_kmw":{jdmp(r[2])},"is_predicate":{jdmp(r[3], is_bool=True)}}}',
                    filters)
            elif path == '/wine/attributes':
                self.exec_collection(
                    "SELECT attrid, name FROM wine_attribute",
                    lambda r: f'{{"attrid":{jdmp(r[0])},"name":{jdmp(r[1])}}}',
                    filters)
            elif path == '/wine/cultivations':
                self.exec_collection(
                    "SELECT cultid, name, description FROM wine_cultivation",
                    lambda r: f'{{"cultid":{jdmp(r[0])},"name":{jdmp(r[1])},"description":{jdmp(r[2])}}}',
                    filters)
            elif path == '/modifiers':
                self.exec_collection(
                    "SELECT year, modid, name, ordering FROM modifier",
                    lambda r: f'{{"year":{jdmp(r[0])},"modid":{jdmp(r[1])},"name":{jdmp(r[2])},"ordering":{jdmp(r[3])}}}',
                    filters)
            elif path == '/delivery_schedules':
                self.do_GET_delivery_schedules(filters, offset, limit, order)
            else:
                self.error(404, 'Invalid path')
        except BadRequestError as e:
            self.error(400, str(e))
        except Exception as e:
            traceback.print_exception(e)
            self.error(500, str(e))

def main() -> None:
    global CNX
    global USER_FILE

    sqlite3.register_adapter(datetime.date, lambda d: d.strftime('%Y-%m-%d'))
    sqlite3.register_adapter(datetime.time, lambda t: t.strftime('%H:%M:%S'))

    parser = argparse.ArgumentParser()
    parser.add_argument('db', type=str, metavar='DB')
    parser.add_argument('userfile', type=str, metavar='USER-FILE')
    parser.add_argument('-p', '--port', type=int, default=8080)
    args = parser.parse_args()

    USER_FILE = args.userfile
    CNX = sqlite3.connect(f'file:{args.db}?mode=ro', uri=True)
    CNX.create_function('REGEXP', 2, sqlite_regexp, deterministic=True)

    server = HTTPServer(('localhost', args.port), ElwigApi)
    print(f'Listening on http://localhost:{args.port}')
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        pass
    server.server_close()
    print('Good bye!')


if __name__ == '__main__':
    main()
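
# Example invocation and request (paths and credentials are placeholders):
#   ./src/elwig-backend elwig.db users.txt --port 8080
#   curl -u alice:secret http://localhost:8080/wine/varieties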