2021-08-30 19:55:02 +02:00
|
|
|
import asyncio
|
2021-08-01 15:30:27 +02:00
|
|
|
import datetime
|
2021-11-15 13:11:42 +01:00
|
|
|
import os
|
2022-11-25 14:53:03 +01:00
|
|
|
import re
|
2021-11-15 13:11:42 +01:00
|
|
|
import time
|
2021-03-26 23:10:30 +01:00
|
|
|
from contextlib import asynccontextmanager
|
2021-11-15 13:11:42 +01:00
|
|
|
from typing import Optional
|
|
|
|
|
2022-07-07 14:30:16 +02:00
|
|
|
from loguru import logger
|
2021-08-30 19:55:02 +02:00
|
|
|
from sqlalchemy import create_engine
|
|
|
|
from sqlalchemy_aio.base import AsyncConnection
|
2023-01-09 11:14:44 +01:00
|
|
|
from sqlalchemy_aio.strategy import ASYNCIO_STRATEGY
|
2019-12-13 18:14:25 +01:00
|
|
|
|
2022-10-03 16:46:46 +02:00
|
|
|
from lnbits.settings import settings
|
2019-12-13 18:14:25 +01:00
|
|
|
|
2021-06-22 04:22:52 +02:00
|
|
|
# Backend identifiers used to branch on SQL-dialect differences.
POSTGRES = "POSTGRES"
COCKROACH = "COCKROACH"
SQLITE = "SQLITE"


class Compat:
    """SQL-dialect compatibility helpers shared by database wrappers.

    Subclasses assign ``type`` (one of POSTGRES / COCKROACH / SQLITE) and
    ``schema``; each helper then renders the SQL fragment appropriate for
    that backend.  An unrecognized backend yields the "<nothing>" sentinel.
    """

    type: Optional[str] = "<inherited>"
    schema: Optional[str] = "<inherited>"

    def interval_seconds(self, seconds: int) -> str:
        """Render an SQL interval of *seconds* for the active backend."""
        if self.type == SQLITE:
            return f"{seconds}"
        if self.type in (POSTGRES, COCKROACH):
            return f"interval '{seconds} seconds'"
        return "<nothing>"

    def datetime_to_timestamp(self, date: datetime.datetime):
        """Convert *date* to the backend's timestamp representation.

        Postgres/Cockroach receive a formatted string; SQLite receives a
        unix timestamp (float, local time via ``time.mktime``).
        """
        if self.type == SQLITE:
            return time.mktime(date.timetuple())
        if self.type in (POSTGRES, COCKROACH):
            return date.strftime("%Y-%m-%d %H:%M:%S")
        return "<nothing>"

    @property
    def timestamp_now(self) -> str:
        """SQL expression producing the current timestamp."""
        if self.type == SQLITE:
            return "(strftime('%s', 'now'))"
        if self.type in (POSTGRES, COCKROACH):
            return "now()"
        return "<nothing>"

    @property
    def serial_primary_key(self) -> str:
        """Column definition for an auto-incrementing primary key."""
        if self.type == SQLITE:
            return "INTEGER PRIMARY KEY AUTOINCREMENT"
        if self.type in (POSTGRES, COCKROACH):
            return "SERIAL PRIMARY KEY"
        return "<nothing>"

    @property
    def references_schema(self) -> str:
        """Schema prefix for REFERENCES clauses (empty on SQLite)."""
        if self.type == SQLITE:
            return ""
        if self.type in (POSTGRES, COCKROACH):
            return f"{self.schema}."
        return "<nothing>"

    @property
    def big_int(self) -> str:
        """64-bit integer column type ("BIGINT" on Postgres, "INT" elsewhere)."""
        if self.type == POSTGRES:
            return "BIGINT"
        return "INT"
class Connection(Compat):
    """A single open database connection with query/value rewriting.

    Queries are written in SQLite style ('?' placeholders); ``rewrite_query``
    translates them for Postgres/Cockroach.  String values are stripped of
    HTML tags and entities before being sent to the database.
    """

    # Matches HTML tags and character entities.  Compiled once at class
    # creation instead of on every rewrite_values() call.
    CLEANR = re.compile("<.*?>|&([a-z0-9]+|#[0-9]{1,6}|#x[0-9a-f]{1,6});")

    def __init__(self, conn: AsyncConnection, txn, typ, name, schema):
        self.conn = conn  # underlying sqlalchemy_aio async connection
        self.txn = txn  # active transaction handle
        self.type = typ  # backend identifier (POSTGRES / COCKROACH / SQLITE)
        self.name = name  # database name
        self.schema = schema  # schema name, or None

    def rewrite_query(self, query: str) -> str:
        """Translate a SQLite-style query for the active backend.

        Postgres/Cockroach: escape literal '%' and convert '?' placeholders
        to '%s'.  SQLite queries pass through unchanged.
        """
        if self.type in {POSTGRES, COCKROACH}:
            query = query.replace("%", "%%")
            query = query.replace("?", "%s")
        return query

    def rewrite_values(self, values):
        """Return *values* as a tuple with HTML stripped from every string.

        A bare string is treated as a one-element tuple; non-string entries
        are passed through untouched.
        """

        def cleanhtml(raw_html):
            if isinstance(raw_html, str):
                return re.sub(self.CLEANR, "", raw_html)
            return raw_html

        # normalize to a list, clean each entry, then hand back a tuple
        value_list = [values] if isinstance(values, str) else list(values)
        return tuple(cleanhtml(value) for value in value_list)

    async def fetchall(self, query: str, values: tuple = ()) -> list:
        """Run *query* and return all resulting rows."""
        result = await self.conn.execute(
            self.rewrite_query(query), self.rewrite_values(values)
        )
        return await result.fetchall()

    async def fetchone(self, query: str, values: tuple = ()):
        """Run *query* and return the first row (or None), closing the cursor."""
        result = await self.conn.execute(
            self.rewrite_query(query), self.rewrite_values(values)
        )
        row = await result.fetchone()
        await result.close()
        return row

    async def execute(self, query: str, values: tuple = ()):
        """Run *query* and return the raw result object."""
        return await self.conn.execute(
            self.rewrite_query(query), self.rewrite_values(values)
        )
class Database(Compat):
    """A named logical database bound to the configured backend.

    Uses Postgres/Cockroach when ``settings.lnbits_database_url`` is set,
    otherwise a per-database SQLite file inside the data folder.  Exposes an
    async connection context manager plus fetchall/fetchone/execute helpers
    that each open (and release) a fresh connection.
    """

    def __init__(self, db_name: str):
        """Configure the engine for *db_name* without opening a connection."""
        self.name = db_name

        if settings.lnbits_database_url:
            database_uri = settings.lnbits_database_url

            if database_uri.startswith("cockroachdb://"):
                self.type = COCKROACH
            else:
                self.type = POSTGRES

            import psycopg2

            def _parse_timestamp(value, _):
                # psycopg2 delivers timestamps as strings (with or without a
                # fractional-seconds part); convert them to unix timestamps
                # (floats, local time) so results match SQLite's storage.
                if value is None:
                    return None
                f = "%Y-%m-%d %H:%M:%S.%f"
                if not "." in value:
                    f = "%Y-%m-%d %H:%M:%S"
                return time.mktime(datetime.datetime.strptime(value, f).timetuple())

            # Register psycopg2 converters so Postgres results come back in
            # the same shapes SQLite produces:
            # DECIMAL/NUMERIC columns -> float (or None)
            psycopg2.extensions.register_type(
                psycopg2.extensions.new_type(
                    psycopg2.extensions.DECIMAL.values,
                    "DEC2FLOAT",
                    lambda value, curs: float(value) if value is not None else None,
                )
            )
            # date/time columns -> unix timestamps
            # (OIDs 1082/1083/1266 are Postgres DATE, TIME, TIMETZ)
            psycopg2.extensions.register_type(
                psycopg2.extensions.new_type(
                    (1082, 1083, 1266),
                    "DATE2INT",
                    lambda value, curs: time.mktime(value.timetuple())
                    if value is not None
                    else None,
                )
            )
            # timestamp columns -> unix timestamps via _parse_timestamp
            # (OIDs 1184/1114 are Postgres TIMESTAMPTZ, TIMESTAMP)
            psycopg2.extensions.register_type(
                psycopg2.extensions.new_type(
                    (1184, 1114), "TIMESTAMP2INT", _parse_timestamp
                )
            )
        else:
            if os.path.isdir(settings.lnbits_data_folder):
                self.path = os.path.join(
                    settings.lnbits_data_folder, f"{self.name}.sqlite3"
                )
                database_uri = f"sqlite:///{self.path}"
                self.type = SQLITE
            else:
                raise NotADirectoryError(
                    f"LNBITS_DATA_FOLDER named {settings.lnbits_data_folder} was not created"
                    f" - please 'mkdir {settings.lnbits_data_folder}' and try again"
                )
        logger.trace(f"database {self.type} added for {self.name}")
        # Extension databases ("ext_foo") live in their own schema "foo";
        # the core database uses no explicit schema.
        self.schema = self.name
        if self.name.startswith("ext_"):
            self.schema = self.name[4:]
        else:
            self.schema = None

        self.engine = create_engine(database_uri, strategy=ASYNCIO_STRATEGY)
        # Serializes access: one connection/transaction at a time per Database.
        self.lock = asyncio.Lock()

    @asynccontextmanager
    async def connect(self):
        """Yield a `Connection` inside a transaction, holding the db lock.

        Creates (Postgres/Cockroach) or attaches (SQLite) the schema before
        yielding.  The transaction commits or rolls back when the
        ``conn.begin()`` context exits; the lock is always released.
        """
        await self.lock.acquire()
        try:
            async with self.engine.connect() as conn:
                async with conn.begin() as txn:
                    wconn = Connection(conn, txn, self.type, self.name, self.schema)

                    if self.schema:
                        if self.type in {POSTGRES, COCKROACH}:
                            await wconn.execute(
                                f"CREATE SCHEMA IF NOT EXISTS {self.schema}"
                            )
                        elif self.type == SQLITE:
                            await wconn.execute(
                                f"ATTACH '{self.path}' AS {self.schema}"
                            )

                    yield wconn
        finally:
            self.lock.release()

    async def fetchall(self, query: str, values: tuple = ()) -> list:
        """Open a connection, run *query*, and return all rows."""
        async with self.connect() as conn:
            result = await conn.execute(query, values)
            return await result.fetchall()

    async def fetchone(self, query: str, values: tuple = ()):
        """Open a connection, run *query*, and return the first row (or None)."""
        async with self.connect() as conn:
            result = await conn.execute(query, values)
            row = await result.fetchone()
            await result.close()
            return row

    async def execute(self, query: str, values: tuple = ()):
        """Open a connection, run *query*, and return the result object."""
        async with self.connect() as conn:
            return await conn.execute(query, values)

    @asynccontextmanager
    async def reuse_conn(self, conn: Connection):
        """Async context manager that yields an already-open `Connection` as-is."""
        yield conn