import datetime

from loguru import logger
from sqlalchemy.exc import OperationalError

from lnbits import bolt11
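
# Migrations below run in numerical order. The `dbversions` table created in
# m000 records the latest applied migration version, so no migration ever runs
# twice.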


async def m000_create_migrations_table(db):
    await db.execute(
        """
        CREATE TABLE IF NOT EXISTS dbversions (
            db TEXT PRIMARY KEY,
            version INT NOT NULL
        )
    """
    )


async def m001_initial(db):
    """
    Initial LNbits tables.
    """
    await db.execute(
        """
        CREATE TABLE IF NOT EXISTS accounts (
            id TEXT PRIMARY KEY,
            email TEXT,
            pass TEXT
        );
    """
    )
    await db.execute(
        """
        CREATE TABLE IF NOT EXISTS extensions (
            "user" TEXT NOT NULL,
            extension TEXT NOT NULL,
            active BOOLEAN DEFAULT false,

            UNIQUE ("user", extension)
        );
    """
    )
    await db.execute(
        """
        CREATE TABLE IF NOT EXISTS wallets (
            id TEXT PRIMARY KEY,
            name TEXT NOT NULL,
            "user" TEXT NOT NULL,
            adminkey TEXT NOT NULL,
            inkey TEXT
        );
    """
    )
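    # NOTE: `db.big_int` and `db.timestamp_now` are placeholders supplied by the
    # db wrapper, presumably resolving to the backend-appropriate integer type
    # and "current timestamp" default expression.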
    await db.execute(
        f"""
        CREATE TABLE IF NOT EXISTS apipayments (
            payhash TEXT NOT NULL,
            amount {db.big_int} NOT NULL,
            fee INTEGER NOT NULL DEFAULT 0,
            wallet TEXT NOT NULL,
            pending BOOLEAN NOT NULL,
            memo TEXT,
            time TIMESTAMP NOT NULL DEFAULT {db.timestamp_now},
            UNIQUE (wallet, payhash)
        );
    """
    )
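
    # The balances view computes, per wallet, the sum of settled incoming
    # payments plus all outgoing payments and their fees; outgoing payments
    # are counted even while still pending.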
    await db.execute(
        """
        CREATE VIEW balances AS
        SELECT wallet, COALESCE(SUM(s), 0) AS balance FROM (
            SELECT wallet, SUM(amount) AS s -- incoming
            FROM apipayments
            WHERE amount > 0 AND pending = false -- don't sum pending
            GROUP BY wallet
            UNION ALL
            SELECT wallet, SUM(amount + fee) AS s -- outgoing, sum fees
            FROM apipayments
            WHERE amount < 0 -- do sum pending
            GROUP BY wallet
        )x
        GROUP BY wallet;
    """
    )


async def m002_add_fields_to_apipayments(db):
    """
    Adding fields to apipayments for better accounting,
    and renaming payhash to checking_id since that is what it really is.
    """
    try:
        await db.execute("ALTER TABLE apipayments RENAME COLUMN payhash TO checking_id")
        await db.execute("ALTER TABLE apipayments ADD COLUMN hash TEXT")
        await db.execute("CREATE INDEX by_hash ON apipayments (hash)")
        await db.execute("ALTER TABLE apipayments ADD COLUMN preimage TEXT")
        await db.execute("ALTER TABLE apipayments ADD COLUMN bolt11 TEXT")
        await db.execute("ALTER TABLE apipayments ADD COLUMN extra TEXT")

        import json
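
        # Older extension payments tagged themselves by prefixing the memo with
        # "#<extension> "; move that tag into the JSON `extra` column and strip
        # the prefix from the memo.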
        rows = await (await db.execute("SELECT * FROM apipayments")).fetchall()
        for row in rows:
            if not row["memo"] or not row["memo"].startswith("#"):
                continue

            for ext in ["withdraw", "events", "lnticket", "paywall", "tpos"]:
                prefix = "#" + ext + " "
                if row["memo"].startswith(prefix):
                    new = row["memo"][len(prefix) :]
                    await db.execute(
                        """
                        UPDATE apipayments SET extra = ?, memo = ?
                        WHERE checking_id = ? AND memo = ?
                        """,
                        (
                            json.dumps({"tag": ext}),
                            new,
                            row["checking_id"],
                            row["memo"],
                        ),
                    )
                    break
    except OperationalError:
        # This is necessary because this migration may run twice in some
        # environments. Catching errors like this won't be necessary anymore
        # now that we keep track of db versions, so no migration ever runs twice.
        pass


async def m003_add_invoice_webhook(db):
    """
    Special column for webhook endpoints that can be assigned
    to each different invoice.
    """

    await db.execute("ALTER TABLE apipayments ADD COLUMN webhook TEXT")
    await db.execute("ALTER TABLE apipayments ADD COLUMN webhook_status TEXT")


async def m004_ensure_fees_are_always_negative(db):
    """
    Use abs() so wallet backends don't have to care about the sign of the fees.
    """

    await db.execute("DROP VIEW balances")
    await db.execute(
        """
        CREATE VIEW balances AS
        SELECT wallet, COALESCE(SUM(s), 0) AS balance FROM (
            SELECT wallet, SUM(amount) AS s -- incoming
            FROM apipayments
            WHERE amount > 0 AND pending = false -- don't sum pending
            GROUP BY wallet
            UNION ALL
            SELECT wallet, SUM(amount - abs(fee)) AS s -- outgoing, sum fees
            FROM apipayments
            WHERE amount < 0 -- do sum pending
            GROUP BY wallet
        )x
        GROUP BY wallet;
    """
    )


async def m005_balance_check_balance_notify(db):
    """
    Keep track of balanceCheck-enabled lnurl-withdrawals to be consumed by an
    LNbits wallet and of balanceNotify URLs supplied by users to empty their wallets.
    """

    await db.execute(
        """
        CREATE TABLE IF NOT EXISTS balance_check (
            wallet TEXT NOT NULL REFERENCES wallets (id),
            service TEXT NOT NULL,
            url TEXT NOT NULL,

            UNIQUE(wallet, service)
        );
    """
    )

    await db.execute(
        """
        CREATE TABLE IF NOT EXISTS balance_notify (
            wallet TEXT NOT NULL REFERENCES wallets (id),
            url TEXT NOT NULL,

            UNIQUE(wallet, url)
        );
    """
    )


async def m006_add_invoice_expiry_to_apipayments(db):
    """
    Adds invoice expiry column to apipayments.
    """
    try:
        await db.execute("ALTER TABLE apipayments ADD COLUMN expiry TIMESTAMP")
    except OperationalError:
        pass


async def m007_set_invoice_expiries(db):
    """
    Precomputes invoice expiry for existing pending incoming payments.
    """
    try:
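        # Select pending incoming payments that still carry a bolt11 string but
        # have no expiry yet, decode each invoice and store its expiry
        # (creation date + expiry seconds) in the new column.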
        rows = await (
            await db.execute(
                f"""
                SELECT bolt11, checking_id
                FROM apipayments
                WHERE pending = true
                AND amount > 0
                AND bolt11 IS NOT NULL
                AND expiry IS NULL
                AND time < {db.timestamp_now}
                """
            )
        ).fetchall()
        if len(rows):
            logger.info(f"Migration: Checking expiry of {len(rows)} invoices")
        for i, (payment_request, checking_id) in enumerate(rows):
            try:
                invoice = bolt11.decode(payment_request)
                if invoice.expiry is None:
                    continue

                expiration_date = datetime.datetime.fromtimestamp(
                    invoice.date + invoice.expiry
                )
                logger.info(
                    f"Migration: {i+1}/{len(rows)} setting expiry of invoice {invoice.payment_hash} to {expiration_date}"
                )
                await db.execute(
                    """
                    UPDATE apipayments SET expiry = ?
                    WHERE checking_id = ? AND amount > 0
                    """,
                    (
                        db.datetime_to_timestamp(expiration_date),
                        checking_id,
                    ),
                )
            except Exception:
                continue
    except OperationalError:
        # This is necessary because this migration may run twice in some
        # environments. Catching errors like this won't be necessary anymore
        # now that we keep track of db versions, so no migration ever runs twice.
        pass


async def m008_create_admin_settings_table(db):
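    # Assumption: `editable_settings` holds the admin-editable settings
    # serialized as a JSON text blob, hence the '{}' default.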
    await db.execute(
        """
        CREATE TABLE IF NOT EXISTS settings (
            super_user TEXT,
            editable_settings TEXT NOT NULL DEFAULT '{}'
        );
    """
    )


async def m009_create_installed_extensions_table(db):
    await db.execute(
        """
        CREATE TABLE IF NOT EXISTS installed_extensions (
            id TEXT PRIMARY KEY,
            version TEXT NOT NULL,
            name TEXT NOT NULL,
            short_description TEXT,
            icon TEXT,
            icon_url TEXT,
            stars INT NOT NULL DEFAULT 0,
            active BOOLEAN DEFAULT false,
            meta TEXT NOT NULL DEFAULT '{}'
        );
    """
    )