2022-07-17 13:11:13 +02:00
|
|
|
import argparse
import os
import sqlite3
import sys
from typing import List, Optional

import psycopg2
from environs import Env  # type: ignore
|
|
|
|
|
|
|
|
# Load settings from a local .env file so LNBITS_* values below can come
# from the same configuration LNbits itself uses.
env = Env()
env.read_env()
|
2022-06-01 14:53:05 +02:00
|
|
|
|
2021-12-28 15:05:25 +01:00
|
|
|
# Python script to migrate an LNbits SQLite DB to Postgres
|
|
|
|
# All credits to @Fritz446 for the awesome work
|
2021-11-18 12:33:26 +01:00
|
|
|
|
|
|
|
|
2021-12-28 15:05:25 +01:00
|
|
|
# pip install psycopg2 OR psycopg2-binary
|
2021-11-18 12:33:26 +01:00
|
|
|
|
|
|
|
|
|
|
|
# Change these values as needed
|
|
|
|
|
2022-07-17 13:11:13 +02:00
|
|
|
|
2022-08-17 15:42:01 +02:00
|
|
|
# Folder containing database.sqlite3 and the ext_*.sqlite3 extension DBs.
sqfolder = env.str("LNBITS_DATA_FOLDER", default=None)

# Target Postgres connection string; required for the migration target.
LNBITS_DATABASE_URL = env.str("LNBITS_DATABASE_URL", default=None)
if LNBITS_DATABASE_URL is None:
    print("missing LNBITS_DATABASE_URL")
    sys.exit(1)
else:
    # parse postgres://lnbits:postgres@localhost:5432/lnbits
    # NOTE(review): naive split-based parsing — a password containing ':', '@'
    # or '/' breaks these expressions; urllib.parse.urlsplit would be safer.
    # Verify the URL shape before running against unusual credentials.
    pgdb = LNBITS_DATABASE_URL.split("/")[-1]
    pguser = LNBITS_DATABASE_URL.split("@")[0].split(":")[-2][2:]
    pgpswd = LNBITS_DATABASE_URL.split("@")[0].split(":")[-1]
    pghost = LNBITS_DATABASE_URL.split("@")[1].split(":")[0]
    pgport = LNBITS_DATABASE_URL.split("@")[1].split(":")[1].split("/")[0]
    pgschema = ""
|
|
|
|
|
2021-11-18 12:33:26 +01:00
|
|
|
|
|
|
|
def get_sqlite_cursor(sqdb) -> sqlite3.Cursor:
    """Open the SQLite database at *sqdb* and return a cursor.

    The caller can reach the underlying connection via ``cursor.connection``.
    Fix: the return annotation was ``-> sqlite3`` (the module object), which
    is not a valid type; it is now ``sqlite3.Cursor``.
    """
    consq = sqlite3.connect(sqdb)
    return consq.cursor()
|
|
|
|
|
|
|
|
|
|
|
|
def get_postgres_cursor():
    """Open a fresh connection to the target Postgres DB and return a cursor.

    Connection parameters come from the module-level values parsed out of
    LNBITS_DATABASE_URL. The caller owns the connection (reachable via
    ``cursor.connection``) and is responsible for closing it.
    """
    connection = psycopg2.connect(
        database=pgdb,
        user=pguser,
        password=pgpswd,
        host=pghost,
        port=pgport,
    )
    return connection.cursor()
|
|
|
|
|
|
|
|
|
|
|
|
def check_db_versions(sqdb):
    """Verify SQLite and Postgres ``dbversions`` tables agree before migrating.

    Compares the per-extension schema version recorded in both databases and
    aborts the migration on any mismatch.

    Raises:
        Exception: if any version present in both DBs differs.
    """
    sqlite = get_sqlite_cursor(sqdb)
    dblite = dict(sqlite.execute("SELECT * FROM dbversions;").fetchall())
    sqlite.close()
    # fix: also close the SQLite connection, not just the cursor
    sqlite.connection.close()

    postgres = get_postgres_cursor()
    postgres.execute("SELECT * FROM public.dbversions;")
    dbpost = dict(postgres.fetchall())

    # fix: the original re-tested `key in dblite` while iterating dblite,
    # which is always true; iterate items() directly instead.
    for key, version in dblite.items():
        if key in dbpost and version != dbpost[key]:
            raise Exception(
                f"sqlite database version ({version}) of {key} doesn't match postgres database version {dbpost[key]}"
            )

    connection = postgres.connection
    postgres.close()
    connection.close()

    print("Database versions OK, converting")
|
|
|
|
|
|
|
|
|
|
|
|
def fix_id(seq, values):
    """Advance the Postgres sequence *seq* past the highest migrated id.

    Args:
        seq: name of the sequence to update.
        values: migrated rows ordered by id; the last row's first column is
            taken as the current maximum id. No-op when empty.
    """
    # fix: `not values` already covers the empty case; the extra
    # `len(values) == 0` test was redundant.
    if not values:
        return

    postgres = get_postgres_cursor()

    # fix: idiomatic negative indexing instead of values[len(values) - 1]
    max_id = values[-1][0]
    postgres.execute(f"SELECT setval('{seq}', {max_id});")

    connection = postgres.connection
    postgres.close()
    connection.close()
|
|
|
|
|
|
|
|
|
|
|
|
def insert_to_pg(query, data):
    """Insert every row of *data* with *query*, committing once at the end.

    Honors the module-level ``args.ignore_errors`` flag: when set, a failing
    row is reported and skipped; otherwise the first failure aborts.

    Raises:
        ValueError: on the first failing row when errors are not ignored.
    """
    # fix: `not data` is the idiomatic empty check
    if not data:
        return

    cursor = get_postgres_cursor()
    connection = cursor.connection

    for d in data:
        try:
            cursor.execute(query, d)
        except Exception as e:
            if args.ignore_errors:
                print(e)
                print(f"Failed to insert {d}")
            else:
                print("query:", query)
                print("data:", d)
                # fix: chain the original exception explicitly so the root
                # cause (e.g. the psycopg2 error) survives in the traceback
                raise ValueError(f"Failed to insert {d}") from e
    connection.commit()

    cursor.close()
    connection.close()
|
|
|
|
|
|
|
|
|
2022-08-09 10:45:50 +02:00
|
|
|
def migrate_core(file: str, exclude_tables: Optional[List[str]] = None):
    """Migrate the core LNbits tables from *file* into the ``public`` schema.

    Args:
        file: path to the core SQLite database (database.sqlite3).
        exclude_tables: table names to skip. Fix: was a mutable default
            argument (``[]``), which is shared across calls; ``None`` is used
            as the sentinel instead (backward compatible for all callers).
    """
    print(f"Migrating core: {file}")
    migrate_db(file, "public", exclude_tables or [])
    print("✅ Migrated core")
|
2021-11-18 12:33:26 +01:00
|
|
|
|
|
|
|
|
2022-08-09 10:45:50 +02:00
|
|
|
def migrate_ext(file: str):
    """Migrate one extension database into its own Postgres schema.

    The schema name is derived from the filename: "ext_foo.sqlite3" -> "foo".
    """
    base = os.path.basename(file)
    schema = base.replace("ext_", "").partition(".")[0]
    print(f"Migrating ext: {file}.{schema}")
    migrate_db(file, schema)
    print(f"✅ Migrated ext: {schema}")
|
2021-11-18 12:33:26 +01:00
|
|
|
|
|
|
|
|
2022-08-09 10:45:50 +02:00
|
|
|
def migrate_db(file: str, schema: str, exclude_tables: Optional[List[str]] = None):
    """Copy every user table from the SQLite DB *file* into *schema* on Postgres.

    Args:
        file: path to the SQLite database to read.
        schema: target Postgres schema name.
        exclude_tables: table names to skip. Fix: was a mutable default
            argument (``[]``); ``None`` sentinel is backward compatible.
    """
    exclude_tables = exclude_tables or []
    sq = get_sqlite_cursor(file)
    # internal SQLite bookkeeping tables (sqlite_*) are excluded
    tables = sq.execute(
        """
        SELECT name FROM sqlite_master
        WHERE type='table' AND name not like 'sqlite?_%' escape '?'
        """
    ).fetchall()

    for table in tables:
        tableName = table[0]
        print(f"Migrating table {tableName}")
        # hard coded skip for dbversions (already produced during startup)
        if tableName == "dbversions":
            continue
        if tableName in exclude_tables:
            continue

        columns = sq.execute(f"PRAGMA table_info({tableName})").fetchall()
        q = build_insert_query(schema, tableName, columns)

        data = sq.execute(f"SELECT * FROM {tableName};").fetchall()
        insert_to_pg(q, data)
    sq.close()
|
2022-07-17 13:11:13 +02:00
|
|
|
|
2021-11-18 12:33:26 +01:00
|
|
|
|
2022-08-09 10:45:50 +02:00
|
|
|
def build_insert_query(schema, tableName, columns):
    """Build a parameterized INSERT statement for one table.

    *columns* are ``PRAGMA table_info`` rows: index 1 is the column name,
    index 2 the declared SQLite type (used to pick a placeholder cast).
    """
    quoted_names = [f'"{col[1]}"' for col in columns]
    placeholders = [to_column_type(col[2]) for col in columns]
    to_columns = ", ".join(quoted_names)
    values = ", ".join(placeholders)
    return f"""
    INSERT INTO {schema}.{tableName}({to_columns})
    VALUES ({values});
    """
|
2022-07-17 13:11:13 +02:00
|
|
|
|
2021-11-18 12:33:26 +01:00
|
|
|
|
2022-08-09 10:45:50 +02:00
|
|
|
def to_column_type(columnType):
    """Map a declared SQLite column type to a psycopg2 placeholder expression.

    TIMESTAMP columns are converted via to_timestamp(); BOOLEAN/BOOL columns
    get an explicit ::boolean cast; everything else uses a plain %s.
    """
    placeholder_by_type = {
        "TIMESTAMP": "to_timestamp(%s)",
        "BOOLEAN": "%s::boolean",
        "BOOL": "%s::boolean",
    }
    return placeholder_by_type.get(columnType, "%s")
|
2021-11-18 12:33:26 +01:00
|
|
|
|
|
|
|
|
2022-07-25 09:13:41 +02:00
|
|
|
# CLI definition: one optional positional path plus three behavior flags.
parser = argparse.ArgumentParser(
    description="LNbits migration tool for migrating data from SQLite to PostgreSQL"
)
parser.add_argument(
    dest="sqlite_path",
    # NOTE(review): `const` only takes effect for optionals with nargs="?";
    # on a positional it appears to be a no-op — confirm and possibly drop.
    const=True,
    nargs="?",
    help=f"SQLite DB folder *or* single extension db file to migrate. Default: {sqfolder}",
    default=sqfolder,
    type=str,
)
parser.add_argument(
    "-e",
    "--extensions-only",
    help="Migrate only extensions",
    required=False,
    default=False,
    action="store_true",
)
parser.add_argument(
    "-s",
    # NOTE(review): help text contradicts the flag name, and this flag is not
    # read anywhere in the visible code — verify intended semantics.
    "--skip-missing",
    help="Error if migration is missing for an extension",
    required=False,
    default=False,
    action="store_true",
)
parser.add_argument(
    "-i",
    "--ignore-errors",
    help="Don't error if migration fails",
    required=False,
    default=False,
    action="store_true",
)

# Parsed at import time; insert_to_pg reads args.ignore_errors globally.
args = parser.parse_args()
|
|
|
|
|
2022-07-25 09:13:41 +02:00
|
|
|
print("Selected path: ", args.sqlite_path)

# fix: when LNBITS_DATA_FOLDER is unset and no path is passed on the CLI,
# sqlite_path is None and os.path.isdir(None) raises TypeError — fail with a
# clear message instead.
if args.sqlite_path is None:
    print("missing SQLite path: set LNBITS_DATA_FOLDER or pass a path argument")
    sys.exit(1)

# Directory mode: migrate the core DB first (unless --extensions-only).
if os.path.isdir(args.sqlite_path):
    exclude_tables = ["dbversions"]
    file = os.path.join(args.sqlite_path, "database.sqlite3")
    check_db_versions(file)
    if not args.extensions_only:
        migrate_core(file, exclude_tables)

# Collect candidate extension DB files: everything in the folder, or the
# single file that was passed directly.
if os.path.isdir(args.sqlite_path):
    files = [
        os.path.join(args.sqlite_path, file) for file in os.listdir(args.sqlite_path)
    ]
else:
    files = [args.sqlite_path]

# Extension DBs that are deliberately skipped.
excluded_exts = ["ext_lnurlpos.sqlite3"]
for file in files:
    filename = os.path.basename(file)
    if filename.startswith("ext_") and filename not in excluded_exts:
        migrate_ext(file)
|