Merge pull request #427 from arcbtc/FastAPI

merge
Commit e2d1e3d261 by Arc, 2021-11-23 10:28:13 +00:00, committed by GitHub
6 changed files with 729 additions and 56 deletions

conv.py (new file, 653 lines)

@@ -0,0 +1,653 @@
## Python script to migrate an LNbits SQLite DB to Postgres
## All credits to @Fritz446 for the awesome work
## pip install psycopg2 OR psycopg2-binary
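## Usage (sketch): stop LNbits, fill in the connection values below to match your
## Postgres setup, then run 'python3 conv.py' from the LNbits folder. The defaults
## below assume the SQLite files live in the "data/" directory.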
import os
import sqlite3
import psycopg2
# Change these values as needed
sqfolder = "data/"
pgdb = "lnbits"
pguser = "postgres"
pgpswd = "yourpassword"
pghost = "localhost"
pgport = "5432"
pgschema = ""
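## For reference, these values map to a connection string of the form
## postgres://<pguser>:<pgpswd>@<pghost>:<pgport>/<pgdb>, which is also what goes
## into .env as LNBITS_DATABASE_URL once the migration is done.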
def get_sqlite_cursor(sqdb) -> sqlite3.Cursor:
consq = sqlite3.connect(sqdb)
return consq.cursor()
def get_postgres_cursor():
conpg = psycopg2.connect(
database=pgdb, user=pguser, password=pgpswd, host=pghost, port=pgport
)
return conpg.cursor()
def check_db_versions(sqdb):
sqlite = get_sqlite_cursor(sqdb)
dblite = dict(sqlite.execute("SELECT * FROM dbversions;").fetchall())
del dblite["lnurlpos"] # wrongly added?
sqlite.close()
postgres = get_postgres_cursor()
postgres.execute("SELECT * FROM public.dbversions;")
dbpost = dict(postgres.fetchall())
for key in dblite.keys():
if dblite[key] != dbpost[key]:
raise Exception(
f"sqlite database version ({dblite[key]}) of {key} doesn't match postgres database version {dbpost[key]}"
)
connection = postgres.connection
postgres.close()
connection.close()
print("Database versions OK, converting")
def fix_id(seq, values):
if not values or len(values) == 0:
return
postgres = get_postgres_cursor()
max_id = values[len(values) - 1][0]
postgres.execute(f"SELECT setval('{seq}', {max_id});")
connection = postgres.connection
postgres.close()
connection.close()
def insert_to_pg(query, data):
if len(data) == 0:
return
cursor = get_postgres_cursor()
connection = cursor.connection
for d in data:
cursor.execute(query, d)
connection.commit()
cursor.close()
connection.close()
def migrate_core(sqlite_db_file):
sq = get_sqlite_cursor(sqlite_db_file)
# ACCOUNTS
res = sq.execute("SELECT * FROM accounts;")
q = f"INSERT INTO public.accounts (id, email, pass) VALUES (%s, %s, %s);"
insert_to_pg(q, res.fetchall())
# WALLETS
res = sq.execute("SELECT * FROM wallets;")
q = f'INSERT INTO public.wallets (id, name, "user", adminkey, inkey) VALUES (%s, %s, %s, %s, %s);'
insert_to_pg(q, res.fetchall())
# API PAYMENTS
res = sq.execute("SELECT * FROM apipayments;")
q = f"""
INSERT INTO public.apipayments(
checking_id, amount, fee, wallet, pending, memo, "time", hash, preimage, bolt11, extra, webhook, webhook_status)
VALUES (%s, %s, %s, %s, %s::boolean, %s, to_timestamp(%s), %s, %s, %s, %s, %s, %s);
"""
insert_to_pg(q, res.fetchall())
# BALANCE CHECK
res = sq.execute("SELECT * FROM balance_check;")
q = f"INSERT INTO public.balance_check(wallet, service, url) VALUES (%s, %s, %s);"
insert_to_pg(q, res.fetchall())
# BALANCE NOTIFY
res = sq.execute("SELECT * FROM balance_notify;")
q = f"INSERT INTO public.balance_notify(wallet, url) VALUES (%s, %s);"
insert_to_pg(q, res.fetchall())
# EXTENSIONS
res = sq.execute("SELECT * FROM extensions;")
q = f'INSERT INTO public.extensions("user", extension, active) VALUES (%s, %s, %s::boolean);'
insert_to_pg(q, res.fetchall())
print("Migrated: core")
def migrate_ext(sqlite_db_file, schema):
sq = get_sqlite_cursor(sqlite_db_file)
if schema == "bleskomat":
# BLESKOMAT LNURLS
res = sq.execute("SELECT * FROM bleskomat_lnurls;")
q = f"""
INSERT INTO bleskomat.bleskomat_lnurls(
id, bleskomat, wallet, hash, tag, params, api_key_id, initial_uses, remaining_uses, created_time, updated_time)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
"""
insert_to_pg(q, res.fetchall())
# BLESKOMATS
res = sq.execute("SELECT * FROM bleskomats;")
q = f"""
INSERT INTO bleskomat.bleskomats(
id, wallet, api_key_id, api_key_secret, api_key_encoding, name, fiat_currency, exchange_rate_provider, fee)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s);
"""
insert_to_pg(q, res.fetchall())
elif schema == "captcha":
# CAPTCHA
res = sq.execute("SELECT * FROM captchas;")
q = f"""
INSERT INTO captcha.captchas(
id, wallet, url, memo, description, amount, "time", remembers, extras)
VALUES (%s, %s, %s, %s, %s, %s, to_timestamp(%s), %s, %s);
"""
insert_to_pg(q, res.fetchall())
elif schema == "copilot":
# OLD COPILOTS
res = sq.execute("SELECT * FROM copilots;")
q = f"""
INSERT INTO copilot.copilots(
id, "user", title, lnurl_toggle, wallet, animation1, animation2, animation3, animation1threshold, animation2threshold, animation3threshold, animation1webhook, animation2webhook, animation3webhook, lnurl_title, show_message, show_ack, show_price, amount_made, fullscreen_cam, iframe_url, "timestamp")
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, to_timestamp(%s));
"""
insert_to_pg(q, res.fetchall())
# NEW COPILOTS
res = sq.execute("SELECT * FROM newer_copilots;")
q = f"""
INSERT INTO copilot.newer_copilots(
id, "user", title, lnurl_toggle, wallet, animation1, animation2, animation3, animation1threshold, animation2threshold, animation3threshold, animation1webhook, animation2webhook, animation3webhook, lnurl_title, show_message, show_ack, show_price, amount_made, fullscreen_cam, iframe_url, "timestamp")
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, to_timestamp(%s));
"""
insert_to_pg(q, res.fetchall())
elif schema == "events":
# EVENTS
res = sq.execute("SELECT * FROM events;")
q = f"""
INSERT INTO events.events(
id, wallet, name, info, closing_date, event_start_date, event_end_date, amount_tickets, price_per_ticket, sold, "time")
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, to_timestamp(%s));
"""
insert_to_pg(q, res.fetchall())
# EVENT TICKETS
res = sq.execute("SELECT * FROM ticket;")
q = f"""
INSERT INTO events.ticket(
id, wallet, event, name, email, registered, paid, "time")
VALUES (%s, %s, %s, %s, %s, %s::boolean, %s::boolean, to_timestamp(%s));
"""
insert_to_pg(q, res.fetchall())
elif schema == "example":
# Example doesn't have a database at the moment
pass
elif schema == "hivemind":
# Hivemind doesn't have a database at the moment
pass
elif schema == "jukebox":
# JUKEBOXES
res = sq.execute("SELECT * FROM jukebox;")
q = f"""
INSERT INTO jukebox.jukebox(
id, "user", title, wallet, inkey, sp_user, sp_secret, sp_access_token, sp_refresh_token, sp_device, sp_playlists, price, profit)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
"""
insert_to_pg(q, res.fetchall())
# JUKEBOX PAYMENTS
res = sq.execute("SELECT * FROM jukebox_payment;")
q = f"""
INSERT INTO jukebox.jukebox_payment(
payment_hash, juke_id, song_id, paid)
VALUES (%s, %s, %s, %s::boolean);
"""
insert_to_pg(q, res.fetchall())
elif schema == "withdraw":
# WITHDRAW LINK
res = sq.execute("SELECT * FROM withdraw_link;")
q = f"""
INSERT INTO withdraw.withdraw_link (
id,
wallet,
title,
min_withdrawable,
max_withdrawable,
uses,
wait_time,
is_unique,
unique_hash,
k1,
open_time,
used,
usescsv
)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
"""
insert_to_pg(q, res.fetchall())
# WITHDRAW HASH CHECK
res = sq.execute("SELECT * FROM hash_check;")
q = f"""
INSERT INTO withdraw.hash_check (id, lnurl_id)
VALUES (%s, %s);
"""
insert_to_pg(q, res.fetchall())
elif schema == "watchonly":
# WALLETS
res = sq.execute("SELECT * FROM wallets;")
q = f"""
INSERT INTO watchonly.wallets (
id,
"user",
masterpub,
title,
address_no,
balance
)
VALUES (%s, %s, %s, %s, %s, %s);
"""
insert_to_pg(q, res.fetchall())
# ADDRESSES
res = sq.execute("SELECT * FROM addresses;")
q = f"""
INSERT INTO watchonly.addresses (id, address, wallet, amount)
VALUES (%s, %s, %s, %s);
"""
insert_to_pg(q, res.fetchall())
# MEMPOOL
res = sq.execute("SELECT * FROM mempool;")
q = f"""
INSERT INTO watchonly.mempool ("user", endpoint)
VALUES (%s, %s);
"""
insert_to_pg(q, res.fetchall())
elif schema == "usermanager":
# USERS
res = sq.execute("SELECT * FROM users;")
q = f"""
INSERT INTO usermanager.users (id, name, admin, email, password)
VALUES (%s, %s, %s, %s, %s);
"""
insert_to_pg(q, res.fetchall())
# WALLETS
res = sq.execute("SELECT * FROM wallets;")
q = f"""
INSERT INTO usermanager.wallets (id, admin, name, "user", adminkey, inkey)
VALUES (%s, %s, %s, %s, %s, %s);
"""
insert_to_pg(q, res.fetchall())
elif schema == "tpos":
# TPOSS
res = sq.execute("SELECT * FROM tposs;")
q = f"""
INSERT INTO tpos.tposs (id, wallet, name, currency)
VALUES (%s, %s, %s, %s);
"""
insert_to_pg(q, res.fetchall())
elif schema == "tipjar":
# TIPJARS
res = sq.execute("SELECT * FROM TipJars;")
q = f"""
INSERT INTO tipjar.TipJars (id, name, wallet, onchain, webhook)
VALUES (%s, %s, %s, %s, %s);
"""
pay_links = res.fetchall()
insert_to_pg(q, pay_links)
fix_id("tipjar.tipjars_id_seq", pay_links)
# TIPS
res = sq.execute("SELECT * FROM Tips;")
q = f"""
INSERT INTO tipjar.Tips (id, wallet, name, message, sats, tipjar)
VALUES (%s, %s, %s, %s, %s, %s);
"""
insert_to_pg(q, res.fetchall())
elif schema == "subdomains":
# DOMAIN
res = sq.execute("SELECT * FROM domain;")
q = f"""
INSERT INTO subdomains.domain (
id,
wallet,
domain,
webhook,
cf_token,
cf_zone_id,
description,
cost,
amountmade,
allowed_record_types,
time
)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, to_timestamp(%s));
"""
insert_to_pg(q, res.fetchall())
# SUBDOMAIN
res = sq.execute("SELECT * FROM subdomain;")
q = f"""
INSERT INTO subdomains.subdomain (
id,
domain,
email,
subdomain,
ip,
wallet,
sats,
duration,
paid,
record_type,
time
)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s::boolean, %s, to_timestamp(%s));
"""
insert_to_pg(q, res.fetchall())
elif schema == "streamalerts":
# SERVICES
res = sq.execute("SELECT * FROM Services;")
q = f"""
INSERT INTO streamalerts.Services (
id,
state,
twitchuser,
client_id,
client_secret,
wallet,
onchain,
servicename,
authenticated,
token
)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s::boolean, %s);
"""
services = res.fetchall()
insert_to_pg(q, services)
fix_id("streamalerts.services_id_seq", services)
# DONATIONS
res = sq.execute("SELECT * FROM Donations;")
q = f"""
INSERT INTO streamalerts.Donations (
id,
wallet,
name,
message,
cur_code,
sats,
amount,
service,
posted
)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s::boolean);
"""
insert_to_pg(q, res.fetchall())
elif schema == "splitpayments":
# TARGETS
res = sq.execute("SELECT * FROM targets;")
q = f"""
INSERT INTO splitpayments.targets (wallet, source, percent, alias)
VALUES (%s, %s, %s, %s);
"""
insert_to_pg(q, res.fetchall())
elif schema == "satspay":
# CHARGES
res = sq.execute("SELECT * FROM charges;")
q = f"""
INSERT INTO satspay.charges (
id,
"user",
description,
onchainwallet,
onchainaddress,
lnbitswallet,
payment_request,
payment_hash,
webhook,
completelink,
completelinktext,
time,
amount,
balance,
timestamp
)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, to_timestamp(%s));
"""
insert_to_pg(q, res.fetchall())
elif schema == "satsdice":
# SATSDICE PAY
res = sq.execute("SELECT * FROM satsdice_pay;")
q = f"""
INSERT INTO satsdice.satsdice_pay (
id,
wallet,
title,
min_bet,
max_bet,
amount,
served_meta,
served_pr,
multiplier,
haircut,
chance,
base_url,
open_time
)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
"""
insert_to_pg(q, res.fetchall())
# SATSDICE WITHDRAW
res = sq.execute("SELECT * FROM satsdice_withdraw;")
q = f"""
INSERT INTO satsdice.satsdice_withdraw (
id,
satsdice_pay,
value,
unique_hash,
k1,
open_time,
used
)
VALUES (%s, %s, %s, %s, %s, %s, %s);
"""
insert_to_pg(q, res.fetchall())
# SATSDICE PAYMENT
res = sq.execute("SELECT * FROM satsdice_payment;")
q = f"""
INSERT INTO satsdice.satsdice_payment (
payment_hash,
satsdice_pay,
value,
paid,
lost
)
VALUES (%s, %s, %s, %s::boolean, %s::boolean);
"""
insert_to_pg(q, res.fetchall())
# SATSDICE HASH CHECK
res = sq.execute("SELECT * FROM hash_checkw;")
q = f"""
INSERT INTO satsdice.hash_checkw (id, lnurl_id)
VALUES (%s, %s);
"""
insert_to_pg(q, res.fetchall())
elif schema == "paywall":
# PAYWALLS
res = sq.execute("SELECT * FROM paywalls;")
q = f"""
INSERT INTO paywall.paywalls(
id,
wallet,
url,
memo,
amount,
time,
remembers,
extra
)
VALUES (%s, %s, %s, %s, %s, to_timestamp(%s), %s, %s);
"""
insert_to_pg(q, res.fetchall())
elif schema == "offlineshop":
# SHOPS
res = sq.execute("SELECT * FROM shops;")
q = f"""
INSERT INTO offlineshop.shops (id, wallet, method, wordlist)
VALUES (%s, %s, %s, %s);
"""
shops = res.fetchall()
insert_to_pg(q, shops)
fix_id("offlineshop.shops_id_seq", shops)
# ITEMS
res = sq.execute("SELECT * FROM items;")
q = f"""
INSERT INTO offlineshop.items (shop, id, name, description, image, enabled, price, unit)
VALUES (%s, %s, %s, %s, %s, %s::boolean, %s, %s);
"""
items = res.fetchall()
insert_to_pg(q, items)
fix_id("offlineshop.items_id_seq", items)
elif schema == "lnurlpos":
# LNURLPOSS
res = sq.execute("SELECT * FROM lnurlposs;")
q = f"""
INSERT INTO lnurlpos.lnurlposs (id, key, title, wallet, currency, timestamp)
VALUES (%s, %s, %s, %s, %s, to_timestamp(%s));
"""
insert_to_pg(q, res.fetchall())
# LNURLPOS PAYMENT
res = sq.execute("SELECT * FROM lnurlpospayment;")
q = f"""
INSERT INTO lnurlpos.lnurlpospayment (id, posid, payhash, payload, pin, sats, timestamp)
VALUES (%s, %s, %s, %s, %s, %s, to_timestamp(%s));
"""
insert_to_pg(q, res.fetchall())
elif schema == "lnurlp":
# PAY LINKS
res = sq.execute("SELECT * FROM pay_links;")
q = f"""
INSERT INTO lnurlp.pay_links (
id,
wallet,
description,
min,
served_meta,
served_pr,
webhook_url,
success_text,
success_url,
currency,
comment_chars,
max
)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
"""
pay_links = res.fetchall()
insert_to_pg(q, pay_links)
fix_id("lnurlp.pay_links_id_seq", pay_links)
elif schema == "lndhub":
# LndHub doesn't have a database at the moment
pass
elif schema == "lnticket":
# TICKET
res = sq.execute("SELECT * FROM ticket;")
q = f"""
INSERT INTO lnticket.ticket (
id,
form,
email,
ltext,
name,
wallet,
sats,
paid,
time
)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s::boolean, to_timestamp(%s));
"""
insert_to_pg(q, res.fetchall())
# FORM
res = sq.execute("SELECT * FROM form2;")
q = f"""
INSERT INTO lnticket.form2 (
id,
wallet,
name,
webhook,
description,
flatrate,
amount,
amountmade,
time
)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, to_timestamp(%s));
"""
insert_to_pg(q, res.fetchall())
elif schema == "livestream":
# LIVESTREAMS
res = sq.execute("SELECT * FROM livestreams;")
q = f"""
INSERT INTO livestream.livestreams (
id,
wallet,
fee_pct,
current_track
)
VALUES (%s, %s, %s, %s);
"""
livestreams = res.fetchall()
insert_to_pg(q, livestreams)
fix_id("livestream.livestreams_id_seq", livestreams)
# PRODUCERS
res = sq.execute("SELECT * FROM producers;")
q = f"""
INSERT INTO livestream.producers (
livestream,
id,
"user",
wallet,
name
)
VALUES (%s, %s, %s, %s, %s);
"""
producers = res.fetchall()
insert_to_pg(q, producers)
fix_id("livestream.producers_id_seq", producers)
# TRACKS
res = sq.execute("SELECT * FROM tracks;")
q = f"""
INSERT INTO livestream.tracks (
livestream,
id,
download_url,
price_msat,
name,
producer
)
VALUES (%s, %s, %s, %s, %s, %s);
"""
tracks = res.fetchall()
insert_to_pg(q, tracks)
fix_id("livestream.tracks_id_seq", tracks)
else:
print(f"Not implemented: {schema}")
sq.close()
return
print(f"Migrated: {schema}")
sq.close()
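# Driver: verify that the SQLite and Postgres schema versions match, migrate the core
# tables, then migrate every "ext_<schema>" SQLite file found in the data folder.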
check_db_versions("data/database.sqlite3")
migrate_core("data/database.sqlite3")
files = os.listdir(sqfolder)
for file in files:
path = f"data/{file}"
if file.startswith("ext_"):
schema = file.replace("ext_", "").split(".")[0]
print(f"Migrating: {schema}")
migrate_ext(path, schema)


@@ -47,8 +47,23 @@ Then you can restart it and it will be using the new settings.
You might also need to install additional packages or perform additional setup steps, depending on the chosen backend. See [the short guide](./wallets.md) on each different funding source.
## Important note
If you already have LNbits installed and running, we **HIGHLY** recommend you migrate to postgres!
You can use the script and instructions on [this guide](https://github.com/talvasconcelos/lnbits-sqlite-to-postgres) to migrate your SQLite database to Postgres.
If you already have LNbits installed and running on an SQLite database, we **HIGHLY** recommend you migrate to Postgres!
A migration script is included that makes this easy. You should already have Postgres installed, with a password set for the database user (see the guide above).
```sh
# STOP LNbits
# in the LNbits folder, locate and edit 'conv.py' with the relevant credentials
python3 conv.py
# add the database connection string to .env ('nano .env'): LNBITS_DATABASE_URL=
# postgres://<user>:<password>@<host>/<database> - alter the line below with your user, password and db name
LNBITS_DATABASE_URL="postgres://postgres:postgres@localhost/lnbits"
# save and exit
```
Hopefully everything gets migrated without issues. Launch LNbits again and check that everything is working properly.
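If you want a quick sanity check before relaunching, you can compare a couple of row counts between the two databases (a minimal sketch, assuming the default `lnbits` database name, the default `data/` folder, and that the `sqlite3` and `psql` clients are installed):
```sh
# the counts should match between the old SQLite file and the new Postgres database
sqlite3 data/database.sqlite3 "SELECT count(*) FROM wallets;"
psql -d lnbits -c "SELECT count(*) FROM public.wallets;"
```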
# Additional guides


@@ -36,8 +36,8 @@ async def get_livestream(ls_id: int) -> Optional[Livestream]:
async def get_livestream_by_track(track_id: int) -> Optional[Livestream]:
row = await db.fetchone(
"""
SELECT livestreams.* FROM livestream.livestreams
INNER JOIN tracks ON tracks.livestream = livestreams.id
SELECT livestreams.* AS livestreams FROM livestream.livestreams
INNER JOIN livestream.tracks AS tracks ON tracks.livestream = livestreams.id
WHERE tracks.id = ?
""",
(track_id,),


@@ -75,10 +75,11 @@ class Track(BaseModel):
if not self.download_url:
return None
url = request.url_for("livestream.track_redirect_download", track_id=self.id)
url_with_query = f"{url}?p={payment_hash}"
return UrlAction(
url=request.url_for(
"livestream.track_redirect_download", track_id=self.id, p=payment_hash
),
url=url_with_query,
description=f"Download the track {self.name}!",
)


@@ -1,8 +1,8 @@
import json
from datetime import datetime
from http import HTTPStatus
import shortuuid # type: ignore
import json
from fastapi import HTTPException
from fastapi.param_functions import Query
from starlette.requests import Request
@@ -22,6 +22,7 @@ from .crud import get_withdraw_link_by_hash, update_withdraw_link
name="withdraw.api_lnurl_response",
)
async def api_lnurl_response(request: Request, unique_hash):
print("NOT UNIQUE")
link = await get_withdraw_link_by_hash(unique_hash)
if not link:
@@ -49,68 +50,26 @@ async def api_lnurl_response(request: Request, unique_hash):
return json.dumps(withdrawResponse)
# FOR LNURLs WHICH ARE UNIQUE
@withdraw_ext.get(
"/api/v1/lnurl/{unique_hash}/{id_unique_hash}",
response_class=HTMLResponse,
name="withdraw.api_lnurl_multi_response",
)
async def api_lnurl_multi_response(request: Request, unique_hash, id_unique_hash):
link = await get_withdraw_link_by_hash(unique_hash)
if not link:
raise HTTPException(
status_code=HTTPStatus.OK, detail="LNURL-withdraw not found."
)
if link.is_spent:
raise HTTPException(status_code=HTTPStatus.OK, detail="Withdraw is spent.")
useslist = link.usescsv.split(",")
found = False
for x in useslist:
tohash = link.id + link.unique_hash + str(x)
if id_unique_hash == shortuuid.uuid(name=tohash):
found = True
if not found:
raise HTTPException(
status_code=HTTPStatus.OK, detail="LNURL-withdraw not found."
)
url = request.url_for("withdraw.api_lnurl_callback", unique_hash=link.unique_hash)
withdrawResponse = {
"tag": "withdrawRequest",
"callback": url,
"k1": link.k1,
"minWithdrawable": link.min_withdrawable * 1000,
"maxWithdrawable": link.max_withdrawable * 1000,
"defaultDescription": link.title,
}
return json.dumps(withdrawResponse)
# CALLBACK
#https://5650-2001-8a0-fa12-2900-4c13-748a-fbb9-a47f.ngrok.io/withdraw/api/v1/lnurl/cb/eJHybS8hqcBWajZM63H3FP?k1=MUaYBGrUPuAs8SLpfizmCk&pr=lnbc100n1pse2tsypp5ju0yn3w9j0n8rr3squg0knddawu2ude2cgrm6zje5f34e9jzpmlsdq8w3jhxaqxqyjw5qcqpjsp5tyhu78pamqg5zfy96kup329zt40ramc8gs2ev6jxgp66zca2348qrzjqwac3nxyg3f5mfa4ke9577c4u8kvkx8pqtdsusqdfww0aymk823x6znwa5qqzyqqqyqqqqlgqqqqppgq9q9qy9qsq66zp6pctnlmk59xwtqjga5lvqrkyccmafmn43enhhc6ugew80sanxymepshpv44m9yyhfgh8r2upvxhgk00d36rpqzfy3fxemeu4jhqp96l8hx
@withdraw_ext.get(
"/api/v1/lnurl/cb/{unique_hash}",
status_code=HTTPStatus.OK,
name="withdraw.api_lnurl_callback",
)
async def api_lnurl_callback(
unique_hash,
request: Request,
unique_hash: str = Query(...),
k1: str = Query(...),
payment_request: str = Query(..., alias="pr"),
pr: str = Query(...)
):
link = await get_withdraw_link_by_hash(unique_hash)
now = int(datetime.now().timestamp())
if not link:
raise HTTPException(
status_code=HTTPStatus.NOT_FOUND, detail="LNURL-withdraw not found."
status_code=HTTPStatus.NOT_FOUND, detail="LNURL-withdraw not found"
)
if link.is_spent:
@@ -142,6 +101,8 @@ async def api_lnurl_callback(
"usescsv": usescsv,
}
await update_withdraw_link(link.id, **changes)
payment_request=pr
await pay_invoice(
wallet_id=link.wallet,
@@ -154,3 +115,46 @@ async def api_lnurl_callback(
except Exception as e:
await update_withdraw_link(link.id, **changesback)
return {"status": "ERROR", "reason": "Link not working"}
# FOR LNURLs WHICH ARE UNIQUE
@withdraw_ext.get(
"/api/v1/lnurl/{unique_hash}/{id_unique_hash}",
response_class=HTMLResponse,
name="withdraw.api_lnurl_multi_response",
)
async def api_lnurl_multi_response(request: Request, unique_hash, id_unique_hash):
print("UNIQUE")
link = await get_withdraw_link_by_hash(unique_hash)
if not link:
raise HTTPException(
status_code=HTTPStatus.OK, detail="LNURL-withdraw not found."
)
if link.is_spent:
raise HTTPException(status_code=HTTPStatus.OK, detail="Withdraw is spent.")
useslist = link.usescsv.split(",")
found = False
for x in useslist:
tohash = link.id + link.unique_hash + str(x)
if id_unique_hash == shortuuid.uuid(name=tohash):
found = True
if not found:
raise HTTPException(
status_code=HTTPStatus.OK, detail="LNURL-withdraw not found."
)
url = request.url_for("withdraw.api_lnurl_callback", unique_hash=link.unique_hash)
withdrawResponse = {
"tag": "withdrawRequest",
"callback": url,
"k1": link.k1,
"minWithdrawable": link.min_withdrawable * 1000,
"maxWithdrawable": link.max_withdrawable * 1000,
"defaultDescription": link.title,
}
return json.dumps(withdrawResponse)


@@ -91,7 +91,7 @@ async def print_qr(request: Request, link_id):
return withdraw_renderer().TemplateResponse(
"withdraw/print_qr.html",
{"request": request, "link": link.dict(), unique: False},
{"request": request, "link": link.dict(), "unique": False},
)
links = []
count = 0