Mirror of https://github.com/lnbits/lnbits-legend.git
test: lint `tests` and `tools` as well (#2296)

* test: lint `tests` and `tools` as well, more linting :)
* fix linting issues in tests and tools
* fixup!
* how is this working?
This commit is contained in:
parent e8aa498683, commit 884a1b9d6f
poetry.lock (generated): 70 lines changed
poetry.lock

@@ -716,6 +716,17 @@ files = [
     {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"},
 ]

+[[package]]
+name = "distro"
+version = "1.9.0"
+description = "Distro - an OS platform information API"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"},
+    {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"},
+]
+
 [[package]]
 name = "dnspython"
 version = "2.4.2"
@@ -1072,6 +1083,20 @@ MarkupSafe = ">=2.0"
 [package.extras]
 i18n = ["Babel (>=2.7)"]

+[[package]]
+name = "json5"
+version = "0.9.17"
+description = "A Python implementation of the JSON5 data format."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "json5-0.9.17-py2.py3-none-any.whl", hash = "sha256:f8ec1ecf985951d70f780f6f877c4baca6a47b6e61e02c4cd190138d10a7805a"},
+    {file = "json5-0.9.17.tar.gz", hash = "sha256:717d99d657fa71b7094877b1d921b1cce40ab444389f6d770302563bb7dfd9ae"},
+]
+
+[package.extras]
+dev = ["hypothesis"]
+
 [[package]]
 name = "jsonschema"
 version = "4.20.0"
@@ -1399,6 +1424,29 @@ rsa = ["cryptography (>=3.0.0)"]
 signals = ["blinker (>=1.4.0)"]
 signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"]

+[[package]]
+name = "openai"
+version = "1.12.0"
+description = "The official Python library for the openai API"
+optional = false
+python-versions = ">=3.7.1"
+files = [
+    {file = "openai-1.12.0-py3-none-any.whl", hash = "sha256:a54002c814e05222e413664f651b5916714e4700d041d5cf5724d3ae1a3e3481"},
+    {file = "openai-1.12.0.tar.gz", hash = "sha256:99c5d257d09ea6533d689d1cc77caa0ac679fa21efef8893d8b0832a86877f1b"},
+]
+
+[package.dependencies]
+anyio = ">=3.5.0,<5"
+distro = ">=1.7.0,<2"
+httpx = ">=0.23.0,<1"
+pydantic = ">=1.9.0,<3"
+sniffio = "*"
+tqdm = ">4"
+typing-extensions = ">=4.7,<5"
+
+[package.extras]
+datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"]
+
 [[package]]
 name = "openapi-schema-validator"
 version = "0.6.2"
@@ -2458,6 +2506,26 @@ files = [
     {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
 ]

+[[package]]
+name = "tqdm"
+version = "4.66.2"
+description = "Fast, Extensible Progress Meter"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"},
+    {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[package.extras]
+dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"]
+notebook = ["ipywidgets (>=6)"]
+slack = ["slack-sdk"]
+telegram = ["requests"]
+
 [[package]]
 name = "types-passlib"
 version = "1.7.7.13"
@@ -2862,4 +2930,4 @@ liquid = ["wallycore"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.10 | ^3.9"
-content-hash = "b2e21e0075047150888581a401d46fbe8efd6226e85a85189c3f3db51f825a48"
+content-hash = "e7e5228a2e8cac043065950b2c5e0df05a8e782adc74717c6247cdbe9abf8f77"
pyproject.toml

@@ -69,6 +69,8 @@ ruff = "^0.0.291"
 rpds-py = "0.10.3"
 types-passlib = "^1.7.7.13"
 types-python-jose = "^3.3.4.8"
+openai = "^1.12.0"
+json5 = "^0.9.17"

 [build-system]
 requires = ["poetry-core>=1.0.0"]
@@ -80,7 +82,9 @@ lnbits-cli = "lnbits.commands:main"

 [tool.pyright]
 include = [
-    "lnbits"
+    "lnbits",
+    "tests",
+    "tools",
 ]
 exclude = [
     "lnbits/wallets/lnd_grpc_files",
@@ -89,7 +93,11 @@ exclude = [
 ]

 [tool.mypy]
-files = "lnbits"
+files = [
+    "lnbits",
+    "tests",
+    "tools",
+]
 exclude = [
     "^lnbits/wallets/lnd_grpc_files",
     "^lnbits/extensions",
@@ -118,6 +126,7 @@ module = [
     "py_vapid.*",
     "pywebpush.*",
     "fastapi_sso.sso.*",
+    "json5.*",
 ]
 ignore_missing_imports = "True"

@@ -109,6 +109,7 @@ async def test_channel_management(node_client):
         [channel for channel in data if channel.state == ChannelState.ACTIVE]
     )
     assert close, "No active channel found"
     assert close.point, "No channel point found"

     response = await node_client.delete(
         "/node/api/v1/channels",
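The asserts here double as type narrowing: once mypy and pyright also check `tests`, attribute access on a possibly-None value has to be guarded first. A minimal sketch of the pattern, using a hypothetical `Channel` model rather than the lnbits one:

```python
from dataclasses import dataclass
from typing import Optional


@dataclass
class Point:
    funding_txid: str


@dataclass
class Channel:
    state: str
    point: Optional[Point] = None


def pick_active(channels: list[Channel]) -> Channel:
    close = next((c for c in channels if c.state == "active"), None)
    assert close, "No active channel found"       # narrows Optional[Channel] to Channel
    assert close.point, "No channel point found"  # narrows Optional[Point] to Point
    return close
```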
@@ -7,9 +7,9 @@ import time
 from subprocess import PIPE, Popen, TimeoutExpired
 from typing import Tuple

-import psycopg2
 from loguru import logger
 from sqlalchemy.engine.url import make_url
+from psycopg2 import connect
+from psycopg2.errors import InvalidCatalogName

 from lnbits import core
 from lnbits.db import DB_TYPE, POSTGRES
@@ -158,24 +158,17 @@ def pay_onchain(address: str, sats: int) -> str:

 def clean_database(settings):
     if DB_TYPE == POSTGRES:
         db_url = make_url(settings.lnbits_database_url)

-        conn = psycopg2.connect(settings.lnbits_database_url)
+        conn = connect(settings.lnbits_database_url)
         conn.autocommit = True
         with conn.cursor() as cur:
             try:
                 cur.execute("DROP DATABASE lnbits_test")
-            except psycopg2.errors.InvalidCatalogName:
+            except InvalidCatalogName:
                 pass
             cur.execute("CREATE DATABASE lnbits_test")

         db_url.database = "lnbits_test"
         settings.lnbits_database_url = str(db_url)

         core.db.__init__("database")

         conn.close()
     else:
-        # FIXME: do this once mock data is removed from test data folder
+        # TODO: do this once mock data is removed from test data folder
         # os.remove(settings.lnbits_data_folder + "/database.sqlite3")
         pass
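For context on `clean_database` above: `DROP DATABASE` and `CREATE DATABASE` cannot run inside a transaction block, which is why the connection is switched to autocommit before the cursor is used. A minimal sketch of that pattern, assuming a reachable Postgres server and a placeholder connection URL:

```python
from psycopg2 import connect
from psycopg2.errors import InvalidCatalogName


def reset_test_db(database_url: str, name: str = "lnbits_test") -> None:
    conn = connect(database_url)
    conn.autocommit = True  # DROP/CREATE DATABASE refuse to run inside a transaction
    with conn.cursor() as cur:
        try:
            cur.execute(f"DROP DATABASE {name}")
        except InvalidCatalogName:
            pass  # database did not exist yet, nothing to drop
        cur.execute(f"CREATE DATABASE {name}")
    conn.close()


# reset_test_db("postgres://user:pass@localhost:5432/postgres")  # placeholder URL
```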
@@ -48,7 +48,7 @@ def check_db_versions(sqdb):

     postgres = get_postgres_cursor()
     postgres.execute("SELECT * FROM public.dbversions;")
-    dbpost = dict(postgres.fetchall())
+    dbpost = dict(postgres.fetchall())  # type: ignore

     for key, value in dblite.items():
         if key in dblite and key in dbpost:
@@ -104,7 +104,7 @@ def insert_to_pg(query, data):
     connection.close()


-def migrate_core(file: str, exclude_tables: List[str] = None):
+def migrate_core(file: str, exclude_tables: List[str] = []):
     print(f"Migrating core: {file}")
     migrate_db(file, "public", exclude_tables)
     print("✅ Migrated core")
@@ -118,12 +118,12 @@ def migrate_ext(file: str):
     print(f"✅ Migrated ext: {schema}")


-def migrate_db(file: str, schema: str, exclude_tables: List[str] = None):
+def migrate_db(file: str, schema: str, exclude_tables: List[str] = []):
     # first we check if this file exists:
     assert os.path.isfile(file), f"{file} does not exist!"

-    sq = get_sqlite_cursor(file)
-    tables = sq.execute(
+    cursor = get_sqlite_cursor(file)
+    tables = cursor.execute(
         """
         SELECT name FROM sqlite_master
         WHERE type='table' AND name not like 'sqlite?_%' escape '?'
@@ -139,16 +139,16 @@ def migrate_db(file: str, schema: str, exclude_tables: List[str] = None):
         if exclude_tables and tableName in exclude_tables:
             continue

-        columns = sq.execute(f"PRAGMA table_info({tableName})").fetchall()
+        columns = cursor.execute(f"PRAGMA table_info({tableName})").fetchall()
         q = build_insert_query(schema, tableName, columns)

-        data = sq.execute(f"SELECT * FROM {tableName};").fetchall()
+        data = cursor.execute(f"SELECT * FROM {tableName};").fetchall()

         if len(data) == 0:
             print(f"🛑 You sneaky dev! Table {tableName} is empty!")

         insert_to_pg(q, data)
-    sq.close()
+    cursor.close()


 def build_insert_query(schema, tableName, columns):
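Why the default changed from `None` to `[]`: with the annotation `List[str]`, a `None` default is a type error for the checkers now covering `tools`. The empty-list default is harmless here because these functions only read `exclude_tables`; the other common fix is an explicit `Optional`, sketched below as a hypothetical variant rather than what the commit does:

```python
from typing import List, Optional


def migrate_core_variant(file: str, exclude_tables: Optional[List[str]] = None) -> None:
    # Normalise once, keeping the default immutable.
    exclude_tables = exclude_tables or []
    print(f"would migrate {file}, excluding {exclude_tables}")
```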
@@ -13,16 +13,20 @@ if len(sys.argv) < 2:
     sys.exit(1)
 lang = sys.argv[1]

+assert os.getenv("OPENAI_API_KEY"), "OPENAI_API_KEY env var not set"
+

-def load_language(lang):
+def load_language(lang: str) -> dict:
     s = open(f"lnbits/static/i18n/{lang}.js", "rt").read()
     prefix = "window.localisation.%s = {\n" % lang
     assert s.startswith(prefix)
     s = s[len(prefix) - 2 :]
-    return json5.loads(s)
+    json = json5.loads(s)
+    assert isinstance(json, dict)
+    return json


-def save_language(lang, data):
+def save_language(lang: str, data) -> None:
     with open(f"lnbits/static/i18n/{lang}.js", "wt") as f:
         f.write("window.localisation.%s = {\n" % lang)
         row = 0
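`load_language` assumes each file in `lnbits/static/i18n/` wraps a JSON5 object literal in a `window.localisation.<lang> = { ... }` assignment; slicing off the prefix while keeping the opening brace leaves something `json5.loads` can parse. A rough, self-contained illustration with made-up content (not an actual lnbits language file):

```python
import json5

content = 'window.localisation.en = {\n  server: "Server",\n  theme: "Theme",\n}\n'
prefix = "window.localisation.%s = {\n" % "en"
assert content.startswith(prefix)

# Drop the prefix but keep "{\n" so the remainder is still a complete object literal.
data = json5.loads(content[len(prefix) - 2 :])
assert isinstance(data, dict)
print(data)  # {'server': 'Server', 'theme': 'Theme'}
```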
@@ -40,7 +44,7 @@ def save_language(lang, data):
         f.write("}\n")


-def string_variables_match(str1, str2):
+def string_variables_match(str1: str, str2: str) -> bool:
     pat = re.compile(r"%\{[a-z0-9_]*\}")
     m1 = re.findall(pat, str1)
     m2 = re.findall(pat, str2)
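`string_variables_match` exists so a translation is only accepted when every `%{...}` placeholder from the source string survives; the regex pulls the placeholders out of both strings so they can be compared. A small illustration with made-up strings:

```python
import re

pat = re.compile(r"%\{[a-z0-9_]*\}")

original = "You have %{amount} sats in %{wallet}"
good = "Du hast %{amount} Sats in %{wallet}"
broken = "Du hast {amount} Sats"  # placeholder syntax lost by the model

print(re.findall(pat, original))  # ['%{amount}', '%{wallet}']
print(re.findall(pat, good))      # ['%{amount}', '%{wallet}'] -> placeholders preserved
print(re.findall(pat, broken))    # [] -> mismatch, this translation would be rejected
```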
@@ -66,7 +70,6 @@ def translate_string(lang_from, lang_to, text):
         "kr": "Korean",
         "fi": "Finnish",
     }[lang_to]
-    assert os.getenv("OPENAI_API_KEY"), "OPENAI_API_KEY env var not set"
     client = OpenAI()
     try:
         chat_completion = client.chat.completions.create(
@@ -82,6 +85,7 @@ def translate_string(lang_from, lang_to, text):
             ],
             model="gpt-4-1106-preview",  # aka GPT-4 Turbo
         )
+        assert chat_completion.choices[0].message.content, "No response from GPT-4"
         translated = chat_completion.choices[0].message.content.strip()
         # return translated string only if variables were not broken
         if string_variables_match(text, translated):
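`translate_string` wraps a single chat-completion call; with the API-key assert moved to module level, the call reduces to the shape below. The message contents here are placeholders (the script's actual prompt is not shown in this hunk); only the call structure and the response handling mirror the code above:

```python
import os

from openai import OpenAI

assert os.getenv("OPENAI_API_KEY"), "OPENAI_API_KEY env var not set"

client = OpenAI()
chat_completion = client.chat.completions.create(
    messages=[
        {"role": "system", "content": "Translate the text to German."},  # placeholder prompt
        {"role": "user", "content": "You have %{amount} sats in %{wallet}."},
    ],
    model="gpt-4-1106-preview",  # aka GPT-4 Turbo, as pinned in the script
)
assert chat_completion.choices[0].message.content, "No response from GPT-4"
print(chat_completion.choices[0].message.content.strip())
```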