2017-01-14 20:30:37 +01:00
|
|
|
import json
|
|
|
|
import logging
|
2019-12-16 14:54:22 +01:00
|
|
|
import os
|
2017-01-14 20:30:37 +01:00
|
|
|
import socket
|
2019-08-08 08:46:05 +02:00
|
|
|
import warnings
|
2021-01-02 14:28:31 +01:00
|
|
|
from contextlib import contextmanager
|
|
|
|
from decimal import Decimal
|
2020-06-15 12:52:42 +02:00
|
|
|
from json import JSONEncoder
|
2021-01-02 14:28:31 +01:00
|
|
|
from math import floor, log10
|
|
|
|
from typing import Optional, Union
|
2020-06-15 12:52:42 +02:00
|
|
|
|
|
|
|
|
|
|
|
def _patched_default(self, obj):
|
|
|
|
return getattr(obj.__class__, "to_json", _patched_default.default)(obj)
|
|
|
|
|
|
|
|
|
|
|
|
def monkey_patch_json(patch=True):
    """Install (patch=True) or remove (patch=False) global `to_json` support.

    Swaps `json.JSONEncoder.default` for `_patched_default`, which consults
    an object's `to_json` method before giving up. Idempotent: calling it
    twice with the same argument is a no-op.
    """
    currently_patched = JSONEncoder.default == _patched_default

    if patch == currently_patched:
        # Already in the requested state; nothing to do.
        return

    if patch:
        _patched_default.default = JSONEncoder.default  # Save unmodified
        JSONEncoder.default = _patched_default  # Replace it.
    else:
        JSONEncoder.default = _patched_default.default
|
2017-01-14 20:30:37 +01:00
|
|
|
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2018-06-30 17:14:10 +02:00
|
|
|
class RpcError(ValueError):
    """Raised when an RPC call returns an error response.

    Carries the failing method name, the payload that was sent, and the
    error object reported by the server.
    """
    def __init__(self, method: str, payload: dict, error: str):
        msg = "RPC call failed: method: {}, payload: {}, error: {}".format(
            method, payload, error
        )
        super(ValueError, self).__init__(msg)

        # Keep the pieces around so callers can inspect them.
        self.method = method
        self.payload = payload
        self.error = error
|
|
|
|
|
|
|
|
|
2019-02-21 03:40:33 +01:00
|
|
|
class Millisatoshi:
    """
    A subtype to represent thousandths of a satoshi.

    Many JSON API fields are expressed in millisatoshis: these automatically
    get turned into Millisatoshi types. Converts to and from int.
    """
    def __init__(self, v: Union[int, str, Decimal]):
        """
        Takes either a string ending in 'msat', 'sat', 'btc' or an integer.

        Raises TypeError for unsupported input types and ValueError for
        fractional or negative millisatoshi amounts.
        """
        if isinstance(v, str):
            if v.endswith("msat"):
                parsed = Decimal(v[0:-4])
            elif v.endswith("sat"):
                parsed = Decimal(v[0:-3]) * 1000
            elif v.endswith("btc"):
                parsed = Decimal(v[0:-3]) * 1000 * 10**8
            else:
                raise TypeError(
                    "Millisatoshi must be string with msat/sat/btc suffix or"
                    " int"
                )
            # A fraction of a millisatoshi cannot be represented.
            if parsed != int(parsed):
                raise ValueError("Millisatoshi must be a whole number")
            self.millisatoshis = int(parsed)

        elif isinstance(v, Millisatoshi):
            self.millisatoshis = v.millisatoshis

        elif int(v) == v:
            # ints and integral Decimals (and integral floats, which pass
            # this check before the float rejection below).
            self.millisatoshis = int(v)

        elif isinstance(v, float):
            raise TypeError("Millisatoshi by float is currently not supported")

        else:
            raise TypeError(
                "Millisatoshi must be string with msat/sat/btc suffix or int"
            )

        if self.millisatoshis < 0:
            raise ValueError("Millisatoshi must be >= 0")

    def __repr__(self) -> str:
        """
        Appends the 'msat' as expected for this type.
        """
        return str(self.millisatoshis) + "msat"

    def to_satoshi(self) -> Decimal:
        """
        Return a Decimal representing the number of satoshis.
        """
        return Decimal(self.millisatoshis) / 1000

    def to_whole_satoshi(self) -> int:
        """
        Return an int respresenting the number of satoshis;
        rounded up to the nearest satoshi
        """
        return (self.millisatoshis + 999) // 1000

    def to_btc(self) -> Decimal:
        """
        Return a Decimal representing the number of bitcoin.
        """
        return Decimal(self.millisatoshis) / 1000 / 10**8

    def to_satoshi_str(self) -> str:
        """
        Return a string of form 1234sat or 1234.567sat.
        """
        if self.millisatoshis % 1000:
            return '{:.3f}sat'.format(self.to_satoshi())
        else:
            return '{:.0f}sat'.format(self.to_satoshi())

    def to_btc_str(self) -> str:
        """
        Return a string of form 12.34567890btc or 12.34567890123btc.
        """
        if self.millisatoshis % 1000:
            return '{:.11f}btc'.format(self.to_btc())
        else:
            return '{:.8f}btc'.format(self.to_btc())

    def to_approx_str(self, digits: int = 3) -> str:
        """Returns the shortmost string using common units representation.

        Rounds to significant `digits`. Default: 3
        """
        def round_to_n(x: int, n: int) -> float:
            # BUGFIX: log10(0) is undefined; a zero amount is already as
            # round as it gets, so short-circuit it.
            if x == 0:
                return 0
            return round(x, -int(floor(log10(x))) + (n - 1))
        result = self.to_satoshi_str()

        # we try to increase digits to check if we did loose out on precision
        # without gaining a shorter string, since this is a rarely used UI
        # function, performance is not an issue. Adds at least one iteration.
        while True:
            # first round everything down to effective digits
            amount_rounded = round_to_n(self.millisatoshis, digits)
            # try different units and take shortest resulting normalized string
            amounts_str = [
                "%gbtc" % (amount_rounded / 1000 / 10**8),
                "%gsat" % (amount_rounded / 1000),
                "%gmsat" % (amount_rounded),
            ]
            test_result = min(amounts_str, key=len)

            # check result and do another run if necessary
            if test_result == result:
                return result
            elif not result or len(test_result) <= len(result):
                digits = digits + 1
                result = test_result
            else:
                return result

    def to_json(self) -> str:
        # Serialized as the canonical "<n>msat" string.
        return self.__repr__()

    def __int__(self) -> int:
        return self.millisatoshis

    def __lt__(self, other: 'Millisatoshi') -> bool:
        return self.millisatoshis < other.millisatoshis

    def __le__(self, other: 'Millisatoshi') -> bool:
        return self.millisatoshis <= other.millisatoshis

    def __eq__(self, other: object) -> bool:
        if isinstance(other, Millisatoshi):
            return self.millisatoshis == other.millisatoshis
        elif isinstance(other, int):
            return self.millisatoshis == other
        else:
            return False

    def __gt__(self, other: 'Millisatoshi') -> bool:
        return self.millisatoshis > other.millisatoshis

    def __ge__(self, other: 'Millisatoshi') -> bool:
        return self.millisatoshis >= other.millisatoshis

    def __add__(self, other: 'Millisatoshi') -> 'Millisatoshi':
        return Millisatoshi(int(self) + int(other))

    def __sub__(self, other: 'Millisatoshi') -> 'Millisatoshi':
        return Millisatoshi(int(self) - int(other))

    def __mul__(self, other: Union[int, float]) -> 'Millisatoshi':
        if isinstance(other, Millisatoshi):
            raise TypeError("Resulting unit msat^2 is not supported")
        return Millisatoshi(floor(self.millisatoshis * other))

    def __truediv__(self, other: Union[int, float, 'Millisatoshi']) -> Union['Millisatoshi', float]:
        # Dividing two amounts yields a plain ratio; dividing by a scalar
        # yields a (floored) amount.
        if isinstance(other, Millisatoshi):
            return self.millisatoshis / other.millisatoshis
        return Millisatoshi(floor(self.millisatoshis / other))

    def __floordiv__(self, other: Union[int, float, 'Millisatoshi']) -> Union['Millisatoshi', int]:
        if isinstance(other, Millisatoshi):
            return self.millisatoshis // other.millisatoshis
        if isinstance(other, int):
            # BUGFIX: avoid the float round-trip (`// float(other)`), which
            # loses precision for amounts above 2**53 msat.
            return Millisatoshi(self.millisatoshis // other)
        return Millisatoshi(floor(self.millisatoshis // float(other)))

    def __mod__(self, other: Union[float, int]) -> 'Millisatoshi':
        return Millisatoshi(int(self.millisatoshis % other))

    def __radd__(self, other: 'Millisatoshi') -> 'Millisatoshi':
        # Supports `int + Millisatoshi` as well as sum() over amounts.
        return Millisatoshi(int(self) + int(other))
|
|
|
|
|
2019-02-21 03:40:33 +01:00
|
|
|
|
2019-12-16 14:54:22 +01:00
|
|
|
class UnixSocket(object):
    """A wrapper for socket.socket that is specialized to unix sockets.

    Some OS implementations impose restrictions on the Unix sockets.

     - On linux OSs the socket path must be shorter than the in-kernel buffer
       size (somewhere around 100 bytes), thus long paths may end up failing
       the `socket.connect` call.

    This is a small wrapper that tries to work around these limitations.
    """

    def __init__(self, path: str):
        # Connects eagerly; raises OSError if the socket cannot be reached.
        self.path = path
        self.sock: Optional[socket.SocketType] = None
        self.connect()

    def connect(self) -> None:
        """Connect to `self.path`, working around long-path limits on Linux."""
        try:
            self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
            self.sock.connect(self.path)
        except OSError as e:
            self.close()

            if (e.args[0] == "AF_UNIX path too long" and os.uname()[0] == "Linux"):
                # If this is a Linux system we may be able to work around this
                # issue by opening our directory and using `/proc/self/fd/` to
                # get a short alias for the socket file.
                #
                # This was heavily inspired by the Open vSwitch code see here:
                # https://github.com/openvswitch/ovs/blob/master/python/ovs/socket_util.py

                dirname = os.path.dirname(self.path)
                basename = os.path.basename(self.path)

                # Open an fd to our home directory, that we can then find
                # through `/proc/self/fd` and access the contents.
                dirfd = os.open(dirname, os.O_DIRECTORY | os.O_RDONLY)
                try:
                    short_path = "/proc/self/fd/%d/%s" % (dirfd, basename)
                    self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
                    self.sock.connect(short_path)
                finally:
                    # BUGFIX: the directory fd was previously never closed,
                    # leaking one fd per long-path connect. Once connected
                    # (or failed) the /proc alias is no longer needed.
                    os.close(dirfd)
            else:
                # There is no good way to recover from this.
                raise

    def close(self) -> None:
        """Close the underlying socket, if any; safe to call repeatedly."""
        if self.sock is not None:
            self.sock.close()
            self.sock = None

    def sendall(self, b: bytes) -> None:
        """Send all of `b`; raises OSError if not connected."""
        if self.sock is None:
            raise socket.error("not connected")

        self.sock.sendall(b)

    def recv(self, length: int) -> bytes:
        """Receive up to `length` bytes; raises OSError if not connected."""
        if self.sock is None:
            raise socket.error("not connected")

        return self.sock.recv(length)

    def __del__(self) -> None:
        # Best-effort cleanup when the wrapper is garbage-collected.
        self.close()
|
|
|
|
|
|
|
|
|
2017-01-17 23:26:00 +01:00
|
|
|
class UnixDomainSocketRpc(object):
    """JSON-RPC client speaking over a unix domain socket.

    Requests and responses are newline-delimited (separated by a blank
    line); a fresh socket is opened for every `call`. Undefined attribute
    access is turned into an RPC call of the same name (see __getattr__).
    """
    def __init__(self, socket_path, executor=None, logger=logging, encoder_cls=json.JSONEncoder, decoder=json.JSONDecoder()):
        self.socket_path = socket_path
        self.encoder_cls = encoder_cls
        self.decoder = decoder
        self.executor = executor
        self.logger = logger
        self._notify = None

        # Id for the next outgoing request; bumped once per call.
        self.next_id = 1

    def _writeobj(self, sock, obj):
        """Serialize `obj` with our encoder class and send it over `sock`."""
        s = json.dumps(obj, ensure_ascii=False, cls=self.encoder_cls)
        sock.sendall(bytearray(s, 'UTF-8'))

    def _readobj(self, sock, buff=b''):
        """Read a JSON object, starting with buff; returns object and any buffer left over."""
        while True:
            parts = buff.split(b'\n\n', 1)
            if len(parts) == 1:
                # Didn't read enough.
                b = sock.recv(max(1024, len(buff)))
                buff += b
                if len(b) == 0:
                    return {'error': 'Connection to RPC server lost.'}, buff
            else:
                buff = parts[1]
                obj, _ = self.decoder.raw_decode(parts[0].decode("UTF-8"))
                return obj, buff

    def __getattr__(self, name):
        """Intercept any call that is not explicitly defined and call @call.

        We might still want to define the actual methods in the subclasses for
        documentation purposes.
        """
        name = name.replace('_', '-')

        def wrapper(*args, **kwargs):
            if len(args) != 0 and len(kwargs) != 0:
                # BUGFIX: RpcError takes (method, payload, error); raising it
                # with a single string used to die with a TypeError instead
                # of the intended RpcError.
                raise RpcError(name, {}, "Cannot mix positional and non-positional arguments")
            elif len(args) != 0:
                return self.call(name, payload=args)
            else:
                return self.call(name, payload=kwargs)
        return wrapper

    def call(self, method, payload=None):
        """Send `method` with `payload` and return the response's 'result'.

        Raises RpcError when the server reports an error, and ValueError
        on malformed responses.
        """
        self.logger.debug("Calling %s with payload %r", method, payload)

        if payload is None:
            payload = {}
        # Filter out arguments that are None
        if isinstance(payload, dict):
            payload = {k: v for k, v in payload.items() if v is not None}

        # FIXME: we open a new socket for every readobj call...
        sock = UnixSocket(self.socket_path)
        this_id = self.next_id
        # BUGFIX: this previously incremented by 0, so every request reused
        # id 1, defeating the response-id check below.
        self.next_id += 1
        buf = b''

        if self._notify is not None:
            # Opt into the notifications support
            self._writeobj(sock, {
                "jsonrpc": "2.0",
                "method": "notifications",
                "id": 0,
                "params": {
                    "enable": True
                },
            })
            # FIXME: Notification schema support?
            _, buf = self._readobj(sock, buf)

        request = {
            "jsonrpc": "2.0",
            "method": method,
            "params": payload,
            "id": this_id,
        }

        self._writeobj(sock, request)
        while True:
            resp, buf = self._readobj(sock, buf)
            # Don't shadow the builtin `id` here.
            resp_id = resp.get("id", None)
            meth = resp.get("method", None)

            if meth == 'message' and self._notify is not None:
                # Progress notification for the in-flight request: forward it
                # to the registered callback and keep reading.
                n = resp['params']
                self._notify(
                    message=n.get('message', None),
                    progress=n.get('progress', None),
                    request=request
                )
                continue

            # A real response carries an id but no method.
            if meth is None or resp_id is None:
                break

        self.logger.debug("Received response for %s call: %r", method, resp)
        if 'id' in resp and resp['id'] != this_id:
            raise ValueError("Malformed response, id is not {}: {}.".format(this_id, resp))
        sock.close()

        if not isinstance(resp, dict):
            raise ValueError("Malformed response, response is not a dictionary %s." % resp)
        elif "error" in resp:
            raise RpcError(method, payload, resp['error'])
        elif "result" not in resp:
            raise ValueError("Malformed response, \"result\" missing.")
        return resp["result"]

    @contextmanager
    def notify(self, fn):
        """Register a notification callback to use for a set of RPC calls.

        This is a context manager and should be used like this:

        ```python
        def fn(message, progress, request, **kwargs):
            print(message)

        with rpc.notify(fn):
            rpc.somemethod()
        ```

        The `fn` function will be called once for each notification
        that is sent by `somemethod`. This is a context manager,
        meaning that multiple commands can share the same context, and
        the same notification function.
        """
        old = self._notify
        self._notify = fn
        yield
        self._notify = old
|
|
|
|
|
2017-01-17 23:26:00 +01:00
|
|
|
|
|
|
|
class LightningRpc(UnixDomainSocketRpc):
|
2018-01-27 16:53:14 +01:00
|
|
|
"""
|
|
|
|
RPC client for the `lightningd` daemon.
|
2017-01-17 23:26:00 +01:00
|
|
|
|
|
|
|
This RPC client connects to the `lightningd` daemon through a unix
|
|
|
|
domain socket and passes calls through. Since some of the calls
|
|
|
|
are blocking, the corresponding python methods include an `async`
|
|
|
|
keyword argument. If `async` is set to true then the method
|
|
|
|
returns a future immediately, instead of blocking indefinitely.
|
|
|
|
|
|
|
|
This implementation is thread safe in that it locks the socket
|
|
|
|
between calls, but it does not (yet) support concurrent calls.
|
|
|
|
"""
|
|
|
|
|
2019-02-21 03:40:33 +01:00
|
|
|
class LightningJSONEncoder(json.JSONEncoder):
|
|
|
|
def default(self, o):
|
|
|
|
try:
|
|
|
|
return o.to_json()
|
|
|
|
except NameError:
|
|
|
|
pass
|
|
|
|
return json.JSONEncoder.default(self, o)
|
|
|
|
|
2019-02-25 05:15:56 +01:00
|
|
|
class LightningJSONDecoder(json.JSONDecoder):
|
2020-06-15 12:52:42 +02:00
|
|
|
def __init__(self, *, object_hook=None, parse_float=None,
|
|
|
|
parse_int=None, parse_constant=None,
|
|
|
|
strict=True, object_pairs_hook=None,
|
|
|
|
patch_json=True):
|
2019-02-25 05:15:56 +01:00
|
|
|
self.object_hook_next = object_hook
|
|
|
|
super().__init__(object_hook=self.millisatoshi_hook, parse_float=parse_float, parse_int=parse_int, parse_constant=parse_constant, strict=strict, object_pairs_hook=object_pairs_hook)
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def replace_amounts(obj):
|
|
|
|
"""
|
|
|
|
Recursively replace _msat fields with appropriate values with Millisatoshi.
|
|
|
|
"""
|
|
|
|
if isinstance(obj, dict):
|
|
|
|
for k, v in obj.items():
|
|
|
|
if k.endswith('msat'):
|
|
|
|
if isinstance(v, str) and v.endswith('msat'):
|
|
|
|
obj[k] = Millisatoshi(v)
|
|
|
|
# Special case for array of msat values
|
|
|
|
elif isinstance(v, list) and all(isinstance(e, str) and e.endswith('msat') for e in v):
|
|
|
|
obj[k] = [Millisatoshi(e) for e in v]
|
|
|
|
else:
|
|
|
|
obj[k] = LightningRpc.LightningJSONDecoder.replace_amounts(v)
|
|
|
|
elif isinstance(obj, list):
|
|
|
|
obj = [LightningRpc.LightningJSONDecoder.replace_amounts(e) for e in obj]
|
|
|
|
|
|
|
|
return obj
|
|
|
|
|
|
|
|
def millisatoshi_hook(self, obj):
|
|
|
|
obj = LightningRpc.LightningJSONDecoder.replace_amounts(obj)
|
|
|
|
if self.object_hook_next:
|
|
|
|
obj = self.object_hook_next(obj)
|
|
|
|
return obj
|
2019-02-21 03:40:33 +01:00
|
|
|
|
2020-06-15 12:52:42 +02:00
|
|
|
def __init__(self, socket_path, executor=None, logger=logging,
|
|
|
|
patch_json=True):
|
|
|
|
super().__init__(
|
|
|
|
socket_path,
|
|
|
|
executor,
|
|
|
|
logger,
|
|
|
|
self.LightningJSONEncoder,
|
|
|
|
self.LightningJSONDecoder()
|
|
|
|
)
|
|
|
|
|
|
|
|
if patch_json:
|
|
|
|
monkey_patch_json(patch=True)
|
2019-02-21 03:40:33 +01:00
|
|
|
|
2021-02-02 06:16:20 +01:00
|
|
|
def addgossip(self, message):
|
|
|
|
"""
|
|
|
|
Inject this (hex-encoded) gossip message.
|
|
|
|
"""
|
|
|
|
payload = {
|
|
|
|
"message": message,
|
|
|
|
}
|
|
|
|
return self.call("addgossip", payload)
|
|
|
|
|
2019-05-13 17:59:29 +02:00
|
|
|
def autocleaninvoice(self, cycle_seconds=None, expired_by=None):
|
|
|
|
"""
|
|
|
|
Sets up automatic cleaning of expired invoices. {cycle_seconds} sets
|
|
|
|
the cleaning frequency in seconds (defaults to 3600) and {expired_by}
|
|
|
|
sets the minimum time an invoice should have been expired for to be
|
|
|
|
cleaned in seconds (defaults to 86400).
|
|
|
|
"""
|
|
|
|
payload = {
|
|
|
|
"cycle_seconds": cycle_seconds,
|
|
|
|
"expired_by": expired_by
|
|
|
|
}
|
|
|
|
return self.call("autocleaninvoice", payload)
|
|
|
|
|
|
|
|
def check(self, command_to_check, **kwargs):
|
|
|
|
"""
|
|
|
|
Checks if a command is valid without running it.
|
|
|
|
"""
|
|
|
|
payload = {"command_to_check": command_to_check}
|
|
|
|
payload.update({k: v for k, v in kwargs.items()})
|
|
|
|
return self.call("check", payload)
|
|
|
|
|
2021-07-27 12:10:31 +02:00
|
|
|
def close(self, peer_id, unilateraltimeout=None, destination=None,
|
2021-09-08 06:41:46 +02:00
|
|
|
fee_negotiation_step=None, force_lease_closed=None, feerange=None):
|
2018-01-28 12:12:37 +01:00
|
|
|
"""
|
2019-05-13 15:16:35 +02:00
|
|
|
Close the channel with peer {id}, forcing a unilateral
|
2019-09-29 10:53:26 +02:00
|
|
|
close after {unilateraltimeout} seconds if non-zero, and
|
|
|
|
the to-local output will be sent to {destination}.
|
2021-07-02 22:38:59 +02:00
|
|
|
|
|
|
|
If channel funds have been leased to the peer and the
|
|
|
|
lease has not yet expired, you can force a close with
|
|
|
|
{force_lease_closed}. Note that your funds will still be
|
|
|
|
locked until the lease expires.
|
2018-01-28 12:12:37 +01:00
|
|
|
"""
|
2021-04-07 04:26:32 +02:00
|
|
|
payload = {
|
|
|
|
"id": peer_id,
|
|
|
|
"unilateraltimeout": unilateraltimeout,
|
|
|
|
"destination": destination,
|
2021-07-02 22:38:59 +02:00
|
|
|
"fee_negotiation_step": fee_negotiation_step,
|
|
|
|
"force_lease_closed": force_lease_closed,
|
2021-09-08 06:41:46 +02:00
|
|
|
"feerange": feerange,
|
2021-04-07 04:26:32 +02:00
|
|
|
}
|
|
|
|
return self.call("close", payload)
|
2018-01-28 12:12:37 +01:00
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def connect(self, peer_id, host=None, port=None):
|
2018-01-27 16:53:14 +01:00
|
|
|
"""
|
2020-05-08 17:30:19 +02:00
|
|
|
Connect to {peer_id} at {host} and {port}.
|
2018-01-27 16:53:14 +01:00
|
|
|
"""
|
2018-02-14 14:17:31 +01:00
|
|
|
payload = {
|
2019-05-13 15:16:35 +02:00
|
|
|
"id": peer_id,
|
|
|
|
"host": host,
|
|
|
|
"port": port
|
2018-02-14 14:17:31 +01:00
|
|
|
}
|
2019-05-13 15:16:35 +02:00
|
|
|
return self.call("connect", payload)
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def decodepay(self, bolt11, description=None):
|
2018-01-27 16:53:14 +01:00
|
|
|
"""
|
2020-05-08 17:30:19 +02:00
|
|
|
Decode {bolt11}, using {description} if necessary.
|
2018-01-27 16:53:14 +01:00
|
|
|
"""
|
2018-02-14 14:17:31 +01:00
|
|
|
payload = {
|
2019-05-13 15:16:35 +02:00
|
|
|
"bolt11": bolt11,
|
|
|
|
"description": description
|
2018-02-14 14:17:31 +01:00
|
|
|
}
|
2019-05-13 15:16:35 +02:00
|
|
|
return self.call("decodepay", payload)
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def delexpiredinvoice(self, maxexpirytime=None):
|
2018-01-27 16:53:14 +01:00
|
|
|
"""
|
2020-05-08 17:30:19 +02:00
|
|
|
Delete all invoices that have expired on or before the given {maxexpirytime}.
|
2018-01-27 16:53:14 +01:00
|
|
|
"""
|
2018-02-14 14:17:31 +01:00
|
|
|
payload = {
|
2019-05-13 15:16:35 +02:00
|
|
|
"maxexpirytime": maxexpirytime
|
2018-02-14 14:17:31 +01:00
|
|
|
}
|
2019-05-13 15:16:35 +02:00
|
|
|
return self.call("delexpiredinvoice", payload)
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def delinvoice(self, label, status):
|
2018-01-27 16:53:14 +01:00
|
|
|
"""
|
2020-05-08 17:30:19 +02:00
|
|
|
Delete unpaid invoice {label} with {status}.
|
2018-01-27 16:53:14 +01:00
|
|
|
"""
|
2018-02-14 14:17:31 +01:00
|
|
|
payload = {
|
|
|
|
"label": label,
|
2019-05-13 15:16:35 +02:00
|
|
|
"status": status
|
2018-02-14 14:17:31 +01:00
|
|
|
}
|
2019-05-13 15:16:35 +02:00
|
|
|
return self.call("delinvoice", payload)
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def dev_crash(self):
|
2018-01-27 16:53:14 +01:00
|
|
|
"""
|
2020-05-08 17:30:19 +02:00
|
|
|
Crash lightningd by calling fatal().
|
2019-05-13 15:16:35 +02:00
|
|
|
"""
|
2019-08-05 12:58:52 +02:00
|
|
|
payload = {
|
|
|
|
"subcommand": "crash"
|
|
|
|
}
|
|
|
|
return self.call("dev", payload)
|
2019-05-13 15:16:35 +02:00
|
|
|
|
|
|
|
def dev_fail(self, peer_id):
|
|
|
|
"""
|
2020-05-08 17:30:19 +02:00
|
|
|
Fail with peer {peer_id}.
|
2018-01-27 16:53:14 +01:00
|
|
|
"""
|
2018-02-14 14:17:31 +01:00
|
|
|
payload = {
|
2019-05-13 15:16:35 +02:00
|
|
|
"id": peer_id
|
2018-02-14 14:17:31 +01:00
|
|
|
}
|
2019-05-13 15:16:35 +02:00
|
|
|
return self.call("dev-fail", payload)
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def dev_forget_channel(self, peerid, force=False):
|
2020-05-08 17:30:19 +02:00
|
|
|
""" Forget the channel with id=peerid.
|
2017-01-17 23:26:00 +01:00
|
|
|
"""
|
2019-05-13 15:16:35 +02:00
|
|
|
return self.call(
|
|
|
|
"dev-forget-channel",
|
|
|
|
payload={"id": peerid, "force": force}
|
|
|
|
)
|
|
|
|
|
|
|
|
def dev_memdump(self):
|
|
|
|
"""
|
2020-05-08 17:30:19 +02:00
|
|
|
Show memory objects currently in use.
|
2019-05-13 15:16:35 +02:00
|
|
|
"""
|
|
|
|
return self.call("dev-memdump")
|
|
|
|
|
|
|
|
def dev_memleak(self):
|
|
|
|
"""
|
2020-05-08 17:30:19 +02:00
|
|
|
Show unreferenced memory objects.
|
2019-05-13 15:16:35 +02:00
|
|
|
"""
|
|
|
|
return self.call("dev-memleak")
|
|
|
|
|
2019-11-04 15:59:01 +01:00
|
|
|
def dev_pay(self, bolt11, msatoshi=None, label=None, riskfactor=None,
|
2021-04-07 04:27:03 +02:00
|
|
|
maxfeepercent=None, retry_for=None,
|
2019-11-04 15:59:01 +01:00
|
|
|
maxdelay=None, exemptfee=None, use_shadow=True):
|
|
|
|
"""
|
|
|
|
A developer version of `pay`, with the possibility to deactivate
|
|
|
|
shadow routing (used for testing).
|
|
|
|
"""
|
|
|
|
payload = {
|
|
|
|
"bolt11": bolt11,
|
|
|
|
"msatoshi": msatoshi,
|
|
|
|
"label": label,
|
|
|
|
"riskfactor": riskfactor,
|
|
|
|
"maxfeepercent": maxfeepercent,
|
|
|
|
"retry_for": retry_for,
|
|
|
|
"maxdelay": maxdelay,
|
|
|
|
"exemptfee": exemptfee,
|
|
|
|
"use_shadow": use_shadow,
|
|
|
|
}
|
|
|
|
return self.call("pay", payload)
|
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def dev_reenable_commit(self, peer_id):
|
2019-05-11 12:45:02 +02:00
|
|
|
"""
|
2020-05-08 17:30:19 +02:00
|
|
|
Re-enable the commit timer on peer {id}.
|
2019-05-11 12:45:02 +02:00
|
|
|
"""
|
|
|
|
payload = {
|
2019-05-13 15:16:35 +02:00
|
|
|
"id": peer_id
|
2019-05-11 12:45:02 +02:00
|
|
|
}
|
2019-05-13 15:16:35 +02:00
|
|
|
return self.call("dev-reenable-commit", payload)
|
2019-05-11 12:45:02 +02:00
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def dev_rescan_outputs(self):
|
2018-01-27 16:53:14 +01:00
|
|
|
"""
|
2020-05-08 17:30:19 +02:00
|
|
|
Synchronize the state of our funds with bitcoind.
|
2019-05-13 15:16:35 +02:00
|
|
|
"""
|
|
|
|
return self.call("dev-rescan-outputs")
|
|
|
|
|
|
|
|
def dev_rhash(self, secret):
|
|
|
|
"""
|
|
|
|
Show SHA256 of {secret}
|
2018-01-27 16:53:14 +01:00
|
|
|
"""
|
2018-02-14 14:17:31 +01:00
|
|
|
payload = {
|
2019-08-05 12:58:52 +02:00
|
|
|
"subcommand": "rhash",
|
2019-05-13 15:16:35 +02:00
|
|
|
"secret": secret
|
2018-02-14 14:17:31 +01:00
|
|
|
}
|
2019-08-05 12:58:52 +02:00
|
|
|
return self.call("dev", payload)
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def dev_sign_last_tx(self, peer_id):
|
2018-01-27 16:53:14 +01:00
|
|
|
"""
|
2020-05-08 17:30:19 +02:00
|
|
|
Sign and show the last commitment transaction with peer {id}.
|
2018-01-27 16:53:14 +01:00
|
|
|
"""
|
2018-02-14 14:17:31 +01:00
|
|
|
payload = {
|
2019-05-13 15:16:35 +02:00
|
|
|
"id": peer_id
|
2018-02-14 14:17:31 +01:00
|
|
|
}
|
2019-05-13 15:16:35 +02:00
|
|
|
return self.call("dev-sign-last-tx", payload)
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2019-08-05 12:58:52 +02:00
|
|
|
def dev_slowcmd(self, msec=None):
|
|
|
|
"""
|
2020-05-08 17:30:19 +02:00
|
|
|
Torture test for slow commands, optional {msec}.
|
2019-08-05 12:58:52 +02:00
|
|
|
"""
|
|
|
|
payload = {
|
|
|
|
"subcommand": "slowcmd",
|
|
|
|
"msec": msec
|
|
|
|
}
|
|
|
|
return self.call("dev", payload)
|
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def disconnect(self, peer_id, force=False):
|
2018-01-27 16:53:14 +01:00
|
|
|
"""
|
2020-05-08 17:30:19 +02:00
|
|
|
Disconnect from peer with {peer_id}, optional {force} even if has active channel.
|
2018-01-27 16:53:14 +01:00
|
|
|
"""
|
2018-02-14 14:17:31 +01:00
|
|
|
payload = {
|
2019-05-13 15:16:35 +02:00
|
|
|
"id": peer_id,
|
|
|
|
"force": force,
|
2018-02-14 14:17:31 +01:00
|
|
|
}
|
2019-05-13 15:16:35 +02:00
|
|
|
return self.call("disconnect", payload)
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def feerates(self, style, urgent=None, normal=None, slow=None):
|
2018-01-27 16:53:14 +01:00
|
|
|
"""
|
2019-05-13 15:16:35 +02:00
|
|
|
Supply feerate estimates manually.
|
2018-01-27 16:53:14 +01:00
|
|
|
"""
|
2019-02-04 03:59:26 +01:00
|
|
|
payload = {
|
2019-05-13 15:16:35 +02:00
|
|
|
"style": style,
|
|
|
|
"urgent": urgent,
|
|
|
|
"normal": normal,
|
|
|
|
"slow": slow
|
2019-02-04 03:59:26 +01:00
|
|
|
}
|
2019-05-13 15:16:35 +02:00
|
|
|
return self.call("feerates", payload)
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2021-07-02 23:19:47 +02:00
|
|
|
def fundchannel(self, node_id, amount, feerate=None, announce=True, minconf=None, utxos=None, push_msat=None, close_to=None, request_amt=None, compact_lease=None):
    """Fund a channel with {id} for {amount} satoshis.

    Uses the default feerate unless {feerate} is given. If {announce} is
    False, channel announcements are suppressed. {minconf} restricts
    coin selection to sufficiently-confirmed outputs, and {utxos} (a
    list of 'txid:vout' strings) pins the exact inputs. {close_to} is a
    valid Bitcoin address. {request_amt} is the lease amount to request
    from the peer (liquidity ads / v2 dual-funding only).
    """
    params = dict(
        id=node_id,
        amount=amount,
        feerate=feerate,
        announce=announce,
        minconf=minconf,
        utxos=utxos,
        push_msat=push_msat,
        close_to=close_to,
        request_amt=request_amt,
        compact_lease=compact_lease,
    )
    return self.call("fundchannel", params)
|
2019-09-19 20:50:58 +02:00
|
|
|
|
2021-04-07 04:28:03 +02:00
|
|
|
def fundchannel_start(self, node_id, amount, feerate=None, announce=True, close_to=None):
    """Start channel funding with {id} for {amount} satoshis.

    Uses the default feerate unless {feerate} is set; {announce}=False
    suppresses channel announcements. Returns a Bech32
    {funding_address} for an external wallet to fund; channel
    establishment is then finished via 'fundchannel_complete'.
    """
    params = dict(
        id=node_id,
        amount=amount,
        feerate=feerate,
        announce=announce,
        close_to=close_to,
    )
    return self.call("fundchannel_start", params)
|
2019-05-23 01:27:56 +02:00
|
|
|
|
2019-05-31 23:57:04 +02:00
|
|
|
def fundchannel_cancel(self, node_id):
    """Cancel a 'started' fundchannel with node {id}."""
    return self.call("fundchannel_cancel", {"id": node_id})
|
|
|
|
|
2021-03-15 05:27:59 +01:00
|
|
|
def _deprecated_fundchannel_complete(self, node_id, funding_txid, funding_txout):
|
|
|
|
warnings.warn("fundchannel_complete: funding_txid & funding_txout replaced by psbt: expect removal"
|
|
|
|
" in Mid-2021",
|
|
|
|
DeprecationWarning)
|
|
|
|
|
2019-05-25 02:40:00 +02:00
|
|
|
payload = {
|
|
|
|
"id": node_id,
|
|
|
|
"txid": funding_txid,
|
2019-05-31 23:37:05 +02:00
|
|
|
"txout": funding_txout,
|
2019-05-25 02:40:00 +02:00
|
|
|
}
|
2019-06-05 02:26:39 +02:00
|
|
|
return self.call("fundchannel_complete", payload)
|
2019-05-25 02:40:00 +02:00
|
|
|
|
2021-03-15 05:27:59 +01:00
|
|
|
def fundchannel_complete(self, node_id, *args, **kwargs):
    """
    Complete channel establishment with {id}, using {psbt}.
    """
    # Legacy callers passed (txid, txout); detect that calling form and
    # route it to the deprecated shim, which warns before calling.
    if 'txid' in kwargs or len(args) == 2:
        return self._deprecated_fundchannel_complete(node_id, *args, **kwargs)

    # Inner function so that forwarding *args/**kwargs produces the
    # normal TypeError if {psbt} is missing or extra arguments are given.
    def _fundchannel_complete(node_id, psbt):
        payload = {
            "id": node_id,
            "psbt": psbt,
        }
        return self.call("fundchannel_complete", payload)

    return _fundchannel_complete(node_id, *args, **kwargs)
|
|
|
|
|
2019-05-13 17:59:29 +02:00
|
|
|
def getinfo(self):
    """
    Show information about this node.
    """
    return self.call("getinfo")
|
2017-01-17 23:26:00 +01:00
|
|
|
|
|
|
|
def getlog(self, level=None):
    """Show logs, with optional log {level} (info|unusual|debug|io)."""
    return self.call("getlog", {"level": level})
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def getpeer(self, peer_id, level=None):
    """
    Show peer with {peer_id}, if {level} is set, include {log}s.

    Returns the single matching peer dict from listpeers, or None if
    the peer is unknown.
    """
    payload = {
        "id": peer_id,
        "level": level
    }
    res = self.call("listpeers", payload)
    peers = res.get("peers")
    # Explicit conditional rather than the old `x and y or None` idiom,
    # which would incorrectly yield None for a falsy-but-present entry.
    return peers[0] if peers else None
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def getroute(self, node_id, msatoshi, riskfactor, cltv=9, fromid=None, fuzzpercent=None, exclude=None, maxhops=20):
    """
    Show route to {id} for {msatoshi}, using {riskfactor} and optional
    {cltv} (default 9). If specified search from {fromid} otherwise use
    this node as source. Randomize the route with up to {fuzzpercent}
    (0.0 -> 100.0, default 5.0). {exclude} is an optional array of
    scid/direction or node-id to exclude. Limit the number of hops in the
    route to {maxhops}.
    """
    payload = {
        "id": node_id,
        "msatoshi": msatoshi,
        "riskfactor": riskfactor,
        "cltv": cltv,
        "fromid": fromid,
        "fuzzpercent": fuzzpercent,
        # The default used to be a shared mutable list ([]); use a None
        # sentinel and substitute a fresh list to keep the wire payload
        # identical while avoiding the mutable-default pitfall.
        "exclude": [] if exclude is None else exclude,
        "maxhops": maxhops
    }
    return self.call("getroute", payload)
|
2017-01-17 23:26:00 +01:00
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def help(self, command=None):
    """Show available commands, or just {command} if supplied."""
    return self.call("help", {"command": command})
|
2018-06-04 06:23:25 +02:00
|
|
|
|
2021-01-08 05:43:50 +01:00
|
|
|
def invoice(self, msatoshi, label, description, expiry=None, fallbacks=None, preimage=None, exposeprivatechannels=None, cltv=None):
    """Create an invoice for {msatoshi} with {label} and {description}.

    Optional {expiry} is in seconds (default 1 week).
    """
    params = dict(
        msatoshi=msatoshi,
        label=label,
        description=description,
        expiry=expiry,
        fallbacks=fallbacks,
        preimage=preimage,
        exposeprivatechannels=exposeprivatechannels,
        cltv=cltv,
    )
    return self.call("invoice", params)
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2021-06-22 15:41:54 +02:00
|
|
|
def listchannels(self, short_channel_id=None, source=None, destination=None):
    """Show all known channels, or filter by optional {short_channel_id}, {source} or {destination}."""
    params = dict(
        short_channel_id=short_channel_id,
        source=source,
        destination=destination,
    )
    return self.call("listchannels", params)
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2019-05-18 19:36:08 +02:00
|
|
|
def listconfigs(self, config=None):
    """List this node's config, optionally just the {config} entry."""
    return self.call("listconfigs", {"config": config})
|
|
|
|
|
2021-01-26 09:27:25 +01:00
|
|
|
def listforwards(self, status=None, in_channel=None, out_channel=None):
    """List all forwarded payments matching {status}, {in_channel} and {out_channel}."""
    params = dict(
        status=status,
        in_channel=in_channel,
        out_channel=out_channel,
    )
    return self.call("listforwards", params)
|
2019-05-13 15:16:35 +02:00
|
|
|
|
2021-03-09 10:20:13 +01:00
|
|
|
def listfunds(self, spent=None):
    """Show funds available for opening channels, or both unspent and spent funds if {spent} is True."""
    return self.call("listfunds", {"spent": spent})
|
2019-05-13 15:16:35 +02:00
|
|
|
|
2019-06-22 02:36:29 +02:00
|
|
|
def listtransactions(self):
    """
    Show wallet history.
    """
    return self.call("listtransactions")
|
|
|
|
|
2021-07-02 02:11:35 +02:00
|
|
|
def listinvoices(self, label=None, payment_hash=None, invstring=None, offer_id=None):
    """Query invoices.

    Show the invoice matching {label}, {payment_hash}, {invstring} or
    {offer_id} — or all invoices when no filter is given.
    """
    params = dict(
        label=label,
        payment_hash=payment_hash,
        invstring=invstring,
        offer_id=offer_id,
    )
    return self.call("listinvoices", params)
|
2018-03-04 23:37:50 +01:00
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def listnodes(self, node_id=None):
    """Show all nodes in our local network view, filtered on node {id} if provided."""
    return self.call("listnodes", {"id": node_id})
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2021-07-20 23:24:36 +02:00
|
|
|
def listpays(self, bolt11=None, payment_hash=None, status=None):
    """Show outgoing payments, regarding {bolt11} or {payment_hash} if set.

    At most one of {bolt11} and {payment_hash} may be given; payments
    may additionally be filtered by {status}.
    """
    # Mutually exclusive filters (same check the daemon would reject).
    assert not (bolt11 and payment_hash)
    params = dict(
        bolt11=bolt11,
        payment_hash=payment_hash,
        status=status,
    )
    return self.call("listpays", params)
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2018-02-14 14:17:31 +01:00
|
|
|
def listpeers(self, peerid=None, level=None):
    """Show current peers; if {level} is set, include {log}s."""
    return self.call("listpeers", {"id": peerid, "level": level})
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2021-09-06 10:51:21 +02:00
|
|
|
def listsendpays(self, bolt11=None, payment_hash=None, status=None):
    """Show all sendpays results, or only for `bolt11` or `payment_hash`."""
    params = dict(
        bolt11=bolt11,
        payment_hash=payment_hash,
        status=status,
    )
    return self.call("listsendpays", params)
|
|
|
|
|
2020-09-09 05:14:37 +02:00
|
|
|
def multifundchannel(self, destinations, feerate=None, minconf=None, utxos=None, minchannels=None, **kwargs):
    """Fund channels to an array of {destinations}.

    Each destination is a dict of node {id} and {amount} to fund, and
    optionally whether to {announce} and how much {push_msat} to give
    outright to the node. You may optionally specify {feerate},
    {minconf} depth, and the {utxos} set to use for the single
    transaction that funds all the channels.
    """
    params = dict(
        destinations=destinations,
        feerate=feerate,
        minconf=minconf,
        utxos=utxos,
        minchannels=minchannels,
    )
    params.update(kwargs)
    return self.call("multifundchannel", params)
|
|
|
|
|
2020-09-09 12:26:14 +02:00
|
|
|
def multiwithdraw(self, outputs, feerate=None, minconf=None, utxos=None, **kwargs):
    """Send to {outputs} via Bitcoin transaction.

    Only select outputs with {minconf} confirmations.
    """
    params = dict(
        outputs=outputs,
        feerate=feerate,
        minconf=minconf,
        utxos=utxos,
    )
    params.update(kwargs)
    return self.call("multiwithdraw", params)
|
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def newaddr(self, addresstype=None):
    """Get a new address of type {addresstype} of the internal wallet."""
    params = {"addresstype": addresstype}
    return self.call("newaddr", params)
|
|
|
|
|
2019-11-04 15:59:01 +01:00
|
|
|
def pay(self, bolt11, msatoshi=None, label=None, riskfactor=None,
        maxfeepercent=None, retry_for=None,
        maxdelay=None, exemptfee=None):
    """Send the payment specified by {bolt11}.

    {msatoshi} is ignored if {bolt11} already carries an amount;
    optional {label} and {riskfactor} (default 1.0).
    """
    params = dict(
        bolt11=bolt11,
        msatoshi=msatoshi,
        label=label,
        riskfactor=riskfactor,
        maxfeepercent=maxfeepercent,
        retry_for=retry_for,
        maxdelay=maxdelay,
        exemptfee=exemptfee,
    )
    return self.call("pay", params)
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2021-06-08 23:50:41 +02:00
|
|
|
def openchannel_init(self, node_id, channel_amount, psbt, feerate=None, funding_feerate=None, announce=True, close_to=None, request_amt=None, *args, **kwargs):
    """Initiate an openchannel with a peer."""
    params = dict(
        id=node_id,
        amount=channel_amount,
        initialpsbt=psbt,
        commitment_feerate=feerate,
        funding_feerate=funding_feerate,
        announce=announce,
        close_to=close_to,
        request_amt=request_amt,
    )
    return self.call("openchannel_init", params)
|
|
|
|
|
|
|
|
def openchannel_signed(self, channel_id, signed_psbt, *args, **kwargs):
    """Send the funding transaction signatures to the peer, finishing the channel open."""
    return self.call("openchannel_signed",
                     {"channel_id": channel_id, "signed_psbt": signed_psbt})
|
|
|
|
|
|
|
|
def openchannel_update(self, channel_id, psbt, *args, **kwargs):
    """Update an openchannel with a peer."""
    return self.call("openchannel_update",
                     {"channel_id": channel_id, "psbt": psbt})
|
|
|
|
|
2021-07-09 21:13:20 +02:00
|
|
|
def openchannel_bump(self, channel_id, amount, initialpsbt, funding_feerate=None):
    """Initiate an RBF for an in-progress open."""
    params = dict(
        channel_id=channel_id,
        amount=amount,
        initialpsbt=initialpsbt,
        funding_feerate=funding_feerate,
    )
    return self.call("openchannel_bump", params)
|
|
|
|
|
2021-03-09 22:14:08 +01:00
|
|
|
def openchannel_abort(self, channel_id):
    """Abort a channel open."""
    return self.call("openchannel_abort", {"channel_id": channel_id})
|
|
|
|
|
2019-05-18 20:59:55 +02:00
|
|
|
def paystatus(self, bolt11=None):
    """Detail status of attempts to pay {bolt11}, or all attempts."""
    return self.call("paystatus", {"bolt11": bolt11})
|
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def ping(self, peer_id, length=128, pongbytes=128):
    """Send {peer_id} a ping of length {len} asking for {pongbytes}."""
    params = dict(
        id=peer_id,
        len=length,
        pongbytes=pongbytes,
    )
    return self.call("ping", params)
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2020-12-14 05:58:35 +01:00
|
|
|
def plugin_start(self, plugin, **kwargs):
    """Add a plugin to lightningd."""
    params = dict(subcommand="start", plugin=plugin)
    params.update(kwargs)
    return self.call("plugin", params)
|
|
|
|
|
|
|
|
def plugin_startdir(self, directory):
    """Add all plugins from a directory to lightningd."""
    return self.call("plugin", {"subcommand": "startdir", "directory": directory})
|
|
|
|
|
|
|
|
def plugin_stop(self, plugin):
    """Stop a lightningd plugin; fails if the plugin is not dynamic."""
    return self.call("plugin", {"subcommand": "stop", "plugin": plugin})
|
|
|
|
|
|
|
|
def plugin_list(self):
    """List all plugins lightningd knows about."""
    return self.call("plugin", {"subcommand": "list"})
|
|
|
|
|
|
|
|
def plugin_rescan(self):
    """Rescan the default plugin directories for new plugins."""
    return self.call("plugin", {"subcommand": "rescan"})
|
|
|
|
|
2021-09-28 18:13:04 +02:00
|
|
|
def sendpay(self, route, payment_hash, label=None, msatoshi=None, bolt11=None, payment_secret=None, partid=None, groupid=None):
    """Send along {route} in return for preimage of {payment_hash}."""
    params = dict(
        route=route,
        payment_hash=payment_hash,
        label=label,
        msatoshi=msatoshi,
        bolt11=bolt11,
        payment_secret=payment_secret,
        partid=partid,
        groupid=groupid,
    )
    return self.call("sendpay", params)
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2021-09-28 18:13:04 +02:00
|
|
|
def sendonion(
        self, onion, first_hop, payment_hash, label=None,
        shared_secrets=None, partid=None, bolt11=None, msatoshi=None,
        destination=None
):
    """Send an outgoing payment using the specified onion.

    This method allows sending a payment using an externally generated
    routing onion; the optional metadata eases internal handling but is
    not required.
    """
    params = dict(
        onion=onion,
        first_hop=first_hop,
        payment_hash=payment_hash,
        label=label,
        shared_secrets=shared_secrets,
        partid=partid,
        bolt11=bolt11,
        msatoshi=msatoshi,
        destination=destination,
    )
    return self.call("sendonion", params)
|
|
|
|
|
2021-09-23 04:42:47 +02:00
|
|
|
def setchannelfee(self, id, base=None, ppm=None, enforcedelay=None):
    """Set routing fees for a channel/peer {id} (or 'all').

    {base} is a millisatoshi value added as a base fee to any routed
    payment; {ppm} is added proportionally per-millionths to any routed
    payment volume in satoshi. {enforcedelay} is the number of seconds
    before enforcing this change.
    """
    params = dict(
        id=id,
        base=base,
        ppm=ppm,
        enforcedelay=enforcedelay,
    )
    return self.call("setchannelfee", params)
|
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def stop(self):
    """
    Shut down the lightningd process.
    """
    return self.call("stop")
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2020-01-28 02:30:00 +01:00
|
|
|
def waitanyinvoice(self, lastpay_index=None, timeout=None, **kwargs):
    """Wait for the next invoice to be paid, after {lastpay_index} (if supplied).

    Fail after {timeout} seconds has passed without an invoice being paid.
    """
    params = dict(
        lastpay_index=lastpay_index,
        timeout=timeout,
    )
    params.update(kwargs)
    return self.call("waitanyinvoice", params)
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2019-12-26 11:19:09 +01:00
|
|
|
def waitblockheight(self, blockheight, timeout=None):
    """Wait for the blockchain to reach the specified block height."""
    return self.call("waitblockheight",
                     {"blockheight": blockheight, "timeout": timeout})
|
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def waitinvoice(self, label):
    """Wait for an incoming payment matching the invoice with {label}."""
    return self.call("waitinvoice", {"label": label})
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2021-09-28 18:13:04 +02:00
|
|
|
def waitsendpay(self, payment_hash, timeout=None, partid=None, groupid=None):
    """Wait for payment for preimage of {payment_hash} to complete."""
    params = dict(
        payment_hash=payment_hash,
        timeout=timeout,
        partid=partid,
        groupid=groupid,
    )
    return self.call("waitsendpay", params)
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2019-08-28 05:35:29 +02:00
|
|
|
def withdraw(self, destination, satoshi, feerate=None, minconf=None, utxos=None):
    """Send to {destination} address {satoshi} (or "all") amount via Bitcoin transaction.

    Only select outputs with {minconf} confirmations.
    """
    params = dict(
        destination=destination,
        satoshi=satoshi,
        feerate=feerate,
        minconf=minconf,
        utxos=utxos,
    )
    return self.call("withdraw", params)
|
2019-06-05 09:00:05 +02:00
|
|
|
|
2020-12-04 10:23:33 +01:00
|
|
|
def txprepare(self, outputs, feerate=None, minconf=None, utxos=None):
    """Prepare a Bitcoin transaction which sends to [outputs].

    The outputs format is like [{address1: amount1}, {address2: amount2}],
    or [{address: "all"}]. Only select outputs with {minconf}
    confirmations. The selected outputs remain reserved until txdiscard
    or txsend is called, or lightningd restarts.
    """
    params = dict(
        outputs=outputs,
        feerate=feerate,
        minconf=minconf,
        utxos=utxos,
    )
    return self.call("txprepare", params)
|
2019-06-05 09:00:05 +02:00
|
|
|
|
|
|
|
def txdiscard(self, txid):
    """Cancel a Bitcoin transaction returned from txprepare.

    The outputs it was spending are released for other use.
    """
    return self.call("txdiscard", {"txid": txid})
|
|
|
|
|
|
|
|
def txsend(self, txid):
    """Sign and broadcast a Bitcoin transaction returned from txprepare."""
    return self.call("txsend", {"txid": txid})
|
2019-10-15 02:53:41 +02:00
|
|
|
|
2021-05-26 03:19:37 +02:00
|
|
|
def reserveinputs(self, psbt, exclusive=True, reserve=None):
    """Reserve any inputs in this psbt."""
    params = dict(
        psbt=psbt,
        exclusive=exclusive,
        reserve=reserve,
    )
    return self.call("reserveinputs", params)
|
|
|
|
|
2021-05-26 03:19:37 +02:00
|
|
|
def unreserveinputs(self, psbt, reserve=None):
    """Unreserve (or reduce reservation on) any previously-reserved UTXOs in this psbt."""
    return self.call("unreserveinputs", {"psbt": psbt, "reserve": reserve})
|
|
|
|
|
2021-02-02 20:43:15 +01:00
|
|
|
def fundpsbt(self, satoshi, feerate, startweight, minconf=None, reserve=True, locktime=None, min_witness_weight=None, excess_as_change=False):
    """Create a PSBT with inputs sufficient to give an output of satoshi."""
    params = dict(
        satoshi=satoshi,
        feerate=feerate,
        startweight=startweight,
        minconf=minconf,
        reserve=reserve,
        locktime=locktime,
        min_witness_weight=min_witness_weight,
        excess_as_change=excess_as_change,
    )
    return self.call("fundpsbt", params)
|
|
|
|
|
2021-02-02 20:43:15 +01:00
|
|
|
def utxopsbt(self, satoshi, feerate, startweight, utxos, reserve=True, reservedok=False, locktime=None, min_witness_weight=None, excess_as_change=False):
    """Create a PSBT with the given inputs, to give an output of satoshi."""
    params = dict(
        satoshi=satoshi,
        feerate=feerate,
        startweight=startweight,
        utxos=utxos,
        reserve=reserve,
        reservedok=reservedok,
        locktime=locktime,
        min_witness_weight=min_witness_weight,
        excess_as_change=excess_as_change,
    )
    return self.call("utxopsbt", params)
|
|
|
|
|
2020-08-18 06:27:04 +02:00
|
|
|
def signpsbt(self, psbt, signonly=None):
    """Add the internal wallet's signatures to a PSBT."""
    return self.call("signpsbt", {"psbt": psbt, "signonly": signonly})
|
|
|
|
|
2021-05-26 03:19:37 +02:00
|
|
|
def sendpsbt(self, psbt, reserve=None):
    """Finalize, extract and broadcast a PSBT."""
    return self.call("sendpsbt", {"psbt": psbt, "reserve": reserve})
|
|
|
|
|
2019-10-15 02:53:41 +02:00
|
|
|
def signmessage(self, message):
    """Sign a message with this node's secret key."""
    return self.call("signmessage", {"message": message})
|
|
|
|
|
|
|
|
def checkmessage(self, message, zbase, pubkey=None):
    """Check if a message was signed (with a specific key).

    Use the returned field ['verified'] to get the result.
    """
    params = dict(
        message=message,
        zbase=zbase,
        pubkey=pubkey,
    )
    return self.call("checkmessage", params)
|
2020-02-04 06:53:17 +01:00
|
|
|
|
|
|
|
def getsharedsecret(self, point, **kwargs):
    """Compute the hash of the ECDH shared secret point.

    Derived from this node's private key and an input {point}.
    """
    params = {"point": point}
    params.update(kwargs)
    return self.call("getsharedsecret", params)
|
2021-06-18 13:29:47 +02:00
|
|
|
|
|
|
|
def keysend(self, destination, msatoshi, label=None, maxfeepercent=None,
            retry_for=None, maxdelay=None, exemptfee=None,
            extratlvs=None):
    """Spontaneously pay {msatoshi} to {destination} without an invoice.

    {extratlvs}, when given, must be a dict mapping integer TLV types
    to hexadecimal values.
    """
    # Validate extratlvs up-front so a malformed argument fails locally.
    if extratlvs is not None and not isinstance(extratlvs, dict):
        raise ValueError(
            "extratlvs is not a dictionary with integer keys and hexadecimal values"
        )

    params = dict(
        destination=destination,
        msatoshi=msatoshi,
        label=label,
        maxfeepercent=maxfeepercent,
        retry_for=retry_for,
        maxdelay=maxdelay,
        exemptfee=exemptfee,
        extratlvs=extratlvs,
    )
    return self.call("keysend", params)
|