2017-01-14 20:30:37 +01:00
|
|
|
import json
|
|
|
|
import logging
|
|
|
|
import socket
|
2019-08-08 08:46:05 +02:00
|
|
|
import warnings
|
2019-10-14 21:47:09 +02:00
|
|
|
from decimal import Decimal
|
|
|
|
from math import floor, log10
|
2017-01-14 20:30:37 +01:00
|
|
|
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2018-06-30 17:14:10 +02:00
|
|
|
class RpcError(ValueError):
    """Raised when a JSON-RPC call returns an error response.

    Keeps the failing `method`, the request `payload` and the raw `error`
    object from the response so callers can inspect the failure.
    """
    def __init__(self, method, payload, error):
        # Plain super(): super(ValueError, self) skipped ValueError in the
        # MRO for no benefit; the stored message is identical either way.
        super().__init__("RPC call failed: method: {}, payload: {}, error: {}"
                         .format(method, payload, error))

        self.method = method
        self.payload = payload
        self.error = error
2019-02-21 03:40:33 +01:00
|
|
|
class Millisatoshi:
    """
    A subtype to represent thousandths of a satoshi.

    Many JSON API fields are expressed in millisatoshis: these automatically get
    turned into Millisatoshi types. Converts to and from int.
    """
    def __init__(self, v):
        """
        Takes either a string ending in 'msat', 'sat', 'btc' or an integer.

        Raises TypeError for any other input and ValueError for negative or
        fractional-millisatoshi amounts.
        """
        if isinstance(v, str):
            if v.endswith("msat"):
                self.millisatoshis = int(v[0:-4])
            elif v.endswith("sat"):
                # Decimal avoids binary-float rounding on e.g. "0.1sat".
                self.millisatoshis = Decimal(v[0:-3]) * 1000
            elif v.endswith("btc"):
                self.millisatoshis = Decimal(v[0:-3]) * 1000 * 10**8
            else:
                raise TypeError("Millisatoshi must be string with msat/sat/btc suffix or int")
            if self.millisatoshis != int(self.millisatoshis):
                raise ValueError("Millisatoshi must be a whole number")
            self.millisatoshis = int(self.millisatoshis)

        elif isinstance(v, Millisatoshi):
            self.millisatoshis = v.millisatoshis

        elif int(v) == v:
            self.millisatoshis = int(v)
        else:
            raise TypeError("Millisatoshi must be string with msat/sat/btc suffix or int")

        if self.millisatoshis < 0:
            raise ValueError("Millisatoshi must be >= 0")

    def __repr__(self):
        """
        Appends the 'msat' as expected for this type.
        """
        return str(self.millisatoshis) + "msat"

    def to_satoshi(self):
        """
        Return a Decimal representing the number of satoshis
        """
        return Decimal(self.millisatoshis) / 1000

    def to_btc(self):
        """
        Return a Decimal representing the number of bitcoin
        """
        return Decimal(self.millisatoshis) / 1000 / 10**8

    def to_satoshi_str(self):
        """
        Return a string of form 1234sat or 1234.567sat.
        """
        if self.millisatoshis % 1000:
            return '{:.3f}sat'.format(self.to_satoshi())
        else:
            return '{:.0f}sat'.format(self.to_satoshi())

    def to_btc_str(self):
        """
        Return a string of form 12.34567890btc or 12.34567890123btc.
        """
        if self.millisatoshis % 1000:
            return '{:.11f}btc'.format(self.to_btc())
        else:
            return '{:.8f}btc'.format(self.to_btc())

    def to_approx_str(self, digits: int = 3):
        """Returns the shortmost string using common units representation.

        Rounds to significant `digits`. Default: 3
        """
        def round_to_n(x, n):
            # Round x to n significant digits. log10 is undefined at 0, so
            # zero passes through unchanged (the old lambda crashed on 0msat).
            if x == 0:
                return 0
            return round(x, -int(floor(log10(x))) + (n - 1))
        result = None

        # we try to increase digits to check if we did loose out on precision
        # without gaining a shorter string, since this is a rarely used UI
        # function, performance is not an issue. Adds at least one iteration.
        while True:
            # first round everything down to effective digits
            amount_rounded = round_to_n(self.millisatoshis, digits)
            # try different units and take shortest resulting normalized string
            amounts_str = [
                "%gbtc" % (amount_rounded / 1000 / 10**8),
                "%gsat" % (amount_rounded / 1000),
                "%gmsat" % (amount_rounded),
            ]
            test_result = min(amounts_str, key=len)

            # check result and do another run if necessary
            if test_result == result:
                return result
            elif not result or len(test_result) <= len(result):
                digits = digits + 1
                result = test_result
            else:
                return result

    def to_json(self):
        # JSON representation matches repr: "<n>msat".
        return self.__repr__()

    def __int__(self):
        return self.millisatoshis

    # NOTE(review): comparisons assume `other` is a Millisatoshi; plain ints
    # raise AttributeError, matching the original contract.
    def __lt__(self, other):
        return self.millisatoshis < other.millisatoshis

    def __le__(self, other):
        return self.millisatoshis <= other.millisatoshis

    def __eq__(self, other):
        return self.millisatoshis == other.millisatoshis

    def __gt__(self, other):
        return self.millisatoshis > other.millisatoshis

    def __ge__(self, other):
        return self.millisatoshis >= other.millisatoshis

    def __add__(self, other):
        return Millisatoshi(int(self) + int(other))

    def __sub__(self, other):
        # May raise ValueError if the difference would be negative.
        return Millisatoshi(int(self) - int(other))

    def __mul__(self, other):
        return Millisatoshi(int(int(self) * other))

    def __truediv__(self, other):
        return Millisatoshi(int(int(self) / other))

    def __floordiv__(self, other):
        return Millisatoshi(int(self) // other)

    def __mod__(self, other):
        return Millisatoshi(int(self) % other)

    def __radd__(self, other):
        # Supports sum() and `int + Millisatoshi`.
        return Millisatoshi(int(self) + int(other))
|
|
|
2017-01-17 23:26:00 +01:00
|
|
|
class UnixDomainSocketRpc(object):
    """Generic JSON-RPC client speaking over a unix domain socket.

    A fresh connection is opened for every call; responses are delimited
    by a blank line (b'\\n\\n').
    """
    def __init__(self, socket_path, executor=None, logger=logging, encoder_cls=json.JSONEncoder, decoder=json.JSONDecoder()):
        self.socket_path = socket_path
        self.encoder_cls = encoder_cls
        self.decoder = decoder
        self.executor = executor
        self.logger = logger

        # Monotonically increasing request id, included in each request.
        self.next_id = 0

    def _writeobj(self, sock, obj):
        """Serialize `obj` with the configured encoder and send it on `sock`."""
        s = json.dumps(obj, ensure_ascii=False, cls=self.encoder_cls)
        sock.sendall(bytearray(s, 'UTF-8'))

    def _readobj(self, sock, buff=b''):
        """Read a JSON object, starting with buff; returns object and any buffer left over"""
        while True:
            parts = buff.split(b'\n\n', 1)
            if len(parts) == 1:
                # Didn't read enough; grow reads with the buffer size so
                # large responses don't take O(n) tiny recv() calls.
                b = sock.recv(max(1024, len(buff)))
                buff += b
                if len(b) == 0:
                    # EOF before a full object: surface as an error response.
                    return {'error': 'Connection to RPC server lost.'}, buff
            else:
                buff = parts[1]
                obj, _ = self.decoder.raw_decode(parts[0].decode("UTF-8"))
                return obj, buff

    def __getattr__(self, name):
        """Intercept any call that is not explicitly defined and call @call

        We might still want to define the actual methods in the subclasses for
        documentation purposes.
        """
        name = name.replace('_', '-')

        def wrapper(*args, **kwargs):
            if len(args) != 0 and len(kwargs) != 0:
                # BUG FIX: RpcError needs (method, payload, error); the old
                # single-argument call raised TypeError instead of RpcError.
                raise RpcError(name, {"args": args, "kwargs": kwargs},
                               "Cannot mix positional and non-positional arguments")
            elif len(args) != 0:
                return self.call(name, payload=args)
            else:
                return self.call(name, payload=kwargs)
        return wrapper

    def call(self, method, payload=None):
        """Invoke `method` with `payload`, returning the 'result' field.

        Raises RpcError on an error response and ValueError on a malformed one.
        """
        self.logger.debug("Calling %s with payload %r", method, payload)

        if payload is None:
            payload = {}
        # Filter out arguments that are None
        if isinstance(payload, dict):
            payload = {k: v for k, v in payload.items() if v is not None}

        # FIXME: we open a new socket for every readobj call...
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        sock.connect(self.socket_path)
        self._writeobj(sock, {
            "method": method,
            "params": payload,
            "id": self.next_id,
        })
        self.next_id += 1
        resp, _ = self._readobj(sock)
        sock.close()

        self.logger.debug("Received response for %s call: %r", method, resp)
        if not isinstance(resp, dict):
            raise ValueError("Malformed response, response is not a dictionary %s." % resp)
        elif "error" in resp:
            raise RpcError(method, payload, resp['error'])
        elif "result" not in resp:
            raise ValueError("Malformed response, \"result\" missing.")
        return resp["result"]
|
2017-01-17 23:26:00 +01:00
|
|
|
|
|
|
|
class LightningRpc(UnixDomainSocketRpc):
|
2018-01-27 16:53:14 +01:00
|
|
|
"""
|
|
|
|
RPC client for the `lightningd` daemon.
|
2017-01-17 23:26:00 +01:00
|
|
|
|
|
|
|
This RPC client connects to the `lightningd` daemon through a unix
|
|
|
|
domain socket and passes calls through. Since some of the calls
|
|
|
|
are blocking, the corresponding python methods include an `async`
|
|
|
|
keyword argument. If `async` is set to true then the method
|
|
|
|
returns a future immediately, instead of blocking indefinitely.
|
|
|
|
|
|
|
|
This implementation is thread safe in that it locks the socket
|
|
|
|
between calls, but it does not (yet) support concurrent calls.
|
|
|
|
"""
|
|
|
|
|
2019-02-21 03:40:33 +01:00
|
|
|
class LightningJSONEncoder(json.JSONEncoder):
    """JSON encoder that serializes objects exposing `to_json()`
    (e.g. Millisatoshi) before falling back to the stock encoder."""
    def default(self, o):
        try:
            return o.to_json()
        except AttributeError:
            # BUG FIX: a missing to_json() raises AttributeError, not
            # NameError; fall through so the base class raises the
            # conventional TypeError for unserializable objects.
            pass
        return json.JSONEncoder.default(self, o)
|
2019-02-25 05:15:56 +01:00
|
|
|
class LightningJSONDecoder(json.JSONDecoder):
    """JSON decoder that converts '*msat' string fields into Millisatoshi
    while still honoring any caller-supplied object_hook."""

    def __init__(self, *, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, strict=True, object_pairs_hook=None):
        # Remember the caller's hook so it runs after our amount conversion.
        self.object_hook_next = object_hook
        super().__init__(object_hook=self.millisatoshi_hook, parse_float=parse_float, parse_int=parse_int, parse_constant=parse_constant, strict=strict, object_pairs_hook=object_pairs_hook)

    @staticmethod
    def replace_amounts(obj):
        """
        Recursively replace _msat fields with appropriate values with Millisatoshi.
        """
        if isinstance(obj, dict):
            for k, v in obj.items():
                if k.endswith('msat'):
                    if isinstance(v, str) and v.endswith('msat'):
                        obj[k] = Millisatoshi(v)
                    # Special case for array of msat values
                    elif isinstance(v, list) and all(isinstance(e, str) and e.endswith('msat') for e in v):
                        obj[k] = [Millisatoshi(e) for e in v]
                else:
                    # Non-msat key: recurse into nested containers.
                    obj[k] = LightningRpc.LightningJSONDecoder.replace_amounts(v)
        elif isinstance(obj, list):
            obj = [LightningRpc.LightningJSONDecoder.replace_amounts(e) for e in obj]

        return obj

    def millisatoshi_hook(self, obj):
        # Convert amounts first, then chain to the user-supplied hook (if any).
        obj = LightningRpc.LightningJSONDecoder.replace_amounts(obj)
        if self.object_hook_next:
            obj = self.object_hook_next(obj)
        return obj
|
|
|
|
|
def __init__(self, socket_path, executor=None, logger=logging):
    # BUG FIX: pass the caller-supplied `logger` through, not the `logging`
    # module itself (custom loggers were silently ignored before).
    super().__init__(socket_path, executor, logger, self.LightningJSONEncoder, self.LightningJSONDecoder())
|
|
|
|
2019-05-13 17:59:29 +02:00
|
|
|
def autocleaninvoice(self, cycle_seconds=None, expired_by=None):
    """Enable periodic cleanup of expired invoices.

    {cycle_seconds} sets the cleaning frequency in seconds (defaults to
    3600); {expired_by} sets how long an invoice must already have been
    expired, in seconds, before it is removed (defaults to 86400).
    """
    return self.call("autocleaninvoice", {
        "cycle_seconds": cycle_seconds,
        "expired_by": expired_by,
    })
|
|
|
|
def check(self, command_to_check, **kwargs):
    """Validate {command_to_check} with the given arguments without running it."""
    request = {"command_to_check": command_to_check}
    request.update(kwargs)
    return self.call("check", request)
|
2019-08-08 08:46:05 +02:00
|
|
|
def _deprecated_close(self, peer_id, force=None, timeout=None):
    # Legacy close signature, kept for callers still passing force/timeout.
    warnings.warn("close now takes unilateraltimeout arg: expect removal"
                  " in early 2020",
                  DeprecationWarning)
    return self.call("close", {
        "id": peer_id,
        "force": force,
        "timeout": timeout,
    })
|
|
|
|
def close(self, peer_id, *args, **kwargs):
    """
    Close the channel with peer {id}, forcing a unilateral
    close after {unilateraltimeout} seconds if non-zero, and
    the to-local output will be sent to {destination}.

    Deprecated usage has {force} and {timeout} args.
    """

    # Explicit deprecated keywords: route through the old signature.
    if 'force' in kwargs or 'timeout' in kwargs:
        return self._deprecated_close(peer_id, *args, **kwargs)

    # A single positional argument is ambiguous between the old and new
    # signatures; use type-based heuristics to pick the deprecated path.
    if len(args) >= 1:
        # A bool first positional can only be the deprecated {force}.
        if isinstance(args[0], bool):
            return self._deprecated_close(peer_id, *args, **kwargs)
        if len(args) == 2:
            # (None, int) matches the deprecated (force, timeout) pair.
            if args[0] is None and isinstance(args[1], int):
                return self._deprecated_close(peer_id, *args, **kwargs)

    # Modern signature: the nested function binds the new argument names
    # for whatever mix of *args/**kwargs the caller used.
    def _close(peer_id, unilateraltimeout=None, destination=None):
        payload = {
            "id": peer_id,
            "unilateraltimeout": unilateraltimeout,
            "destination": destination
        }
        return self.call("close", payload)

    return _close(peer_id, *args, **kwargs)
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def connect(self, peer_id, host=None, port=None):
    """Open a connection to node {peer_id}, optionally at {host} and {port}."""
    return self.call("connect", {
        "id": peer_id,
        "host": host,
        "port": port,
    })
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def decodepay(self, bolt11, description=None):
    """Decode the {bolt11} string, supplying {description} when required."""
    return self.call("decodepay", {
        "bolt11": bolt11,
        "description": description,
    })
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def delexpiredinvoice(self, maxexpirytime=None):
    """Remove every invoice that expired on or before {maxexpirytime}."""
    return self.call("delexpiredinvoice", {"maxexpirytime": maxexpirytime})
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def delinvoice(self, label, status):
    """Delete the invoice {label}, which must currently be in {status}."""
    return self.call("delinvoice", {
        "label": label,
        "status": status,
    })
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def dev_crash(self):
    """Make lightningd abort via fatal() (dev subcommand 'crash')."""
    return self.call("dev", {"subcommand": "crash"})
|
|
|
|
|
|
|
def dev_fail(self, peer_id):
    """Force a failure with peer {peer_id}."""
    return self.call("dev-fail", {"id": peer_id})
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def dev_forget_channel(self, peerid, force=False):
    """Drop our state for the channel with {peerid}; {force} skips safety checks."""
    request = {"id": peerid, "force": force}
    return self.call("dev-forget-channel", payload=request)
|
|
|
|
|
|
def dev_memdump(self):
    """Dump the memory objects currently in use."""
    return self.call("dev-memdump")
|
|
|
|
|
|
def dev_memleak(self):
    """Report unreferenced (leaked) memory objects."""
    return self.call("dev-memleak")
|
|
|
2019-11-04 15:59:01 +01:00
|
|
|
def dev_pay(self, bolt11, msatoshi=None, label=None, riskfactor=None,
            description=None, maxfeepercent=None, retry_for=None,
            maxdelay=None, exemptfee=None, use_shadow=True):
    """Testing variant of `pay` that allows disabling shadow routing."""
    return self.call("pay", {
        "bolt11": bolt11,
        "msatoshi": msatoshi,
        "label": label,
        "riskfactor": riskfactor,
        "maxfeepercent": maxfeepercent,
        "retry_for": retry_for,
        "maxdelay": maxdelay,
        "exemptfee": exemptfee,
        "use_shadow": use_shadow,
        # Deprecated.
        "description": description,
    })
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def dev_reenable_commit(self, peer_id):
    """Re-enable the commit timer for peer {peer_id}."""
    return self.call("dev-reenable-commit", {"id": peer_id})
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def dev_rescan_outputs(self):
    """Resynchronize the state of our funds with bitcoind."""
    return self.call("dev-rescan-outputs")
|
|
|
|
|
|
def dev_rhash(self, secret):
    """Return the SHA256 of {secret} (dev subcommand 'rhash')."""
    return self.call("dev", {
        "subcommand": "rhash",
        "secret": secret,
    })
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def dev_sign_last_tx(self, peer_id):
    """Sign and return the last commitment transaction with peer {peer_id}."""
    return self.call("dev-sign-last-tx", {"id": peer_id})
|
|
|
|
2019-08-05 12:58:52 +02:00
|
|
|
def dev_slowcmd(self, msec=None):
    """Torture test for slow command handling, delaying {msec} if given."""
    return self.call("dev", {
        "subcommand": "slowcmd",
        "msec": msec,
    })
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def disconnect(self, peer_id, force=False):
    """Disconnect from {peer_id}; {force} disconnects even with an active channel."""
    return self.call("disconnect", {
        "id": peer_id,
        "force": force,
    })
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def feerates(self, style, urgent=None, normal=None, slow=None):
    """Supply manual feerate estimates in {style}, optionally overriding
    the {urgent}, {normal} and {slow} tiers."""
    return self.call("feerates", {
        "style": style,
        "urgent": urgent,
        "normal": normal,
        "slow": slow,
    })
|
|
|
|
2019-09-19 09:37:05 +02:00
|
|
|
def _deprecated_fundchannel(self, node_id, satoshi, feerate=None, announce=True, minconf=None, utxos=None):
    # Legacy 'satoshi' spelling of fundchannel's amount parameter.
    warnings.warn("fundchannel: the 'satoshi' field is renamed 'amount' : expect removal"
                  " in Mid-2020",
                  DeprecationWarning)

    return self.call("fundchannel", {
        "id": node_id,
        "satoshi": satoshi,
        "feerate": feerate,
        "announce": announce,
        "minconf": minconf,
        "utxos": utxos,
    })
|
|
|
2019-09-19 09:37:05 +02:00
|
|
|
def fundchannel(self, node_id, *args, **kwargs):
    """
    Fund channel with {id} using {amount} satoshis with feerate
    of {feerate} (uses default feerate if unset).
    If {announce} is False, don't send channel announcements.
    Only select outputs with {minconf} confirmations.
    If {utxos} is specified (as a list of 'txid:vout' strings),
    fund a channel from these specifics utxos.
    """

    # Callers still using the old 'satoshi' keyword take the deprecated
    # path, which emits a DeprecationWarning.
    if 'satoshi' in kwargs:
        return self._deprecated_fundchannel(node_id, *args, **kwargs)

    # The nested function binds the modern argument names for whatever
    # mix of *args/**kwargs the caller used.
    def _fundchannel(node_id, amount, feerate=None, announce=True, minconf=None, utxos=None):
        payload = {
            "id": node_id,
            "amount": amount,
            "feerate": feerate,
            "announce": announce,
            "minconf": minconf,
            "utxos": utxos
        }
        return self.call("fundchannel", payload)

    return _fundchannel(node_id, *args, **kwargs)
|
|
|
2019-09-19 20:50:58 +02:00
|
|
|
def _deprecated_fundchannel_start(self, node_id, satoshi, feerate=None, announce=True):
    # Legacy 'satoshi' spelling of fundchannel_start's amount parameter.
    warnings.warn("fundchannel_start: the 'satoshi' field is renamed 'amount' : expect removal"
                  " in Mid-2020",
                  DeprecationWarning)

    return self.call("fundchannel_start", {
        "id": node_id,
        "satoshi": satoshi,
        "feerate": feerate,
        "announce": announce,
    })
|
|
|
|
|
|
def fundchannel_start(self, node_id, *args, **kwargs):
    """
    Start channel funding with {id} for {amount} satoshis
    with feerate of {feerate} (uses default feerate if unset).
    If {announce} is False, don't send channel announcements.
    Returns a Bech32 {funding_address} for an external wallet
    to create a funding transaction for. Requires a call to
    'fundchannel_complete' to complete channel establishment
    with peer.
    """

    # Callers still using the old 'satoshi' keyword take the deprecated
    # path, which emits a DeprecationWarning.
    if 'satoshi' in kwargs:
        return self._deprecated_fundchannel_start(node_id, *args, **kwargs)

    # The nested function binds the modern argument names for whatever
    # mix of *args/**kwargs the caller used.
    def _fundchannel_start(node_id, amount, feerate=None, announce=True, close_to=None):
        payload = {
            "id": node_id,
            "amount": amount,
            "feerate": feerate,
            "announce": announce,
            "close_to": close_to,
        }
        return self.call("fundchannel_start", payload)

    return _fundchannel_start(node_id, *args, **kwargs)
|
|
|
|
2019-05-31 23:57:04 +02:00
|
|
|
def fundchannel_cancel(self, node_id):
    """Abort a fundchannel previously 'started' with node {node_id}."""
    return self.call("fundchannel_cancel", {"id": node_id})
|
|
|
2019-06-05 02:26:39 +02:00
|
|
|
def fundchannel_complete(self, node_id, funding_txid, funding_txout):
    """Finish channel establishment with {node_id}, using output
    {funding_txout} of transaction {funding_txid}."""
    return self.call("fundchannel_complete", {
        "id": node_id,
        "txid": funding_txid,
        "txout": funding_txout,
    })
|
|
|
|
2019-05-13 17:59:29 +02:00
|
|
|
def getinfo(self):
    """Return information about this node."""
    return self.call("getinfo")
|
|
|
|
|
|
|
def getlog(self, level=None):
    """Return daemon logs, optionally filtered by {level} (info|unusual|debug|io)."""
    return self.call("getlog", {"level": level})
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def getpeer(self, peer_id, level=None):
    """
    Show peer with {peer_id}, if {level} is set, include {log}s
    """
    payload = {
        "id": peer_id,
        "level": level
    }
    res = self.call("listpeers", payload)
    # Conditional expression instead of the fragile `and/or` idiom, which
    # would wrongly yield None if the first entry were ever falsy.
    peers = res.get("peers")
    return peers[0] if peers else None
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def getroute(self, node_id, msatoshi, riskfactor, cltv=9, fromid=None, fuzzpercent=None, exclude=None, maxhops=20):
    """
    Show route to {id} for {msatoshi}, using {riskfactor} and optional
    {cltv} (default 9). If specified search from {fromid} otherwise use
    this node as source. Randomize the route with up to {fuzzpercent}
    (0.0 -> 100.0, default 5.0). {exclude} is an optional array of
    scid/direction or node-id to exclude. Limit the number of hops in the
    route to {maxhops}.
    """
    # Avoid the shared mutable-default pitfall; None means "no exclusions"
    # and is normalized to [] so the request payload is unchanged.
    if exclude is None:
        exclude = []
    payload = {
        "id": node_id,
        "msatoshi": msatoshi,
        "riskfactor": riskfactor,
        "cltv": cltv,
        "fromid": fromid,
        "fuzzpercent": fuzzpercent,
        "exclude": exclude,
        "maxhops": maxhops
    }
    return self.call("getroute", payload)
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def help(self, command=None):
    """List available commands, or describe just {command} when given."""
    return self.call("help", {"command": command})
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def invoice(self, msatoshi, label, description, expiry=None, fallbacks=None, preimage=None, exposeprivatechannels=None):
    """Create an invoice for {msatoshi} with unique {label} and {description}.

    Optional: {expiry} seconds (default 1 week), {fallbacks} addresses,
    a fixed {preimage}, and {exposeprivatechannels} routehint control.
    """
    return self.call("invoice", {
        "msatoshi": msatoshi,
        "label": label,
        "description": description,
        "expiry": expiry,
        "fallbacks": fallbacks,
        "preimage": preimage,
        "exposeprivatechannels": exposeprivatechannels,
    })
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def listchannels(self, short_channel_id=None, source=None):
    """List known channels, optionally filtered by {short_channel_id} or {source}."""
    return self.call("listchannels", {
        "short_channel_id": short_channel_id,
        "source": source,
    })
|
|
|
|
2019-05-18 19:36:08 +02:00
|
|
|
def listconfigs(self, config=None):
    """Return this node's configuration, or just the single {config} option."""
    return self.call("listconfigs", {"config": config})
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def listforwards(self):
    """List every forwarded payment and its details."""
    return self.call("listforwards")
|
|
|
|
|
|
def listfunds(self):
    """Show the funds available for opening channels."""
    return self.call("listfunds")
|
|
|
2019-06-22 02:36:29 +02:00
|
|
|
def listtransactions(self):
    """Return the wallet's transaction history."""
    return self.call("listtransactions")
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def listinvoices(self, label=None):
    """Show invoice matching {label} (or all invoices if no {label})."""
    return self.call("listinvoices", {"label": label})
|
2018-03-04 23:37:50 +01:00
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def listnodes(self, node_id=None):
    """Show all nodes in our local network view.

    Filters on node {node_id} if provided (sent as the RPC
    parameter "id").
    """
    return self.call("listnodes", {"id": node_id})
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2018-01-28 14:46:52 +01:00
|
|
|
def listpayments(self, bolt11=None, payment_hash=None):
    """Show outgoing payments, regarding {bolt11} or {payment_hash} if set.

    Can only specify one of {bolt11} or {payment_hash}.

    Raises:
        ValueError: if both {bolt11} and {payment_hash} are given.
    """
    # Raise instead of assert: asserts are stripped under `python -O`,
    # which would silently accept contradictory filters.
    if bolt11 and payment_hash:
        raise ValueError("Can only specify one of bolt11 or payment_hash")
    payload = {
        "bolt11": bolt11,
        "payment_hash": payment_hash
    }
    return self.call("listpayments", payload)
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2018-02-14 14:17:31 +01:00
|
|
|
def listpeers(self, peerid=None, level=None):
    """Show current peers; if {level} is set, include logs."""
    return self.call("listpeers", {
        "id": peerid,
        "level": level,
    })
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2019-05-18 19:36:08 +02:00
|
|
|
def listsendpays(self, bolt11=None, payment_hash=None):
    """Show all sendpays results, or only for `bolt11` or `payment_hash`"""
    return self.call("listsendpays", {
        "bolt11": bolt11,
        "payment_hash": payment_hash,
    })
|
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def newaddr(self, addresstype=None):
    """Get a new address of type {addresstype} of the internal wallet."""
    payload = {"addresstype": addresstype}
    return self.call("newaddr", payload)
|
|
|
|
|
2019-11-04 15:59:01 +01:00
|
|
|
def pay(self, bolt11, msatoshi=None, label=None, riskfactor=None,
        description=None, maxfeepercent=None, retry_for=None,
        maxdelay=None, exemptfee=None):
    """Send payment specified by {bolt11} with {msatoshi}.

    {msatoshi} is ignored if {bolt11} already carries an amount.
    Optional {label} and {riskfactor} (default 1.0) may be given,
    along with fee/delay limits {maxfeepercent}, {retry_for},
    {maxdelay} and {exemptfee}.
    """
    return self.call("pay", {
        "bolt11": bolt11,
        "msatoshi": msatoshi,
        "label": label,
        "riskfactor": riskfactor,
        "maxfeepercent": maxfeepercent,
        "retry_for": retry_for,
        "maxdelay": maxdelay,
        "exemptfee": exemptfee,
        # Deprecated.
        "description": description,
    })
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2019-05-18 20:59:55 +02:00
|
|
|
def paystatus(self, bolt11=None):
    """Detail status of attempts to pay {bolt11} or any"""
    return self.call("paystatus", {"bolt11": bolt11})
|
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def ping(self, peer_id, length=128, pongbytes=128):
    """Send {peer_id} a ping of length {length} asking for {pongbytes}.

    Note the wire parameter name for the length is "len".
    """
    return self.call("ping", {
        "id": peer_id,
        "len": length,
        "pongbytes": pongbytes,
    })
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2019-07-23 01:24:47 +02:00
|
|
|
def plugin_start(self, plugin):
    """Adds a plugin to lightningd."""
    return self.call("plugin", {
        "subcommand": "start",
        "plugin": plugin,
    })
|
|
|
|
|
|
|
|
def plugin_startdir(self, directory):
    """Adds all plugins from a directory to lightningd."""
    return self.call("plugin", {
        "subcommand": "startdir",
        "directory": directory,
    })
|
|
|
|
|
|
|
|
def plugin_stop(self, plugin):
    """Stops a lightningd plugin, will fail if plugin is not dynamic."""
    return self.call("plugin", {
        "subcommand": "stop",
        "plugin": plugin,
    })
|
|
|
|
|
|
|
|
def plugin_list(self):
    """Lists all plugins lightningd knows about."""
    return self.call("plugin", {"subcommand": "list"})
|
|
|
|
|
|
|
|
def plugin_rescan(self):
    """Ask lightningd to rescan its plugin directories."""
    return self.call("plugin", {"subcommand": "rescan"})
|
|
|
|
|
2019-09-24 06:25:58 +02:00
|
|
|
def _deprecated_sendpay(self, route, payment_hash, description, msatoshi=None):
    """Legacy sendpay shim: maps the old 'description' arg onto 'label'."""
    warnings.warn("sendpay: the 'description' field is renamed 'label' : expect removal"
                  " in early-2020",
                  DeprecationWarning)
    return self.call("sendpay", {
        "route": route,
        "payment_hash": payment_hash,
        "label": description,
        "msatoshi": msatoshi,
    })
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2019-09-24 06:25:58 +02:00
|
|
|
def sendpay(self, route, payment_hash, *args, **kwargs):
    """Send along {route} in return for preimage of {payment_hash}."""
    # Old callers may still pass 'description': forward them to the
    # deprecated shim, which warns and renames the field to 'label'.
    if 'description' in kwargs:
        return self._deprecated_sendpay(route, payment_hash, *args, **kwargs)

    # Inner function enforces the modern argument names/positions.
    def _sendpay(route, payment_hash, label=None, msatoshi=None):
        return self.call("sendpay", {
            "route": route,
            "payment_hash": payment_hash,
            "label": label,
            "msatoshi": msatoshi,
        })

    return _sendpay(route, payment_hash, *args, **kwargs)
|
|
|
|
|
2019-05-13 17:59:29 +02:00
|
|
|
def setchannelfee(self, id, base=None, ppm=None):
    """Set routing fees for a channel/peer {id} (or 'all').

    {base} is a value in millisatoshi added as base fee to any routed
    payment; {ppm} is added proportionally, per-millionth of the routed
    payment volume in satoshi.
    """
    return self.call("setchannelfee", {
        "id": id,
        "base": base,
        "ppm": ppm,
    })
|
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def stop(self):
    """Shut down the lightningd process."""
    # No parameters: issue the bare RPC call.
    return self.call("stop")
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def waitanyinvoice(self, lastpay_index=None):
    """Wait for the next invoice to be paid, after {lastpay_index} (if supplied)."""
    return self.call("waitanyinvoice", {"lastpay_index": lastpay_index})
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def waitinvoice(self, label):
    """Wait for an incoming payment matching the invoice with {label}."""
    return self.call("waitinvoice", {"label": label})
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2019-05-13 15:16:35 +02:00
|
|
|
def waitsendpay(self, payment_hash, timeout=None):
    """Wait for payment for preimage of {payment_hash} to complete."""
    return self.call("waitsendpay", {
        "payment_hash": payment_hash,
        "timeout": timeout,
    })
|
2018-01-27 16:53:14 +01:00
|
|
|
|
2019-08-28 05:35:29 +02:00
|
|
|
def withdraw(self, destination, satoshi, feerate=None, minconf=None, utxos=None):
    """Send {satoshi} (or "all") to {destination} via a Bitcoin transaction.

    Only selects outputs with {minconf} confirmations; optional
    {feerate} and an explicit {utxos} list may be supplied.
    """
    return self.call("withdraw", {
        "destination": destination,
        "satoshi": satoshi,
        "feerate": feerate,
        "minconf": minconf,
        "utxos": utxos,
    })
|
2019-06-05 09:00:05 +02:00
|
|
|
|
2019-10-06 21:17:54 +02:00
|
|
|
def _deprecated_txprepare(self, destination, satoshi, feerate=None, minconf=None):
    """Legacy txprepare shim taking (destination, satoshi) instead of outputs."""
    warnings.warn("txprepare now takes output arg: expect removal"
                  " in Mid-2020",
                  DeprecationWarning)
    return self.call("txprepare", {
        "destination": destination,
        "satoshi": satoshi,
        "feerate": feerate,
        "minconf": minconf,
    })
|
|
|
|
|
|
|
|
def txprepare(self, *args, **kwargs):
    """
    Prepare a bitcoin transaction which sends to [outputs].

    The format of output is like [{address1: amount1},
    {address2: amount2}], or [{address: "all"}]).
    Only select outputs with {minconf} confirmations.

    Outputs will be reserved until you call txdiscard or txsend, or
    lightningd restarts.
    """
    # Legacy keyword form: txprepare(destination=..., satoshi=...).
    if 'destination' in kwargs or 'satoshi' in kwargs:
        return self._deprecated_txprepare(*args, **kwargs)

    # Legacy positional form: first positional arg was a destination
    # string rather than an outputs list.  Guard on `args` being
    # non-empty: a purely keyword call such as txprepare(outputs=[...])
    # has no positional args and previously crashed with IndexError.
    if args and not isinstance(args[0], list):
        return self._deprecated_txprepare(*args, **kwargs)

    # Inner function enforces the modern argument names/positions.
    def _txprepare(outputs, feerate=None, minconf=None, utxos=None):
        payload = {
            "outputs": outputs,
            "feerate": feerate,
            "minconf": minconf,
            "utxos": utxos,
        }
        return self.call("txprepare", payload)

    return _txprepare(*args, **kwargs)
|
2019-06-05 09:00:05 +02:00
|
|
|
|
|
|
|
def txdiscard(self, txid):
    """Cancel a bitcoin transaction returned from txprepare.

    The outputs it was spending are released for other use.
    """
    return self.call("txdiscard", {"txid": txid})
|
|
|
|
|
|
|
|
def txsend(self, txid):
    """Sign and broadcast a bitcoin transaction returned from txprepare."""
    return self.call("txsend", {"txid": txid})
|
2019-10-15 02:53:41 +02:00
|
|
|
|
|
|
|
def signmessage(self, message):
    """Sign a message with this node's secret key."""
    return self.call("signmessage", {"message": message})
|
|
|
|
|
|
|
|
def checkmessage(self, message, zbase, pubkey=None):
    """Check if a message was signed (with a specific key).

    Use returned field ['verified'] to get result.
    """
    return self.call("checkmessage", {
        "message": message,
        "zbase": zbase,
        "pubkey": pubkey,
    })
|