2023-09-21 07:36:28 +02:00
|
|
|
from pyln.testing.utils import TEST_NETWORK, TIMEOUT, VALGRIND, DEPRECATED_APIS # noqa: F401
|
2022-09-19 02:49:53 +02:00
|
|
|
from pyln.testing.utils import env, only_one, wait_for, write_config, TailableProc, sync_blockheight, wait_channel_quiescent, get_tx_p2wsh_outnum, mine_funding_to_announce, scid_to_int # noqa: F401
|
2020-05-19 22:41:24 +02:00
|
|
|
import bitstring
|
2020-07-06 07:26:14 +02:00
|
|
|
from pyln.client import Millisatoshi
|
2023-07-27 23:37:52 +02:00
|
|
|
from pyln.testing.utils import EXPERIMENTAL_DUAL_FUND, EXPERIMENTAL_SPLICING
|
2021-12-29 04:41:15 +01:00
|
|
|
import time
|
2018-07-16 07:52:56 +02:00
|
|
|
|
2019-10-27 15:26:46 +01:00
|
|
|
# Whether deprecated-API compatibility is enabled; driven by the COMPAT
# environment variable (defaults to on).
COMPAT = env("COMPAT", "1") == "1"
|
2019-11-14 04:34:29 +01:00
|
|
|
|
2023-02-03 17:32:29 +01:00
|
|
|
# Big enough to make channels with 10k effective capacity, including Elements channels
# which have bigger txns
CHANNEL_SIZE = 50000
|
|
|
|
|
2022-06-27 01:16:14 +02:00
|
|
|
|
2022-06-27 01:04:50 +02:00
|
|
|
def default_ln_port(network: str) -> int:
    """Return the default Lightning p2p port for `network`.

    Raises KeyError for an unknown network name.
    """
    return {
        "bitcoin": 9735,
        "testnet": 19735,
        "regtest": 19846,
        "signet": 39735,
        "liquid-regtest": 20735,
        "liquid": 9735,
    }[network]
|
2019-11-14 04:34:29 +01:00
|
|
|
|
2022-06-27 01:16:14 +02:00
|
|
|
|
2020-05-19 22:41:24 +02:00
|
|
|
def hex_bits(features):
    """Return the feature bitmap for `features` as a big-endian hex string.

    `features` is a list of bit positions (ints).  The bitmap is padded to
    whole bytes (bit 0 is the least-significant bit of the last byte), as
    in BOLT feature-vector wire encoding.  An empty list yields ''.

    Implemented with stdlib int formatting instead of the third-party
    `bitstring` module.  This also fixes the old length computation
    `(max + 7) // 8 * 8`, which under-allocated when the highest feature
    bit was a multiple of 8 (e.g. [8] wrongly set bit 0; [0] crashed).
    """
    # We always pad to full bytes: bit N needs N // 8 + 1 bytes.
    flen = 0 if not features else (max(features) // 8 + 1) * 8
    if flen == 0:
        return ''
    # OR is idempotent, so duplicate feature entries are harmless.
    val = 0
    for f in features:
        val |= 1 << f
    # Big endian sucketh: two hex digits per byte, zero-padded on the left.
    return format(val, '0{}x'.format(flen // 4))
|
|
|
|
|
|
|
|
|
2023-10-16 02:36:25 +02:00
|
|
|
def expected_peer_features(extra=None):
    """Return the expected peer features hexstring for this configuration.

    `extra` is an optional list of additional feature bits to include.
    (Uses a None sentinel instead of a mutable `[]` default argument.)
    """
    extra = [] if extra is None else extra
    features = [0, 5, 6, 8, 11, 12, 14, 17, 19, 25, 27, 45, 47, 51]
    if EXPERIMENTAL_DUAL_FUND:
        # option_dual_fund
        features += [29]
    if EXPERIMENTAL_SPLICING:
        features += [35]   # option_quiesce
        features += [163]  # option_experimental_splice
    if TEST_NETWORK != 'liquid-regtest':
        # Anchors, except for elements
        features += [23]
    return hex_bits(features + extra)
|
2020-04-28 02:33:03 +02:00
|
|
|
|
|
|
|
|
|
|
|
# With the addition of the keysend plugin, we now send a different set of
|
|
|
|
# features for the 'node' and the 'peer' feature sets
|
2023-10-16 02:36:25 +02:00
|
|
|
def expected_node_features(extra=None):
    """Return the expected node features hexstring for this configuration.

    `extra` is an optional list of additional feature bits to include.
    (Uses a None sentinel instead of a mutable `[]` default argument.)
    """
    extra = [] if extra is None else extra
    features = [0, 5, 6, 8, 11, 12, 14, 17, 19, 25, 27, 45, 47, 51, 55]
    if EXPERIMENTAL_DUAL_FUND:
        # option_dual_fund
        features += [29]
    if EXPERIMENTAL_SPLICING:
        features += [35]   # option_quiesce
        features += [163]  # option_experimental_splice
    if TEST_NETWORK != 'liquid-regtest':
        # Anchors, except for elements
        features += [23]
    return hex_bits(features + extra)
|
2020-05-04 02:19:32 +02:00
|
|
|
|
|
|
|
|
2023-10-16 02:36:25 +02:00
|
|
|
def expected_channel_features(extra=None):
    """Return the expected channel features hexstring for this configuration.

    No channel features are currently expected; `extra` bits may be added.
    (Uses a None sentinel instead of a mutable `[]` default argument.)
    """
    extra = [] if extra is None else extra
    features = []
    return hex_bits(features + extra)
|
2020-03-19 00:46:17 +01:00
|
|
|
|
|
|
|
|
2020-08-28 04:43:57 +02:00
|
|
|
def move_matches(exp, mv):
    """Return True iff coin move `mv` matches the expected move `exp`.

    Compares type, credit/debit amounts, tags, and fees; a fee present on
    only one side is a mismatch.
    """
    msat = Millisatoshi
    if exp['type'] != mv['type']:
        return False
    if msat(exp['credit_msat']) != msat(mv['credit_msat']):
        return False
    if msat(exp['debit_msat']) != msat(mv['debit_msat']):
        return False
    if exp['tags'] != mv['tags']:
        return False
    if 'fees_msat' in exp:
        # Expected a fee: it must be present and equal.
        if 'fees_msat' not in mv:
            return False
        if msat(exp['fees_msat']) != msat(mv['fees_msat']):
            return False
    elif 'fees_msat' in mv:
        # Unexpected fee on the actual move.
        return False
    return True
|
|
|
|
|
|
|
|
|
2021-08-05 19:32:38 +02:00
|
|
|
def calc_lease_fee(amt, feerate, rates):
    """Compute the total lease fee for a channel lease.

    `rates` supplies 'lease_fee_base_msat', 'lease_fee_basis'
    (presumably basis points — verify against callers) and
    'funding_weight'; `amt` scales the proportional part, `feerate`
    scales the weight part.
    """
    proportional = amt * rates['lease_fee_basis'] // 10
    weight_fee = rates['funding_weight'] * feerate
    return rates['lease_fee_base_msat'] + proportional + weight_fee
|
|
|
|
|
|
|
|
|
2023-01-11 01:56:06 +01:00
|
|
|
def _dictify(balances):
    """Map each account_id in `balances['accounts']` to its Millisatoshi balance."""
    result = {}
    for acct in balances['accounts']:
        result[acct['account_id']] = Millisatoshi(acct['balance_msat'])
    return result
|
|
|
|
|
|
|
|
|
2021-12-10 16:46:42 +01:00
|
|
|
def check_balance_snaps(n, expected_bals):
    """Compare node `n`'s balance snapshots against `expected_bals`, in order.

    Raises Exception on the first snapshot whose per-account balances differ.
    """
    snapshots = n.rpc.listsnapshots()['balance_snapshots']
    for got, want in zip(snapshots, expected_bals):
        assert got['blockheight'] == want['blockheight']
        got_bals = _dictify(got)
        want_bals = _dictify(want)
        if got_bals != want_bals:
            raise Exception('Unexpected balance snap: {} vs {}'.format(got_bals, want_bals))
|
2021-12-10 16:46:42 +01:00
|
|
|
|
|
|
|
|
2020-06-23 03:46:35 +02:00
|
|
|
def check_coin_moves(n, account_id, expected_moves, chainparams):
    """Verify node `n`'s coin moves for `account_id` match `expected_moves`.

    `expected_moves` is a list whose entries are either a single expected
    move (dict) or a list of moves that may arrive in any order.
    Raises ValueError on a mismatch; asserts on malformed moves.
    """
    moves = n.rpc.call('listcoinmoves_plugin')['coin_moves']
    # moves can lag; wait for a few seconds if we don't have correct number.
    # then move on: we'll get details below.
    # BUGFIX: iterate the moves themselves, not enumerate() -- enumerate()
    # yields (index, move) tuples, so isinstance(m, list) was never True
    # and grouped expectations were miscounted.
    expected_count = 0
    for m in expected_moves:
        if isinstance(m, list):
            expected_count += len(m)
        else:
            expected_count += 1

    if len(moves) != expected_count:
        time.sleep(5)
        moves = n.rpc.call('listcoinmoves_plugin')['coin_moves']

    node_id = n.info['id']
    acct_moves = [m for m in moves if m['account_id'] == account_id]
    for mv in acct_moves:
        # Debug dump in roughly the shape an expected-move literal takes.
        print("{{'type': '{}', 'credit_msat': {}, 'debit_msat': {}, 'tags': '{}' , ['fees_msat'?: '{}']}},"
              .format(mv['type'],
                      Millisatoshi(mv['credit_msat']).millisatoshis,
                      Millisatoshi(mv['debit_msat']).millisatoshis,
                      mv['tags'],
                      mv['fees_msat'] if 'fees_msat' in mv else ''))
        assert mv['version'] == 2
        assert mv['node_id'] == node_id
        assert mv['timestamp'] > 0
        assert mv['coin_type'] == chainparams['bip173_prefix']
        # chain moves should have blockheights
        if mv['type'] == 'chain_mvt' and mv['account_id'] != 'external':
            assert mv['blockheight'] is not None

    for num, m in enumerate(expected_moves):
        # They can group things which are in any order.
        if isinstance(m, list):
            number_moves = len(m)
            for acct_move in acct_moves[:number_moves]:
                found = None
                for i in range(len(m)):
                    if move_matches(m[i], acct_move):
                        found = i
                        break
                if found is None:
                    raise ValueError("Unexpected move {} amongst {}".format(acct_move, m))
                # Consume the matched expectation so it can't match twice.
                del m[found]
            acct_moves = acct_moves[number_moves:]
        else:
            if not move_matches(m, acct_moves[0]):
                raise ValueError("Unexpected move {}: {} != {}".format(num, acct_moves[0], m))
            acct_moves = acct_moves[1:]

    # Every actual move must have been consumed by an expectation.
    assert acct_moves == []
|
|
|
|
|
2020-03-19 00:46:17 +01:00
|
|
|
|
|
|
|
def account_balance(n, account_id):
    """Return credits minus debits for `account_id` on node `n` as Millisatoshi.

    Asserts that at least one move exists for the account.
    """
    moves = dedupe_moves(n.rpc.call('listcoinmoves_plugin')['coin_moves'])
    relevant = [mv for mv in moves if mv['account_id'] == account_id]
    assert relevant

    total = Millisatoshi(0)
    for mv in relevant:
        total += Millisatoshi(mv['credit_msat'])
        total -= Millisatoshi(mv['debit_msat'])
    return total
|
|
|
|
|
|
|
|
|
2021-12-01 16:32:55 +01:00
|
|
|
def extract_utxos(moves):
    """Group coin moves into utxos.

    Returns {utxo_txid: [[deposit_move, spend_move_or_None], ...]}.
    Moves without a 'utxo_txid' field are ignored; a move carrying a
    'txid' is a spend and is paired (by vout) with the deposit already
    recorded for that utxo.
    """
    utxos = {}
    for move in moves:
        if 'utxo_txid' not in move:
            continue
        entries = utxos.setdefault(move['utxo_txid'], [])
        if 'txid' not in move:
            # A deposit: record it, awaiting a possible future spend.
            entries.append([move, None])
            continue
        # it's a withdrawal, find the deposit and add to the pair
        for entry in entries:
            if entry[0]['vout'] == move['vout']:
                entry[1] = move
                assert entry[0]['output_msat'] == move['output_msat']
                break
    return utxos
|
|
|
|
|
|
|
|
|
|
|
|
def print_utxos(utxos):
    """Debug-print a utxo map: the txid, then one line per (deposit, spend) pair."""
    for txid, pairs in utxos.items():
        print(txid)
        for pair in pairs:
            deposit, spend = pair
            if spend:
                print('\t', deposit['account_id'], deposit['tags'], spend['tags'], spend['txid'])
            else:
                print('\t', deposit['account_id'], deposit['tags'], None, None)
|
2021-12-01 16:32:55 +01:00
|
|
|
|
|
|
|
|
|
|
|
def utxos_for_channel(utxoset, channel_id):
    """Filter `utxoset` down to utxos related to `channel_id`.

    A utxo is relevant if its deposit is credited to the channel account,
    or if its txid was produced by spending an already-relevant utxo.
    """
    relevant_txids = []
    chan_utxos = {}

    def _add_relevant(txid, utxo):
        chan_utxos.setdefault(txid, []).append(utxo)

    for txid, utxo_list in utxoset.items():
        for utxo in utxo_list:
            is_chan = utxo[0]['account_id'] == channel_id
            if not is_chan and txid not in relevant_txids:
                continue
            _add_relevant(txid, utxo)
            if is_chan:
                relevant_txids.append(txid)
            if utxo[1]:
                relevant_txids.append(utxo[1]['txid'])

    # if they're not well ordered, we'll leave some txids out
    for txid in relevant_txids:
        if txid not in chan_utxos:
            chan_utxos[txid] = utxoset[txid]

    return chan_utxos
|
|
|
|
|
|
|
|
|
|
|
|
def matchup_events(u_set, evs, chans, tag_list):
    """Match the utxo pair set `u_set` against the expected events `evs`.

    Each event `ev` is (account, tags, spend-info, marker):
      - spend-info None means the utxo must be unspent;
      - a tuple of (spend-tags, marker) pairs annotates possible futures;
      - otherwise spend-info is the expected spend tags list.
    'cidN' accounts are resolved through `chans`.  Spend txids (except
    those spent to the miner) are recorded into `tag_list` under the
    event's marker.  Returns the utxo_txid taken from the first pair.
    Asserts if anything fails to match; consumes all of `u_set`.
    """
    assert len(u_set) == len(evs) and len(u_set) > 0

    txid = u_set[0][0]['utxo_txid']
    for ev in evs:
        found = False
        for u in u_set:
            # We use 'cid' as a placeholder for the channel id, since it's
            # dynamic, but we need to sub it in. 'chans' is a list of cids,
            # which are mapped to `cid` tags' suffixes. eg. 'cid1' is the
            # first cid in the chans list
            if ev[0][:3] == 'cid':
                idx = int(ev[0][3:])
                acct = chans[idx - 1]
            else:
                acct = ev[0]

            if u[0]['account_id'] != acct or u[0]['tags'] != ev[1]:
                continue

            if ev[2] is None:
                # Event expects an unspent utxo.
                assert u[1] is None
                found = True
                u_set.remove(u)
                break

            # ugly hack to annotate two possible futures for a utxo
            if type(ev[2]) is tuple:
                tag = u[1]['tags'] if u[1] else u[1]
                assert tag in [x[0] for x in ev[2]]
                if not u[1]:
                    found = True
                    u_set.remove(u)
                    break
                for x in ev[2]:
                    if x[0] == u[1]['tags'] and 'to_miner' not in u[1]['tags']:
                        # Save the 'spent to' txid in the tag-list
                        tag_list[x[1]] = u[1]['txid']
            else:
                assert ev[2] == u[1]['tags']
                # Save the 'spent to' txid in the tag-list
                if 'to_miner' not in u[1]['tags']:
                    tag_list[ev[3]] = u[1]['txid']

            # NOTE(review): u_set is mutated here while the `for u` loop is
            # still live (no break) — iteration continues on the shrunken
            # list. Preserved as-is; restructuring could change matching.
            found = True
            u_set.remove(u)

        assert found

    # Verify we used them all up
    assert len(u_set) == 0
    return txid
|
|
|
|
|
|
|
|
|
2021-12-01 17:08:32 +01:00
|
|
|
def dedupe_moves(moves):
    """Drop duplicate onchain moves, keeping the first occurrence of each.

    Identity is (utxo_txid, vout, spending-txid-or-'xx').  Moves without a
    'utxo_txid' are passed through untouched.
    """
    seen = {}
    deduped = []
    for move in moves:
        # Dupes only pertain to onchain moves?
        if 'utxo_txid' not in move:
            deduped.append(move)
            continue

        key = '{}:{};{}'.format(move['utxo_txid'], move['vout'], move.get('txid', 'xx'))
        if key in seen:
            continue
        seen[key] = move
        deduped.append(move)
    return deduped
|
|
|
|
|
|
|
|
|
2022-07-19 09:34:39 +02:00
|
|
|
def inspect_check_actual(txids, channel_id, actual, exp):
    """Match each expected output in `exp` against `actual['outputs']`.

    `exp` entries are (account, [output_tag], spend_tags-or-None, marker);
    a 'cid'-prefixed account stands for `channel_id`.  For spent outputs,
    (marker, spending_txid) is appended to `txids`, which is returned.
    Asserts on any mismatch.
    """
    assert len(actual['outputs']) == len(exp)
    for want in exp:
        # find the event in actual that matches
        matched = False
        for out in actual['outputs']:
            if want[0].startswith('cid'):
                if out['account'] != channel_id:
                    continue
            elif out['account'] != want[0]:
                continue

            if want[1][0] != out['output_tag']:
                continue

            if want[2]:
                assert want[2][0] == out['spend_tag']
                txids.append((want[3], out['spending_txid']))
            else:
                assert 'spend_tag' not in out
            matched = True
            break
        assert matched

    return txids
|
|
|
|
|
|
|
|
|
|
|
|
def check_inspect_channel(n, channel_id, expected_txs):
    """Check bkpr_inspect output for `channel_id` against `expected_txs`.

    `expected_txs` maps markers to expected-output lists (the format
    consumed by inspect_check_actual).  Starting from the first expected
    entry, follows spending txids breadth-first until every actual tx has
    been matched exactly once.  Asserts on any mismatch or leftover tx.
    """
    actual_txs = n.rpc.bkpr_inspect(channel_id)['txs']
    assert len(actual_txs) == len(expected_txs.keys())
    # start at the top
    exp = list(expected_txs.values())[0]
    actual = actual_txs[0]

    # Worklist of (marker, spending_txid) pairs still to inspect.
    txids = []

    exp_counter = 1
    inspect_check_actual(txids, channel_id, actual, exp)
    actual_txs.remove(actual)

    # NOTE: inspect_check_actual appends to `txids` while we iterate it;
    # this is a deliberate worklist traversal of the spend graph.
    for (marker, txid) in txids:
        actual = None
        for a in actual_txs:
            if a['txid'] == txid:
                actual = a
                break
        assert actual
        exp = expected_txs[marker]
        inspect_check_actual(txids, channel_id, actual, exp)

        # after we've inspected it, remove it
        actual_txs.remove(actual)
        exp_counter += 1

    # Did we inspect everything?
    assert len(actual_txs) == 0
    assert exp_counter == len(expected_txs.keys())
|
|
|
|
|
|
|
|
|
2021-12-01 16:32:55 +01:00
|
|
|
def check_utxos_channel(n, chans, expected, exp_tag_list=None, filter_channel=None):
    """Check node `n`'s coin-move utxos against the `expected` tag->events map.

    `chans` maps 'cidN' placeholders to channel ids.  If `filter_channel`
    is set, only utxos related to that channel are considered.  If
    `exp_tag_list` is given, discovered tag->txid entries are checked
    against it.  Returns the tag->txid mapping built while matching.
    """
    tag_list = {}
    moves = n.rpc.call('listcoinmoves_plugin')['coin_moves']

    utxos = extract_utxos(dedupe_moves(moves))

    if filter_channel:
        utxos = utxos_for_channel(utxos, filter_channel)

    for tag, evs in expected.items():
        # A tag we've already seen points at a specific txid; otherwise
        # take the first remaining utxo set.
        if tag in tag_list:
            u_set = utxos[tag_list[tag]]
        else:
            u_set = list(utxos.values())[0]

        txid = matchup_events(u_set, evs, chans, tag_list)

        tag_list.setdefault(tag, txid)

        # Remove checked set from utxos
        del utxos[txid]

    # Verify that we went through all of the utxos
    assert len(utxos) == 0

    # Verify that the expected tags match the found tags
    if exp_tag_list:
        for tag, txid in tag_list.items():
            if tag in exp_tag_list:
                assert exp_tag_list[tag] == txid

    return tag_list
|
|
|
|
|
|
|
|
|
2020-03-19 00:46:17 +01:00
|
|
|
def first_channel_id(n1, n2):
    """Return the channel_id of the only channel between nodes n1 and n2."""
    channels = n1.rpc.listpeerchannels(n2.info['id'])['channels']
    return only_one(channels)['channel_id']
|
2020-08-14 03:30:39 +02:00
|
|
|
|
|
|
|
|
2022-08-08 23:49:29 +02:00
|
|
|
def first_scid(n1, n2):
    """Return the short_channel_id of the only channel between nodes n1 and n2."""
    channels = n1.rpc.listpeerchannels(n2.info['id'])['channels']
    return only_one(channels)['short_channel_id']
|
2022-08-08 23:49:29 +02:00
|
|
|
|
|
|
|
|
2023-06-29 02:14:09 +02:00
|
|
|
def basic_fee(feerate, anchor_expected):
    """Return the fee for a basic commitment transaction at `feerate`.

    Anchor-style commitments (option_anchor_outputs /
    option_anchors_zero_fee_htlc_tx) weigh 1124; pre-anchor ones 724.
    """
    weight = 1124 if anchor_expected else 724
    return (weight * feerate) // 1000
|
2021-04-18 06:38:22 +02:00
|
|
|
|
|
|
|
|
2021-06-28 07:08:10 +02:00
|
|
|
def closing_fee(feerate, num_outputs):
    """Return the fee for a mutual-close tx with `num_outputs` (1 or 2) outputs.

    Assumes p2tr outputs.
    """
    assert num_outputs in (1, 2)
    # Per-output bytes (amount + script) times the witness scale factor of 4.
    per_output = (8 + 1 + 1 + 1 + 32) * 4
    weight = 428 + per_output * num_outputs
    return (weight * feerate) // 1000
|
|
|
|
|
|
|
|
|
2021-04-18 06:38:22 +02:00
|
|
|
def scriptpubkey_addr(scriptpubkey):
    """Extract the address from a decoded scriptPubKey dict, or None.

    Older bitcoind returns an 'addresses' array; modern bitcoind returns
    a single 'address' field.  'addresses' takes precedence when both
    are present, matching the original lookup order.
    """
    if 'addresses' in scriptpubkey:
        return scriptpubkey['addresses'][0]
    if 'address' in scriptpubkey:
        # Modern bitcoin (at least, git master)
        return scriptpubkey['address']
    return None
|