pytest: add routine to generate gossmap by topology.

We marshal it into the "compressed" format and get the decompresser to
build the actual gossmap.

Signed-off-by: Rusty Russell <rusty@rustcorp.com.au>
Rusty Russell 2024-08-07 10:15:55 +09:30
parent 99e8e9246e
commit 53faab0838
2 changed files with 196 additions and 1 deletion
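
The new helper is meant to be driven from a test: describe the topology as a list of GenChannel objects, let generate_gossip_store() marshal it into the compressed format, and have devtools/gossmap-compress decompress that into a real gossip_store. A minimal sketch of that flow, using the names introduced in this diff (the two-channel topology and the unstarted node l1 are illustrative, mirroring the new test below):

    chans = [GenChannel(0, 1), GenChannel(1, 2, capacity_sats=5000)]
    gsfile = generate_gossip_store(chans)
    # Install the generated store before the node first starts.
    shutil.copy(gsfile.name,
                os.path.join(l1.daemon.lightning_dir, TEST_NETWORK, 'gossip_store'))
    l1.start()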


@@ -6,7 +6,7 @@ from utils import (
    wait_for, TIMEOUT, only_one, sync_blockheight,
    expected_node_features,
    mine_funding_to_announce, default_ln_port, CHANNEL_SIZE,
    first_scid,
    first_scid, generate_gossip_store, GenChannel
)
import json
@@ -18,6 +18,7 @@ import struct
import subprocess
import time
import unittest
import shutil
import socket
@@ -2139,6 +2140,81 @@ def test_gossip_throttle(node_factory, bitcoind, chainparams):
    assert set(out2) == set(out4)


def test_generate_gossip_store(node_factory):
    l1 = node_factory.get_node(start=False)

    chans = [GenChannel(0, 1),
             GenChannel(0, 2, capacity_sats=5000),
             GenChannel(0, 3),
             GenChannel(0, 4)]
    chans[2].half[0] = GenChannel.Half(enabled=False,
                                       htlc_min=10,
                                       htlc_max=5000000 - 10,
                                       basefee=10,
                                       propfee=10)
    chans[2].half[1] = GenChannel.Half(htlc_min=11,
                                       htlc_max=5000000 - 11,
                                       basefee=11,
                                       propfee=11)
    gsfile = generate_gossip_store(chans)

    # Set up l1 with this as the gossip_store
    shutil.copy(gsfile.name, os.path.join(l1.daemon.lightning_dir, TEST_NETWORK, 'gossip_store'))
    l1.start()

    nodes = ['03c581bf310c4c97b05e5e6fed2f82d872f388ec9ab7f1feddfd5380ddb3c6531c',
             '03091f559e2704cd80e41cd103ca4a60fd91010927674016b09f40c1d450368cf4',
             '0255a0e1286c832286eda137bbefe17f21af265a08bbea481a6ea96b9f4b5f84ac',
             '02ec99a74a8c8d10853e1a3b0806556abda6798a68a0cedca4c766b5f6cf314f22',
             '02c5ad36f9c80ca70d4f88d50f17be2f1f481f37086dbf3433473765a0027ecd63']

    expected = []
    chancount = 0
    for c in chans:
        for d in (0, 1):
            # listchannels direction 0 always lesser -> greater.
            if nodes[c.node1] < nodes[c.node2]:
                expected_dir = d
            else:
                expected_dir = d ^ 1

            channel_flags = expected_dir
            if not c.half[d].enabled:
                active = False
                channel_flags |= 2
            else:
                active = True

            if d == 0:
                n1 = nodes[c.node1]
                n2 = nodes[c.node2]
            else:
                n1 = nodes[c.node2]
                n2 = nodes[c.node1]

            expected.append({'source': n1,
                             'destination': n2,
                             'short_channel_id': '{}x{}x{}'.format(c.node1, c.node2, chancount),
                             'direction': expected_dir,
                             'public': True,
                             'amount_msat': c.capacity_sats * 1000,
                             'message_flags': 1,
                             'channel_flags': channel_flags,
                             'active': active,
                             'last_update': 0,
                             'base_fee_millisatoshi': c.half[d].basefee,
                             'fee_per_millionth': c.half[d].propfee,
                             'delay': c.half[d].delay,
                             'htlc_minimum_msat': c.half[d].htlc_min,
                             'htlc_maximum_msat': c.half[d].htlc_max,
                             'features': ''})
        chancount += 1

    # Order is not well-defined, and sets don't like dicts :(
    lchans = sorted(l1.rpc.listchannels()['channels'], key=lambda x: x['source'] + x['destination'])
    expected = sorted(expected, key=lambda x: x['source'] + x['destination'])
    assert lchans == expected


def test_seeker_first_peer(node_factory, bitcoind):
    l1, l2, l3, l4, l5 = node_factory.get_nodes(5)


@@ -3,6 +3,8 @@ from pyln.testing.utils import env, only_one, wait_for, write_config, TailablePr
import bitstring
from pyln.client import Millisatoshi
from pyln.testing.utils import EXPERIMENTAL_DUAL_FUND, EXPERIMENTAL_SPLICING
import subprocess
import tempfile
import time
COMPAT = env("COMPAT", "1") == "1"
@@ -430,3 +432,120 @@ def scriptpubkey_addr(scriptpubkey):
        # Modern bitcoin (at least, git master)
        return scriptpubkey['address']
    return None


class GenChannel(object):
    class Half(object):
        def __init__(self, htlc_max, enabled=True, htlc_min=0, basefee=0, propfee=1, delay=6):
            self.enabled = enabled
            self.htlc_min = htlc_min
            self.htlc_max = htlc_max
            self.basefee = basefee
            self.propfee = propfee
            self.delay = delay

    def __init__(self, node1, node2, capacity_sats=1000000):
        self.node1 = node1
        self.node2 = node2
        self.capacity_sats = capacity_sats
        self.half = [GenChannel.Half(htlc_max=capacity_sats * 1000),
                     GenChannel.Half(htlc_max=capacity_sats * 1000)]


def generate_gossip_store(channels):
    """Returns a gossip store file with the given channels in it.
    """
    nodes = []

    def write_bignum(outf, val):
        if val < 253:
            outf.write(val.to_bytes(1, byteorder='big'))
        elif val <= 0xFFFF:
            outf.write(b'\xFD')
            outf.write(val.to_bytes(2, byteorder='big'))
        elif val <= 0xFFFFFFFF:
            outf.write(b'\xFE')
            outf.write(val.to_bytes(4, byteorder='big'))
        else:
            outf.write(b'\xFF')
            outf.write(val.to_bytes(8, byteorder='big'))
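
    # Worked examples, derived directly from the branches above (same
    # 0xFD/0xFE/0xFF prefixes as Bitcoin's CompactSize, but values are
    # big-endian):
    #   write_bignum(f, 252)   -> fc
    #   write_bignum(f, 253)   -> fd 00fd
    #   write_bignum(f, 5000)  -> fd 1388
    #   write_bignum(f, 70000) -> fe 00011170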

    def write_dumb_template(outf, channels, propname, illegalvals=[]):
        """We don't bother uniquifying, just one entry per chan dir"""
        # Template is simply all the values
        write_bignum(outf, len(channels) * 2)
        for c in channels:
            for d in (0, 1):
                v = getattr(c.half[d], propname)
                assert v not in illegalvals
                write_bignum(outf, v)
        # Now each entry for each channel half points into the values.
        for i in range(0, len(channels) * 2):
            write_bignum(outf, i)
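
    # Illustration (assumed values): for two channels whose halves have
    # propfee 1, 1, 10 and 10, write_dumb_template(f, chans, 'propfee')
    # emits the count 4, the values 1 1 10 10, then the identity index
    # list 0 1 2 3, i.e. one template per channel direction, no deduplication.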

    # First create nodes
    for c in channels:
        if c.node1 not in nodes:
            nodes.append(c.node1)
        if c.node2 not in nodes:
            nodes.append(c.node2)

    cfile = tempfile.NamedTemporaryFile(prefix='gs-compressed-')

    # <HEADER> := "GOSSMAP_COMPRESSv1\0"
    cfile.write(b'GOSSMAP_COMPRESSv1\x00')

    # <CHANNEL_ENDS> := {channel_count} {start_nodeidx}*{channel_count} {end_nodeidx}*{channel_count}
    write_bignum(cfile, len(channels))
    for c in channels:
        write_bignum(cfile, nodes.index(c.node1))
    for c in channels:
        write_bignum(cfile, nodes.index(c.node2))

    # <DISABLEDS> := <DISABLED>* {channel_count*2}
    # <DISABLED> := {chanidx}*2+{direction}
    for i, c in enumerate(channels):
        for d in (0, 1):
            if not c.half[d].enabled:
                write_bignum(cfile, i * 2 + d)
    write_bignum(cfile, len(channels) * 2)
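    # Example (assumed topology): with four channels where only channel 2's
    # direction 0 is disabled, this section is the single entry 4 (= 2*2+0)
    # followed by the terminator 8 (= channel_count*2), a value no real
    # channel half can have.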

    # <CAPACITIES> := <CAPACITY_TEMPLATES> {channel_count}*{capacity_idx}
    # <CAPACITY_TEMPLATES> := {capacity_count} {channel_count}*{capacity}
    max_htlc_defaults = []
    write_bignum(cfile, len(channels))
    for c in channels:
        write_bignum(cfile, c.capacity_sats)
        max_htlc_defaults.append(c.capacity_sats)
        max_htlc_defaults.append(c.capacity_sats)
    for i, _ in enumerate(channels):
        write_bignum(cfile, i)
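    # For instance (assumed values): capacities of 1000000, 5000, 1000000 and
    # 1000000 sats are written as four templates, and channel i then simply
    # points at template i; nothing is shared between channels here either.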

    # <HTLC_MINS> := <HTLC_MIN_TEMPLATES> {channel_count*2}*{htlc_min_idx}
    # <HTLC_MIN_TEMPLATES> := {htlc_min_count} {htlc_min_count}*{htlc_min}
    write_dumb_template(cfile, channels, 'htlc_min')

    # <HTLC_MAXS> := <HTLC_MAX_TEMPLATES> {channel_count*2}*{htlc_max_idx}
    # <HTLC_MAX_TEMPLATES> := {htlc_max_count} {htlc_max_count}*{htlc_max}
    # 0 and 1 have special meanings, don't use them!
    write_dumb_template(cfile, channels, 'htlc_max', [0, 1])

    # <BASEFEES> := <BASEFEE_TEMPLATES> {channel_count*2}*{basefee_idx}
    # <BASEFEE_TEMPLATES> := {basefee_count} {basefee_count}*{basefee}
    write_dumb_template(cfile, channels, 'basefee')

    # <PROPFEES> := <PROPFEE_TEMPLATES> {channel_count*2}*{propfee_idx}
    # <PROPFEE_TEMPLATES> := {propfee_count} {propfee_count}*{propfee}
    write_dumb_template(cfile, channels, 'propfee')

    # <DELAYS> := <DELAY_TEMPLATES> {channel_count*2}*{delay_idx}
    # <DELAY_TEMPLATES> := {delay_count} {delay_count}*{delay}
    write_dumb_template(cfile, channels, 'delay')

    cfile.flush()

    # Run the decompressor to turn the compressed description into a real
    # gossip_store file.
    outfile = tempfile.NamedTemporaryFile(prefix='gossip-store-')
    subprocess.run(['devtools/gossmap-compress',
                    'decompress',
                    cfile.name,
                    outfile.name],
                   check=True)
    cfile.close()

    # The caller must keep the returned NamedTemporaryFile referenced: the
    # underlying file is removed once it is closed or garbage-collected.
    return outfile