#!/usr/bin/env python3
# Copyright (c) 2014-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test descendant package tracking code."""

from decimal import Decimal

from test_framework.messages import COIN
from test_framework.p2p import P2PTxInvStore
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
    assert_equal,
    assert_raises_rpc_error,
    satoshi_round,
)

# default limits
MAX_ANCESTORS = 25
MAX_DESCENDANTS = 25
# custom limits for node1
MAX_ANCESTORS_CUSTOM = 5
MAX_DESCENDANTS_CUSTOM = 10
assert MAX_DESCENDANTS_CUSTOM >= MAX_ANCESTORS_CUSTOM

class MempoolPackagesTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 2
        self.extra_args = [
            [
                "-maxorphantx=1000",
                "-whitelist=noban@127.0.0.1",  # immediate tx relay
            ],
            [
                "-maxorphantx=1000",
                "-limitancestorcount={}".format(MAX_ANCESTORS_CUSTOM),
                "-limitdescendantcount={}".format(MAX_DESCENDANTS_CUSTOM),
            ],
        ]

    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()

    # Build a transaction that spends parent_txid:vout
    # Return amount sent
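    # The fee is implicit: (value - fee) is split evenly across num_outputs outputs,
    # so the created transaction pays `fee` to the mempool (up to satoshi rounding).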
    def chain_transaction(self, node, parent_txid, vout, value, fee, num_outputs):
        send_value = satoshi_round((value - fee)/num_outputs)
        inputs = [ {'txid' : parent_txid, 'vout' : vout} ]
        outputs = {}
        for _ in range(num_outputs):
            outputs[node.getnewaddress()] = send_value
        rawtx = node.createrawtransaction(inputs, outputs)
        signedtx = node.signrawtransactionwithwallet(rawtx)
        txid = node.sendrawtransaction(signedtx['hex'])
        fulltx = node.getrawtransaction(txid, 1)
        assert len(fulltx['vout']) == num_outputs # make sure we didn't generate a change output
        return (txid, send_value)

    def run_test(self):
        # Mine some blocks and have them mature.
        self.nodes[0].add_p2p_connection(P2PTxInvStore()) # keep track of invs
        self.nodes[0].generate(101)
        utxo = self.nodes[0].listunspent(10)
        txid = utxo[0]['txid']
        vout = utxo[0]['vout']
        value = utxo[0]['amount']

        fee = Decimal("0.0001")
        # MAX_ANCESTORS transactions off a confirmed tx should be fine
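        # Each new transaction spends output 0 of the previous one, building a single
        # chain of MAX_ANCESTORS transactions in node0's mempool.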
        chain = []
        witness_chain = []
        for _ in range(MAX_ANCESTORS):
            (txid, sent_value) = self.chain_transaction(self.nodes[0], txid, 0, value, fee, 1)
            value = sent_value
            chain.append(txid)
            # We need the wtxids to check P2P announcements
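            # decoderawtransaction reports the wtxid in its 'hash' field (it equals
            # the txid only for non-segwit transactions)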
            fulltx = self.nodes[0].getrawtransaction(txid)
            witnesstx = self.nodes[0].decoderawtransaction(fulltx, True)
            witness_chain.append(witnesstx['hash'])

        # Wait until mempool transactions have passed initial broadcast (sent inv and received getdata)
        # Otherwise, getrawmempool may be inconsistent with getmempoolentry if the unbroadcast state changes in between
        self.nodes[0].p2p.wait_for_broadcast(witness_chain)

        # Check that the mempool has MAX_ANCESTORS transactions in it, and that the
        # descendant and ancestor counts and fees look correct
        mempool = self.nodes[0].getrawmempool(True)
        assert_equal(len(mempool), MAX_ANCESTORS)
        descendant_count = 1
        descendant_fees = 0
        descendant_vsize = 0

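        # The mempool currently contains only this one chain, so summing over the
        # whole mempool gives the ancestor totals of the last (youngest) transaction.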
        ancestor_vsize = sum([mempool[tx]['vsize'] for tx in mempool])
        ancestor_count = MAX_ANCESTORS
        ancestor_fees = sum([mempool[tx]['fee'] for tx in mempool])

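        # The loop below walks the chain from the last descendant back to the first
        # ancestor: descendant totals accumulate as we go, while ancestor totals are
        # peeled off one transaction at a time.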
        descendants = []
        ancestors = list(chain)
        for x in reversed(chain):
            # Check that getmempoolentry is consistent with getrawmempool
            entry = self.nodes[0].getmempoolentry(x)
            assert_equal(entry, mempool[x])

            # Check that the descendant calculations are correct
            assert_equal(mempool[x]['descendantcount'], descendant_count)
            descendant_fees += mempool[x]['fee']
            assert_equal(mempool[x]['modifiedfee'], mempool[x]['fee'])
            assert_equal(mempool[x]['fees']['base'], mempool[x]['fee'])
            assert_equal(mempool[x]['fees']['modified'], mempool[x]['modifiedfee'])
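            # The 'descendantfees'/'ancestorfees' fields are denominated in satoshis,
            # while the 'fees' object is in BTC, hence the COIN conversions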
            assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN)
            assert_equal(mempool[x]['fees']['descendant'], descendant_fees)
            descendant_vsize += mempool[x]['vsize']
            assert_equal(mempool[x]['descendantsize'], descendant_vsize)
            descendant_count += 1

            # Check that ancestor calculations are correct
            assert_equal(mempool[x]['ancestorcount'], ancestor_count)
            assert_equal(mempool[x]['ancestorfees'], ancestor_fees * COIN)
            assert_equal(mempool[x]['ancestorsize'], ancestor_vsize)
            ancestor_vsize -= mempool[x]['vsize']
            ancestor_fees -= mempool[x]['fee']
            ancestor_count -= 1

            # Check that parent/child list is correct
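            # In a simple chain each tx has at most one parent and one child:
            # descendants[-1:] is the direct child (empty for the last tx in the chain)
            # and ancestors[-2:-1] is the direct parent (empty for the first tx)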
            assert_equal(mempool[x]['spentby'], descendants[-1:])
            assert_equal(mempool[x]['depends'], ancestors[-2:-1])

            # Check that getmempooldescendants is correct
            assert_equal(sorted(descendants), sorted(self.nodes[0].getmempooldescendants(x)))

            # Check getmempooldescendants verbose output is correct
            for descendant, dinfo in self.nodes[0].getmempooldescendants(x, True).items():
                assert_equal(dinfo['depends'], [chain[chain.index(descendant)-1]])
                if dinfo['descendantcount'] > 1:
                    assert_equal(dinfo['spentby'], [chain[chain.index(descendant)+1]])
                else:
                    assert_equal(dinfo['spentby'], [])
            descendants.append(x)

            # Check that getmempoolancestors is correct
            ancestors.remove(x)
            assert_equal(sorted(ancestors), sorted(self.nodes[0].getmempoolancestors(x)))

            # Check that getmempoolancestors verbose output is correct
            for ancestor, ainfo in self.nodes[0].getmempoolancestors(x, True).items():
                assert_equal(ainfo['spentby'], [chain[chain.index(ancestor)+1]])
                if ainfo['ancestorcount'] > 1:
                    assert_equal(ainfo['depends'], [chain[chain.index(ancestor)-1]])
                else:
                    assert_equal(ainfo['depends'], [])

        # Check that getmempoolancestors/getmempooldescendants correctly handle verbose=true
        v_ancestors = self.nodes[0].getmempoolancestors(chain[-1], True)
        assert_equal(len(v_ancestors), len(chain)-1)
        for x in v_ancestors.keys():
            assert_equal(mempool[x], v_ancestors[x])
        assert chain[-1] not in v_ancestors.keys()

        v_descendants = self.nodes[0].getmempooldescendants(chain[0], True)
        assert_equal(len(v_descendants), len(chain)-1)
        for x in v_descendants.keys():
            assert_equal(mempool[x], v_descendants[x])
        assert chain[0] not in v_descendants.keys()

        # Check that ancestor modified fees include fee deltas from
        # prioritisetransaction
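        # fee_delta is given in satoshis; the 1000 here corresponds to the
        # Decimal('0.00001') BTC added in the assertions below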
        self.nodes[0].prioritisetransaction(txid=chain[0], fee_delta=1000)
        mempool = self.nodes[0].getrawmempool(True)
        ancestor_fees = 0
        for x in chain:
            ancestor_fees += mempool[x]['fee']
            assert_equal(mempool[x]['fees']['ancestor'], ancestor_fees + Decimal('0.00001'))
            assert_equal(mempool[x]['ancestorfees'], ancestor_fees * COIN + 1000)

        # Undo the prioritisetransaction for later tests
        self.nodes[0].prioritisetransaction(txid=chain[0], fee_delta=-1000)

        # Check that descendant modified fees include fee deltas from
        # prioritisetransaction
        self.nodes[0].prioritisetransaction(txid=chain[-1], fee_delta=1000)
        mempool = self.nodes[0].getrawmempool(True)

        descendant_fees = 0
        for x in reversed(chain):
            descendant_fees += mempool[x]['fee']
            assert_equal(mempool[x]['fees']['descendant'], descendant_fees + Decimal('0.00001'))
            assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN + 1000)

        # Adding one more transaction onto the chain should fail.
        assert_raises_rpc_error(-26, "too-long-mempool-chain", self.chain_transaction, self.nodes[0], txid, vout, value, fee, 1)

        # Check that prioritising a tx before it's added to the mempool works
        # First clear the mempool by mining a block.
        self.nodes[0].generate(1)
        self.sync_blocks()
        assert_equal(len(self.nodes[0].getrawmempool()), 0)
        # Prioritise a transaction that has been mined, then add it back to the
        # mempool by using invalidateblock.
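        # The fee delta set here is remembered by the node even though the tx is not
        # currently in the mempool, and is applied when invalidateblock puts it back.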
        self.nodes[0].prioritisetransaction(txid=chain[-1], fee_delta=2000)
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
        # Keep node1's tip synced with node0
        self.nodes[1].invalidateblock(self.nodes[1].getbestblockhash())

        # Now check that the transaction is in the mempool, with the right modified fee
        mempool = self.nodes[0].getrawmempool(True)

        descendant_fees = 0
        for x in reversed(chain):
            descendant_fees += mempool[x]['fee']
            if (x == chain[-1]):
                assert_equal(mempool[x]['modifiedfee'], mempool[x]['fee']+satoshi_round(0.00002))
                assert_equal(mempool[x]['fees']['modified'], mempool[x]['fee']+satoshi_round(0.00002))
            assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN + 2000)
            assert_equal(mempool[x]['fees']['descendant'], descendant_fees+satoshi_round(0.00002))

        # Check that node1's mempool is as expected (-> custom ancestor limit)
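        # node1 stopped accepting transactions from the chain once its
        # -limitancestorcount limit (MAX_ANCESTORS_CUSTOM) was reached, so only the
        # first MAX_ANCESTORS_CUSTOM of them are in its mempool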
        mempool0 = self.nodes[0].getrawmempool(False)
        mempool1 = self.nodes[1].getrawmempool(False)
        assert_equal(len(mempool1), MAX_ANCESTORS_CUSTOM)
        assert set(mempool1).issubset(set(mempool0))
        for tx in chain[:MAX_ANCESTORS_CUSTOM]:
            assert tx in mempool1
        # TODO: more detailed check of node1's mempool (fees etc.)
        # check transaction unbroadcast info (should be false if in both mempools)
        mempool = self.nodes[0].getrawmempool(True)
        for tx in mempool:
            assert_equal(mempool[tx]['unbroadcast'], False)

        # TODO: test ancestor size limits

        # Now test descendant chain limits
        txid = utxo[1]['txid']
        value = utxo[1]['amount']
        vout = utxo[1]['vout']

        transaction_package = []
        tx_children = []
        # First create one parent tx with 10 children
        (txid, sent_value) = self.chain_transaction(self.nodes[0], txid, vout, value, fee, 10)
        parent_transaction = txid
        for i in range(10):
            transaction_package.append({'txid': txid, 'vout': i, 'amount': sent_value})

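        # Each iteration of the loop below spends the oldest queued output and creates
        # ten new ones, growing the package breadth-first off the parent until
        # MAX_DESCENDANTS transactions (including the parent) are in the mempool.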
        # Sign and send up to MAX_DESCENDANTS transactions chained off the parent tx
        chain = [] # save sent txs for the purpose of checking node1's mempool later (see below)
        for _ in range(MAX_DESCENDANTS - 1):
            utxo = transaction_package.pop(0)
            (txid, sent_value) = self.chain_transaction(self.nodes[0], utxo['txid'], utxo['vout'], utxo['amount'], fee, 10)
            chain.append(txid)
            if utxo['txid'] == parent_transaction:
                tx_children.append(txid)
            for j in range(10):
                transaction_package.append({'txid': txid, 'vout': j, 'amount': sent_value})

        mempool = self.nodes[0].getrawmempool(True)
        assert_equal(mempool[parent_transaction]['descendantcount'], MAX_DESCENDANTS)
        assert_equal(sorted(mempool[parent_transaction]['spentby']), sorted(tx_children))

        for child in tx_children:
            assert_equal(mempool[child]['depends'], [parent_transaction])

        # Sending one more chained transaction will fail
        utxo = transaction_package.pop(0)
        assert_raises_rpc_error(-26, "too-long-mempool-chain", self.chain_transaction, self.nodes[0], utxo['txid'], utxo['vout'], utxo['amount'], fee, 10)

        # Check that node1's mempool is as expected, containing:
        # - txs from previous ancestor test (-> custom ancestor limit)
        # - parent tx for descendant test
        # - txs chained off parent tx (-> custom descendant limit)
        self.wait_until(lambda: len(self.nodes[1].getrawmempool(False)) ==
                        MAX_ANCESTORS_CUSTOM + 1 + MAX_DESCENDANTS_CUSTOM, timeout=10)
        mempool0 = self.nodes[0].getrawmempool(False)
        mempool1 = self.nodes[1].getrawmempool(False)
        assert set(mempool1).issubset(set(mempool0))
        assert parent_transaction in mempool1
        for tx in chain[:MAX_DESCENDANTS_CUSTOM]:
            assert tx in mempool1
        for tx in chain[MAX_DESCENDANTS_CUSTOM:]:
            assert tx not in mempool1
        # TODO: more detailed check of node1's mempool (fees etc.)

        # TODO: test descendant size limits

        # Test reorg handling
        # First, the basics:
        self.nodes[0].generate(1)
        self.sync_blocks()
        self.nodes[1].invalidateblock(self.nodes[0].getbestblockhash())
        self.nodes[1].reconsiderblock(self.nodes[0].getbestblockhash())

        # Now test the case where node1 has a transaction T in its mempool that
        # depends on transactions A and B which are in a mined block, and the
        # block containing A and B is disconnected, AND B is not accepted back
        # into node1's mempool because its ancestor count is too high.

        # Create 8 transactions, like so:
        # Tx0 -> Tx1 (vout0)
        #   \--> Tx2 (vout1) -> Tx3 -> Tx4 -> Tx5 -> Tx6 -> Tx7
        #
        # Mine them in the next block, then generate a new tx8 that spends
        # Tx1 and Tx7, add it to node1's mempool, then disconnect the
        # last block.

        # Create tx0 with 2 outputs
        utxo = self.nodes[0].listunspent()
        txid = utxo[0]['txid']
        value = utxo[0]['amount']
        vout = utxo[0]['vout']

        send_value = satoshi_round((value - fee)/2)
        inputs = [ {'txid' : txid, 'vout' : vout} ]
        outputs = {}
        for _ in range(2):
            outputs[self.nodes[0].getnewaddress()] = send_value
        rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
        signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx)
        txid = self.nodes[0].sendrawtransaction(signedtx['hex'])
        tx0_id = txid
        value = send_value

        # Create tx1
        tx1_id, _ = self.chain_transaction(self.nodes[0], tx0_id, 0, value, fee, 1)

        # Create tx2-7
        vout = 1
        txid = tx0_id
        for _ in range(6):
            (txid, sent_value) = self.chain_transaction(self.nodes[0], txid, vout, value, fee, 1)
            vout = 0
            value = sent_value

        # Mine these in a block
        self.nodes[0].generate(1)
        self.sync_all()

        # Now generate tx8, with a big fee
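        # tx8 spends both tx1's and tx7's outputs and pays 4*fee, four times the fee
        # used for the other transactions in this test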
        inputs = [ {'txid' : tx1_id, 'vout': 0}, {'txid' : txid, 'vout': 0} ]
        outputs = { self.nodes[0].getnewaddress() : send_value + value - 4*fee }
        rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
        signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx)
        txid = self.nodes[0].sendrawtransaction(signedtx['hex'])
        self.sync_mempools()

        # Now try to disconnect the tip on each node...
        self.nodes[1].invalidateblock(self.nodes[1].getbestblockhash())
        self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
        self.sync_blocks()
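        # No further mempool assertions follow; the reorg is only required to complete
        # with both nodes ending up on the same tip (checked by sync_blocks above).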

if __name__ == '__main__':
    MempoolPackagesTest().main()