Mirror of https://github.com/bitcoin/bitcoin.git, synced 2024-11-20 02:25:40 +01:00
Merge bitcoin/bitcoin#22229: test: consolidate to f-strings (part 1)
68faa87881 test: use f-strings in mining_*.py tests (fanquake)
c2a5d560df test: use f-strings in interface_*.py tests (fanquake)
86d958262d test: use f-strings in feature_proxy.py (fanquake)
31bdb33dcb test: use f-strings in feature_segwit.py (fanquake)
b166d54c3c test: use f-strings in feature_versionbits_warning.py (fanquake)
cf6d66bf94 test: use f-strings in feature_settings.py (fanquake)
6651d77f22 test: use f-strings in feature_pruning.py (fanquake)
961f5813ba test: use f-strings in feature_notifications.py (fanquake)
1a546e6f6c test: use f-strings in feature_minchainwork.py (fanquake)
6679eceacc test: use f-strings in feature_logging.py (fanquake)
fb633933ab test: use f-strings in feature_loadblock.py (fanquake)
e9ca8b254d test: use f-strings in feature_help.py (fanquake)
ff7e330999 test: use f-strings in feature_filelock.py (fanquake)
d5a6adc5e4 test: use f-strings in feature_fee_estimation.py (fanquake)
a2de33cbdc test: use f-strings in feature_dersig.py (fanquake)
a2502cc63f test: use f-strings in feature_dbcrash.py (fanquake)
3e2f84e7a9 test: use f-strings in feature_csv_activation.py (fanquake)
e2f1fd8ee9 test: use f-strings in feature_config_args.py (fanquake)
36d33d32b1 test: use f-strings in feature_cltv.py (fanquake)
dca173cc04 test: use f-strings in feature_blocksdir.py (fanquake)
5453e87062 test: use f-strings in feature_backwards_compatibility.py (fanquake)
6f3d5ad67a test: use f-strings in feature_asmap.py (fanquake)

Pull request description:

  Rather than using 3 different ways to build/format strings (sometimes all in the same test, i.e. [`feature_config_args.py`](https://github.com/bitcoin/bitcoin/blob/master/test/functional/feature_config_args.py)), consolidate to using [f-strings (3.6+)](https://docs.python.org/3/reference/lexical_analysis.html#f-strings), which are generally more concise / readable, as well as more performant than existing methods.

  This deals with the `feature_*.py`, `interface_*.py` and `mining_*.py` tests.

  See also: [PEP 498](https://www.python.org/dev/peps/pep-0498/)

ACKs for top commit:
  mjdietzx: reACK 68faa87881
  Zero-1729: crACK 68faa87881

Tree-SHA512: d4e1a42e07d96d2c552387a46da1534223c4ce408703d7568ad2ef580797dd68d9695b8d19666b567af37f44de6e430e8be5db5d5404ba8fcecf9f5b026a6efb
commit 38975eccd4
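
For context on the diffs that follow: Python has three common ways to build the same string, and the changes below replace the first two with the third. A minimal, self-contained sketch (the variable and value are illustrative, not taken from the tests):

    filename = '/tmp/my-map-file.map'              # hypothetical value
    percent_style = '-asmap=%s' % filename         # printf-style formatting
    format_style = '-asmap={}'.format(filename)    # str.format()
    f_string = f'-asmap={filename}'                # f-string (Python 3.6+)
    assert percent_style == format_style == f_string

Because an f-string evaluates the expression inside the braces directly, the converted lines below can embed attribute access, indexing and function calls without a separate argument list.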

test/functional/feature_asmap.py
@@ -31,8 +31,8 @@ ASMAP = '../../src/test/data/asmap.raw' # path to unit test skeleton asmap
 VERSION = 'fec61fa21a9f46f3b17bdcd660d7f4cd90b966aad3aec593c99b35f0aca15853'

 def expected_messages(filename):
-    return ['Opened asmap file "{}" (59 bytes) from disk'.format(filename),
-            'Using asmap version {} for IP bucketing'.format(VERSION)]
+    return [f'Opened asmap file "{filename}" (59 bytes) from disk',
+            f'Using asmap version {VERSION} for IP bucketing']

 class AsmapTest(BitcoinTestFramework):
     def set_test_params(self):
@@ -50,7 +50,7 @@ class AsmapTest(BitcoinTestFramework):
         filename = os.path.join(self.datadir, 'my-map-file.map')
         shutil.copyfile(self.asmap_raw, filename)
         with self.node.assert_debug_log(expected_messages(filename)):
-            self.start_node(0, ['-asmap={}'.format(filename)])
+            self.start_node(0, [f'-asmap={filename}'])
         os.remove(filename)

     def test_asmap_with_relative_path(self):
@@ -60,13 +60,13 @@ class AsmapTest(BitcoinTestFramework):
         filename = os.path.join(self.datadir, name)
         shutil.copyfile(self.asmap_raw, filename)
         with self.node.assert_debug_log(expected_messages(filename)):
-            self.start_node(0, ['-asmap={}'.format(name)])
+            self.start_node(0, [f'-asmap={name}'])
         os.remove(filename)

     def test_default_asmap(self):
         shutil.copyfile(self.asmap_raw, self.default_asmap)
         for arg in ['-asmap', '-asmap=']:
-            self.log.info('Test bitcoind {} (using default map file)'.format(arg))
+            self.log.info(f'Test bitcoind {arg} (using default map file)')
             self.stop_node(0)
             with self.node.assert_debug_log(expected_messages(self.default_asmap)):
                 self.start_node(0, [arg])
@@ -75,7 +75,7 @@ class AsmapTest(BitcoinTestFramework):
     def test_default_asmap_with_missing_file(self):
         self.log.info('Test bitcoind -asmap with missing default map file')
         self.stop_node(0)
-        msg = "Error: Could not find asmap file \"{}\"".format(self.default_asmap)
+        msg = f"Error: Could not find asmap file \"{self.default_asmap}\""
         self.node.assert_start_raises_init_error(extra_args=['-asmap'], expected_msg=msg)

     def test_empty_asmap(self):
@@ -83,7 +83,7 @@ class AsmapTest(BitcoinTestFramework):
         self.stop_node(0)
         with open(self.default_asmap, "w", encoding="utf-8") as f:
             f.write("")
-        msg = "Error: Could not parse asmap file \"{}\"".format(self.default_asmap)
+        msg = f"Error: Could not parse asmap file \"{self.default_asmap}\""
         self.node.assert_start_raises_init_error(extra_args=['-asmap'], expected_msg=msg)
         os.remove(self.default_asmap)


test/functional/feature_backwards_compatibility.py
@@ -366,7 +366,7 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
         assert_equal(load_res['warning'], '')
         wallet = node_master.get_wallet_rpc("u1_v16")
         info = wallet.getaddressinfo(v16_addr)
-        descriptor = "wpkh([" + info["hdmasterfingerprint"] + hdkeypath[1:] + "]" + v16_pubkey + ")"
+        descriptor = f"wpkh([{info['hdmasterfingerprint']}{hdkeypath[1:]}]{v16_pubkey})"
         assert_equal(info["desc"], descsum_create(descriptor))

         # Now copy that same wallet back to 0.16 to make sure no automatic upgrade breaks it
@@ -389,7 +389,7 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
         node_master.loadwallet("u1_v17")
         wallet = node_master.get_wallet_rpc("u1_v17")
         info = wallet.getaddressinfo(address)
-        descriptor = "wpkh([" + info["hdmasterfingerprint"] + hdkeypath[1:] + "]" + pubkey + ")"
+        descriptor = f"wpkh([{info['hdmasterfingerprint']}{hdkeypath[1:]}]{pubkey})"
         assert_equal(info["desc"], descsum_create(descriptor))

         # Now copy that same wallet back to 0.17 to make sure no automatic upgrade breaks it

test/functional/feature_blocksdir.py
@@ -24,10 +24,10 @@ class BlocksdirTest(BitcoinTestFramework):
         initialize_datadir(self.options.tmpdir, 0, self.chain)
         self.log.info("Starting with nonexistent blocksdir ...")
         blocksdir_path = os.path.join(self.options.tmpdir, 'blocksdir')
-        self.nodes[0].assert_start_raises_init_error(["-blocksdir=" + blocksdir_path], 'Error: Specified blocks directory "{}" does not exist.'.format(blocksdir_path))
+        self.nodes[0].assert_start_raises_init_error([f"-blocksdir={blocksdir_path}"], f'Error: Specified blocks directory "{blocksdir_path}" does not exist.')
         os.mkdir(blocksdir_path)
         self.log.info("Starting with existing blocksdir ...")
-        self.start_node(0, ["-blocksdir=" + blocksdir_path])
+        self.start_node(0, [f"-blocksdir={blocksdir_path}"])
         self.log.info("mining blocks..")
         self.nodes[0].generatetoaddress(10, self.nodes[0].get_deterministic_priv_key().address)
         assert os.path.isfile(os.path.join(blocksdir_path, self.chain, "blocks", "blk00000.dat"))

test/functional/feature_cltv.py
@@ -135,7 +135,7 @@ class BIP65Test(BitcoinTestFramework):
         block.nVersion = 3
         block.solve()

-        with self.nodes[0].assert_debug_log(expected_msgs=['{}, bad-version(0x00000003)'.format(block.hash)]):
+        with self.nodes[0].assert_debug_log(expected_msgs=[f'{block.hash}, bad-version(0x00000003)']):
             peer.send_and_ping(msg_block(block))
             assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
             peer.sync_with_ping()
@@ -173,8 +173,7 @@ class BIP65Test(BitcoinTestFramework):
         block.hashMerkleRoot = block.calc_merkle_root()
         block.solve()

-        with self.nodes[0].assert_debug_log(expected_msgs=['CheckInputScripts on {} failed with {}'.format(
-                block.vtx[-1].hash, expected_cltv_reject_reason)]):
+        with self.nodes[0].assert_debug_log(expected_msgs=[f'CheckInputScripts on {block.vtx[-1].hash} failed with {expected_cltv_reject_reason}']):
             peer.send_and_ping(msg_block(block))
             assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
             peer.sync_with_ping()

test/functional/feature_config_args.py
@@ -24,7 +24,7 @@ class ConfArgsTest(BitcoinTestFramework):

         inc_conf_file_path = os.path.join(self.nodes[0].datadir, 'include.conf')
         with open(os.path.join(self.nodes[0].datadir, 'bitcoin.conf'), 'a', encoding='utf-8') as conf:
-            conf.write('includeconf={}\n'.format(inc_conf_file_path))
+            conf.write(f'includeconf={inc_conf_file_path}\n')

         self.nodes[0].assert_start_raises_init_error(
             expected_msg='Error: Error parsing command line arguments: Invalid parameter -dash_cli=1',
@@ -43,13 +43,13 @@ class ConfArgsTest(BitcoinTestFramework):
         if self.is_wallet_compiled():
             with open(inc_conf_file_path, 'w', encoding='utf8') as conf:
                 conf.write("wallet=foo\n")
-            self.nodes[0].assert_start_raises_init_error(expected_msg='Error: Config setting for -wallet only applied on %s network when in [%s] section.' % (self.chain, self.chain))
+            self.nodes[0].assert_start_raises_init_error(expected_msg=f'Error: Config setting for -wallet only applied on {self.chain} network when in [{self.chain}] section.')

         main_conf_file_path = os.path.join(self.options.tmpdir, 'node0', 'bitcoin_main.conf')
-        util.write_config(main_conf_file_path, n=0, chain='', extra_config='includeconf={}\n'.format(inc_conf_file_path))
+        util.write_config(main_conf_file_path, n=0, chain='', extra_config=f'includeconf={inc_conf_file_path}\n')
         with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
             conf.write('acceptnonstdtxn=1\n')
-        self.nodes[0].assert_start_raises_init_error(extra_args=["-conf={}".format(main_conf_file_path)], expected_msg='Error: acceptnonstdtxn is not currently supported for main chain')
+        self.nodes[0].assert_start_raises_init_error(extra_args=[f"-conf={main_conf_file_path}"], expected_msg='Error: acceptnonstdtxn is not currently supported for main chain')

         with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
             conf.write('nono\n')
@@ -69,14 +69,14 @@ class ConfArgsTest(BitcoinTestFramework):

         inc_conf_file2_path = os.path.join(self.nodes[0].datadir, 'include2.conf')
         with open(os.path.join(self.nodes[0].datadir, 'bitcoin.conf'), 'a', encoding='utf-8') as conf:
-            conf.write('includeconf={}\n'.format(inc_conf_file2_path))
+            conf.write(f'includeconf={inc_conf_file2_path}\n')

         with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
             conf.write('testnot.datadir=1\n')
         with open(inc_conf_file2_path, 'w', encoding='utf-8') as conf:
             conf.write('[testnet]\n')
         self.restart_node(0)
-        self.nodes[0].stop_node(expected_stderr='Warning: ' + inc_conf_file_path + ':1 Section [testnot] is not recognized.' + os.linesep + inc_conf_file2_path + ':1 Section [testnet] is not recognized.')
+        self.nodes[0].stop_node(expected_stderr=f'Warning: {inc_conf_file_path}:1 Section [testnot] is not recognized.{os.linesep}{inc_conf_file2_path}:1 Section [testnet] is not recognized.')

         with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
             conf.write('') # clear
@@ -105,8 +105,8 @@ class ConfArgsTest(BitcoinTestFramework):
                 'Command-line arg: rpcpassword=****',
                 'Command-line arg: rpcuser=****',
                 'Command-line arg: torpassword=****',
-                'Config file arg: %s="1"' % self.chain,
-                'Config file arg: [%s] server="1"' % self.chain,
+                f'Config file arg: {self.chain}="1"',
+                f'Config file arg: [{self.chain}] server="1"',
             ],
             unexpected_msgs=[
                 'alice:f7efda5c189b999524f151318c0c86$d5b51b3beffbc0',
@@ -235,7 +235,7 @@ class ConfArgsTest(BitcoinTestFramework):

         # Check that using -datadir argument on non-existent directory fails
         self.nodes[0].datadir = new_data_dir
-        self.nodes[0].assert_start_raises_init_error(['-datadir=' + new_data_dir], 'Error: Specified data directory "' + new_data_dir + '" does not exist.')
+        self.nodes[0].assert_start_raises_init_error([f'-datadir={new_data_dir}'], f'Error: Specified data directory "{new_data_dir}" does not exist.')

         # Check that using non-existent datadir in conf file fails
         conf_file = os.path.join(default_data_dir, "bitcoin.conf")
@@ -243,21 +243,21 @@ class ConfArgsTest(BitcoinTestFramework):
         # datadir needs to be set before [chain] section
         conf_file_contents = open(conf_file, encoding='utf8').read()
         with open(conf_file, 'w', encoding='utf8') as f:
-            f.write("datadir=" + new_data_dir + "\n")
+            f.write(f"datadir={new_data_dir}\n")
             f.write(conf_file_contents)

-        self.nodes[0].assert_start_raises_init_error(['-conf=' + conf_file], 'Error: Error reading configuration file: specified data directory "' + new_data_dir + '" does not exist.')
+        self.nodes[0].assert_start_raises_init_error([f'-conf={conf_file}'], f'Error: Error reading configuration file: specified data directory "{new_data_dir}" does not exist.')

         # Create the directory and ensure the config file now works
         os.mkdir(new_data_dir)
-        self.start_node(0, ['-conf='+conf_file])
+        self.start_node(0, [f'-conf={conf_file}'])
         self.stop_node(0)
         assert os.path.exists(os.path.join(new_data_dir, self.chain, 'blocks'))

         # Ensure command line argument overrides datadir in conf
         os.mkdir(new_data_dir_2)
         self.nodes[0].datadir = new_data_dir_2
-        self.start_node(0, ['-datadir='+new_data_dir_2, '-conf='+conf_file])
+        self.start_node(0, [f'-datadir={new_data_dir_2}', f'-conf={conf_file}'])
         assert os.path.exists(os.path.join(new_data_dir_2, self.chain, 'blocks'))


test/functional/feature_csv_activation.py
@@ -247,7 +247,7 @@ class BIP68_112_113Test(BitcoinTestFramework):
         self.send_blocks(test_blocks)

         assert_equal(self.tipheight, CSV_ACTIVATION_HEIGHT - 2)
-        self.log.info("Height = {}, CSV not yet active (will activate for block {}, not {})".format(self.tipheight, CSV_ACTIVATION_HEIGHT, CSV_ACTIVATION_HEIGHT - 1))
+        self.log.info(f"Height = {self.tipheight}, CSV not yet active (will activate for block {CSV_ACTIVATION_HEIGHT}, not {CSV_ACTIVATION_HEIGHT - 1})")
         assert not softfork_active(self.nodes[0], 'csv')

         # Test both version 1 and version 2 transactions for all tests

test/functional/feature_dbcrash.py
@@ -102,7 +102,7 @@ class ChainstateWriteCrashTest(BitcoinTestFramework):
         # perhaps we generated a test case that blew up our cache?
         # TODO: If this happens a lot, we should try to restart without -dbcrashratio
         # and make sure that recovery happens.
-        raise AssertionError("Unable to successfully restart node %d in allotted time", node_index)
+        raise AssertionError(f"Unable to successfully restart node {node_index} in allotted time")

     def submit_block_catch_error(self, node_index, block):
         """Try submitting a block to the given node.
@@ -114,10 +114,10 @@ class ChainstateWriteCrashTest(BitcoinTestFramework):
             self.nodes[node_index].submitblock(block)
             return True
         except (http.client.CannotSendRequest, http.client.RemoteDisconnected) as e:
-            self.log.debug("node %d submitblock raised exception: %s", node_index, e)
+            self.log.debug(f"node {node_index} submitblock raised exception: {e}")
             return False
         except OSError as e:
-            self.log.debug("node %d submitblock raised OSError exception: errno=%s", node_index, e.errno)
+            self.log.debug(f"node {node_index} submitblock raised OSError exception: errno={e.errno}")
             if e.errno in [errno.EPIPE, errno.ECONNREFUSED, errno.ECONNRESET]:
                 # The node has likely crashed
                 return False
@@ -142,15 +142,15 @@ class ChainstateWriteCrashTest(BitcoinTestFramework):
         # Deliver each block to each other node
         for i in range(3):
             nodei_utxo_hash = None
-            self.log.debug("Syncing blocks to node %d", i)
+            self.log.debug(f"Syncing blocks to node {i}")
             for (block_hash, block) in blocks:
                 # Get the block from node3, and submit to node_i
-                self.log.debug("submitting block %s", block_hash)
+                self.log.debug(f"submitting block {block_hash}")
                 if not self.submit_block_catch_error(i, block):
                     # TODO: more carefully check that the crash is due to -dbcrashratio
                     # (change the exit code perhaps, and check that here?)
                     self.wait_for_node_exit(i, timeout=30)
-                    self.log.debug("Restarting node %d after block hash %s", i, block_hash)
+                    self.log.debug(f"Restarting node {i} after block hash {block_hash}")
                     nodei_utxo_hash = self.restart_node(i, block_hash)
                     assert nodei_utxo_hash is not None
                     self.restart_counts[i] += 1
@@ -167,7 +167,7 @@ class ChainstateWriteCrashTest(BitcoinTestFramework):
             # - we only update the utxo cache after a node restart, since flushing
             # the cache is a no-op at that point
            if nodei_utxo_hash is not None:
-                self.log.debug("Checking txoutsetinfo matches for node %d", i)
+                self.log.debug(f"Checking txoutsetinfo matches for node {i}")
                 assert_equal(nodei_utxo_hash, node3_utxo_hash)

     def verify_utxo_hash(self):
@@ -218,14 +218,14 @@ class ChainstateWriteCrashTest(BitcoinTestFramework):
         # Start by creating a lot of utxos on node3
         initial_height = self.nodes[3].getblockcount()
         utxo_list = create_confirmed_utxos(self.nodes[3].getnetworkinfo()['relayfee'], self.nodes[3], 5000)
-        self.log.info("Prepped %d utxo entries", len(utxo_list))
+        self.log.info(f"Prepped {len(utxo_list)} utxo entries")

         # Sync these blocks with the other nodes
         block_hashes_to_sync = []
         for height in range(initial_height + 1, self.nodes[3].getblockcount() + 1):
             block_hashes_to_sync.append(self.nodes[3].getblockhash(height))

-        self.log.debug("Syncing %d blocks with other nodes", len(block_hashes_to_sync))
+        self.log.debug(f"Syncing {len(block_hashes_to_sync)} blocks with other nodes")
         # Syncing the blocks could cause nodes to crash, so the test begins here.
         self.sync_node3blocks(block_hashes_to_sync)

@@ -235,18 +235,18 @@ class ChainstateWriteCrashTest(BitcoinTestFramework):
         # each time through the loop, generate a bunch of transactions,
         # and then either mine a single new block on the tip, or some-sized reorg.
         for i in range(40):
-            self.log.info("Iteration %d, generating 2500 transactions %s", i, self.restart_counts)
+            self.log.info(f"Iteration {i}, generating 2500 transactions {self.restart_counts}")
             # Generate a bunch of small-ish transactions
             self.generate_small_transactions(self.nodes[3], 2500, utxo_list)
             # Pick a random block between current tip, and starting tip
             current_height = self.nodes[3].getblockcount()
             random_height = random.randint(starting_tip_height, current_height)
-            self.log.debug("At height %d, considering height %d", current_height, random_height)
+            self.log.debug(f"At height {current_height}, considering height {random_height}")
            if random_height > starting_tip_height:
                 # Randomly reorg from this point with some probability (1/4 for
                 # tip, 1/5 for tip-1, ...)
                 if random.random() < 1.0 / (current_height + 4 - random_height):
-                    self.log.debug("Invalidating block at height %d", random_height)
+                    self.log.debug(f"Invalidating block at height {random_height}")
                     self.nodes[3].invalidateblock(self.nodes[3].getblockhash(random_height))

             # Now generate new blocks until we pass the old tip height
@@ -258,10 +258,10 @@ class ChainstateWriteCrashTest(BitcoinTestFramework):
                     # new address to avoid mining a block that has just been invalidated
                     address=self.nodes[3].getnewaddress(),
                 ))
-            self.log.debug("Syncing %d new blocks...", len(block_hashes))
+            self.log.debug(f"Syncing {len(block_hashes)} new blocks...")
             self.sync_node3blocks(block_hashes)
             utxo_list = self.nodes[3].listunspent()
-            self.log.debug("Node3 utxo count: %d", len(utxo_list))
+            self.log.debug(f"Node3 utxo count: {len(utxo_list)}")

         # Check that the utxo hashes agree with node3
         # Useful side effect: each utxo cache gets flushed here, so that we
@@ -269,7 +269,7 @@ class ChainstateWriteCrashTest(BitcoinTestFramework):
         self.verify_utxo_hash()

         # Check the test coverage
-        self.log.info("Restarted nodes: %s; crashes on restart: %d", self.restart_counts, self.crashed_on_restart)
+        self.log.info(f"Restarted nodes: {self.restart_counts}; crashes on restart: {self.crashed_on_restart}")

         # If no nodes were restarted, we didn't test anything.
         assert self.restart_counts != [0, 0, 0]
@@ -280,7 +280,7 @@ class ChainstateWriteCrashTest(BitcoinTestFramework):
         # Warn if any of the nodes escaped restart.
         for i in range(3):
             if self.restart_counts[i] == 0:
-                self.log.warning("Node %d never crashed during utxo flush!", i)
+                self.log.warning(f"Node {i} never crashed during utxo flush!")


 if __name__ == "__main__":
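
One behavioural nuance in the logging hunks above: a call such as self.log.debug("node %d ...", node_index) lets the logging module format the message lazily, only when the record is actually emitted, whereas an f-string is built eagerly before the call. For these functional tests the cost is negligible and the PR standardizes on the eager form; a small sketch, assuming only the standard logging module:

    import logging

    log = logging.getLogger("demo")
    node_index = 2
    # Lazy: the template is rendered only if a DEBUG record is emitted.
    log.debug("node %d submitblock raised exception", node_index)
    # Eager: the string is built before log.debug() is even called.
    log.debug(f"node {node_index} submitblock raised exception")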

test/functional/feature_dersig.py
@@ -104,7 +104,7 @@ class BIP66Test(BitcoinTestFramework):
         block.rehash()
         block.solve()

-        with self.nodes[0].assert_debug_log(expected_msgs=['{}, bad-version(0x00000002)'.format(block.hash)]):
+        with self.nodes[0].assert_debug_log(expected_msgs=[f'{block.hash}, bad-version(0x00000002)']):
             peer.send_and_ping(msg_block(block))
             assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
             peer.sync_with_ping()
@@ -134,7 +134,7 @@ class BIP66Test(BitcoinTestFramework):
         block.rehash()
         block.solve()

-        with self.nodes[0].assert_debug_log(expected_msgs=['CheckInputScripts on {} failed with non-mandatory-script-verify-flag (Non-canonical DER signature)'.format(block.vtx[-1].hash)]):
+        with self.nodes[0].assert_debug_log(expected_msgs=[f'CheckInputScripts on {block.vtx[-1].hash} failed with non-mandatory-script-verify-flag (Non-canonical DER signature)']):
             peer.send_and_ping(msg_block(block))
             assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
             peer.sync_with_ping()

test/functional/feature_fee_estimation.py
@@ -72,7 +72,7 @@ def small_txpuzzle_randfee(from_node, conflist, unconflist, amount, min_fee, fee
         total_in += t["amount"]
         tx.vin.append(CTxIn(COutPoint(int(t["txid"], 16), t["vout"]), b""))
     if total_in <= amount + fee:
-        raise RuntimeError("Insufficient funds: need %d, have %d" % (amount + fee, total_in))
+        raise RuntimeError(f"Insufficient funds: need {amount + fee}, have {total_in}")
     tx.vout.append(CTxOut(int((total_in - amount - fee) * COIN), P2SH_1))
     tx.vout.append(CTxOut(int(amount * COIN), P2SH_2))
     # These transactions don't need to be signed, but we still have to insert
@@ -124,8 +124,7 @@ def check_raw_estimates(node, fees_seen):
         assert_greater_than(feerate, 0)

         if feerate + delta < min(fees_seen) or feerate - delta > max(fees_seen):
-            raise AssertionError("Estimated fee (%f) out of range (%f,%f)"
-                                 % (feerate, min(fees_seen), max(fees_seen)))
+            raise AssertionError(f"Estimated fee ({feerate}) out of range ({min(fees_seen)},{max(fees_seen)})")

 def check_smart_estimates(node, fees_seen):
     """Call estimatesmartfee and verify that the estimates meet certain invariants."""
@@ -138,11 +137,9 @@ def check_smart_estimates(node, fees_seen):
         assert_greater_than(feerate, 0)

         if feerate + delta < min(fees_seen) or feerate - delta > max(fees_seen):
-            raise AssertionError("Estimated fee (%f) out of range (%f,%f)"
-                                 % (feerate, min(fees_seen), max(fees_seen)))
+            raise AssertionError(f"Estimated fee ({feerate}) out of range ({min(fees_seen)},{max(fees_seen)})")
         if feerate - delta > last_feerate:
-            raise AssertionError("Estimated fee (%f) larger than last fee (%f) for lower number of confirms"
-                                 % (feerate, last_feerate))
+            raise AssertionError(f"Estimated fee ({feerate}) larger than last fee ({last_feerate}) for lower number of confirms")
         last_feerate = feerate

         if i == 0:
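
A side note on the fee-estimation hunks above: %f always renders six decimal places, while an f-string placeholder with no format spec falls back to str(), so the rewritten assertion messages may print floats slightly differently. An illustration with an invented value:

    feerate = 0.5
    print("Estimated fee (%f)" % feerate)   # Estimated fee (0.500000)
    print(f"Estimated fee ({feerate})")     # Estimated fee (0.5)
    print(f"Estimated fee ({feerate:f})")   # the :f spec would restore the old rendering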

test/functional/feature_filelock.py
@@ -22,11 +22,11 @@ class FilelockTest(BitcoinTestFramework):

     def run_test(self):
         datadir = os.path.join(self.nodes[0].datadir, self.chain)
-        self.log.info("Using datadir {}".format(datadir))
+        self.log.info(f"Using datadir {datadir}")

         self.log.info("Check that we can't start a second bitcoind instance using the same datadir")
-        expected_msg = "Error: Cannot obtain a lock on data directory {0}. {1} is probably already running.".format(datadir, self.config['environment']['PACKAGE_NAME'])
-        self.nodes[1].assert_start_raises_init_error(extra_args=['-datadir={}'.format(self.nodes[0].datadir), '-noserver'], expected_msg=expected_msg)
+        expected_msg = f"Error: Cannot obtain a lock on data directory {datadir}. {self.config['environment']['PACKAGE_NAME']} is probably already running."
+        self.nodes[1].assert_start_raises_init_error(extra_args=[f'-datadir={self.nodes[0].datadir}', '-noserver'], expected_msg=expected_msg)

         if self.is_wallet_compiled():
             def check_wallet_filelock(descriptors):
@@ -38,7 +38,7 @@ class FilelockTest(BitcoinTestFramework):
                     expected_msg = "Error: SQLiteDatabase: Unable to obtain an exclusive lock on the database, is it being used by another bitcoind?"
                 else:
                     expected_msg = "Error: Error initializing wallet database environment"
-                self.nodes[1].assert_start_raises_init_error(extra_args=['-walletdir={}'.format(wallet_dir), '-wallet=' + wallet_name, '-noserver'], expected_msg=expected_msg, match=ErrorMatch.PARTIAL_REGEX)
+                self.nodes[1].assert_start_raises_init_error(extra_args=[f'-walletdir={wallet_dir}', f'-wallet={wallet_name}', '-noserver'], expected_msg=expected_msg, match=ErrorMatch.PARTIAL_REGEX)

             if self.is_bdb_compiled():
                 check_wallet_filelock(False)

test/functional/feature_help.py
@@ -40,14 +40,14 @@ class HelpTest(BitcoinTestFramework):
         # Node should exit immediately and output help to stdout.
         output, _ = self.get_node_output(ret_code_expected=0)
         assert b'Options' in output
-        self.log.info("Help text received: {} (...)".format(output[0:60]))
+        self.log.info(f"Help text received: {output[0:60]} (...)")

         self.log.info("Start bitcoin with -version for version information")
         self.nodes[0].start(extra_args=['-version'])
         # Node should exit immediately and output version to stdout.
         output, _ = self.get_node_output(ret_code_expected=0)
         assert b'version' in output
-        self.log.info("Version text received: {} (...)".format(output[0:60]))
+        self.log.info(f"Version text received: {output[0:60]} (...)")

         # Test that arguments not in the help results in an error
         self.log.info("Start bitcoind with -fakearg to make sure it does not start")
@@ -55,7 +55,7 @@ class HelpTest(BitcoinTestFramework):
         # Node should exit immediately and output an error to stderr
         _, output = self.get_node_output(ret_code_expected=1)
         assert b'Error parsing command line arguments' in output
-        self.log.info("Error message received: {} (...)".format(output[0:60]))
+        self.log.info(f"Error message received: {output[0:60]} (...)")


 if __name__ == '__main__':

test/functional/feature_loadblock.py
@@ -45,17 +45,17 @@ class LoadblockTest(BitcoinTestFramework):

         self.log.info("Create linearization config file")
         with open(cfg_file, "a", encoding="utf-8") as cfg:
-            cfg.write("datadir={}\n".format(data_dir))
-            cfg.write("rpcuser={}\n".format(node_url.username))
-            cfg.write("rpcpassword={}\n".format(node_url.password))
-            cfg.write("port={}\n".format(node_url.port))
-            cfg.write("host={}\n".format(node_url.hostname))
-            cfg.write("output_file={}\n".format(bootstrap_file))
-            cfg.write("max_height=100\n")
-            cfg.write("netmagic=fabfb5da\n")
-            cfg.write("input={}\n".format(blocks_dir))
-            cfg.write("genesis={}\n".format(genesis_block))
-            cfg.write("hashlist={}\n".format(hash_list.name))
+            cfg.write(f"datadir={data_dir}\n")
+            cfg.write(f"rpcuser={node_url.username}\n")
+            cfg.write(f"rpcpassword={node_url.password}\n")
+            cfg.write(f"port={node_url.port}\n")
+            cfg.write(f"host={node_url.hostname}\n")
+            cfg.write(f"output_file={bootstrap_file}\n")
+            cfg.write(f"max_height=100\n")
+            cfg.write(f"netmagic=fabfb5da\n")
+            cfg.write(f"input={blocks_dir}\n")
+            cfg.write(f"genesis={genesis_block}\n")
+            cfg.write(f"hashlist={hash_list.name}\n")

         base_dir = self.config["environment"]["SRCDIR"]
         linearize_dir = os.path.join(base_dir, "contrib", "linearize")
@@ -72,7 +72,7 @@ class LoadblockTest(BitcoinTestFramework):
                        check=True)

         self.log.info("Restart second, unsynced node with bootstrap file")
-        self.restart_node(1, extra_args=["-loadblock=" + bootstrap_file])
+        self.restart_node(1, extra_args=[f"-loadblock={bootstrap_file}"])
         assert_equal(self.nodes[1].getblockcount(), 100) # start_node is blocking on all block files being imported

         assert_equal(self.nodes[1].getblockchaininfo()['blocks'], 100)

test/functional/feature_logging.py
@@ -29,7 +29,7 @@ class LoggingTest(BitcoinTestFramework):

         # test alternative log file name outside datadir
         tempname = os.path.join(self.options.tmpdir, "foo.log")
-        self.restart_node(0, ["-debuglogfile=%s" % tempname])
+        self.restart_node(0, [f"-debuglogfile={tempname}"])
         assert os.path.isfile(tempname)

         # check that invalid log (relative) will cause error
@@ -37,26 +37,26 @@ class LoggingTest(BitcoinTestFramework):
         invalidname = os.path.join("foo", "foo.log")
         self.stop_node(0)
         exp_stderr = r"Error: Could not open debug log file \S+$"
-        self.nodes[0].assert_start_raises_init_error(["-debuglogfile=%s" % (invalidname)], exp_stderr, match=ErrorMatch.FULL_REGEX)
+        self.nodes[0].assert_start_raises_init_error([f"-debuglogfile={invalidname}"], exp_stderr, match=ErrorMatch.FULL_REGEX)
         assert not os.path.isfile(os.path.join(invdir, "foo.log"))

         # check that invalid log (relative) works after path exists
         self.stop_node(0)
         os.mkdir(invdir)
-        self.start_node(0, ["-debuglogfile=%s" % (invalidname)])
+        self.start_node(0, [f"-debuglogfile={invalidname}"])
         assert os.path.isfile(os.path.join(invdir, "foo.log"))

         # check that invalid log (absolute) will cause error
         self.stop_node(0)
         invdir = os.path.join(self.options.tmpdir, "foo")
         invalidname = os.path.join(invdir, "foo.log")
-        self.nodes[0].assert_start_raises_init_error(["-debuglogfile=%s" % invalidname], exp_stderr, match=ErrorMatch.FULL_REGEX)
+        self.nodes[0].assert_start_raises_init_error([f"-debuglogfile={invalidname}"], exp_stderr, match=ErrorMatch.FULL_REGEX)
         assert not os.path.isfile(os.path.join(invdir, "foo.log"))

         # check that invalid log (absolute) works after path exists
         self.stop_node(0)
         os.mkdir(invdir)
-        self.start_node(0, ["-debuglogfile=%s" % (invalidname)])
+        self.start_node(0, [f"-debuglogfile={invalidname}"])
         assert os.path.isfile(os.path.join(invdir, "foo.log"))

         # check that -nodebuglogfile disables logging
@@ -67,7 +67,7 @@ class LoggingTest(BitcoinTestFramework):
         assert not os.path.isfile(default_log_path)

         # just sanity check no crash here
-        self.restart_node(0, ["-debuglogfile=%s" % os.devnull])
+        self.restart_node(0, [f"-debuglogfile={os.devnull}"])


 if __name__ == '__main__':

test/functional/feature_minchainwork.py
@@ -45,16 +45,16 @@ class MinimumChainWorkTest(BitcoinTestFramework):
         # Start building a chain on node0. node2 shouldn't be able to sync until node1's
         # minchainwork is exceeded
         starting_chain_work = REGTEST_WORK_PER_BLOCK # Genesis block's work
-        self.log.info("Testing relay across node %d (minChainWork = %d)", 1, self.node_min_work[1])
+        self.log.info(f"Testing relay across node 1 (minChainWork = {self.node_min_work[1]})")

         starting_blockcount = self.nodes[2].getblockcount()

         num_blocks_to_generate = int((self.node_min_work[1] - starting_chain_work) / REGTEST_WORK_PER_BLOCK)
-        self.log.info("Generating %d blocks on node0", num_blocks_to_generate)
+        self.log.info(f"Generating {num_blocks_to_generate} blocks on node0")
         hashes = self.nodes[0].generatetoaddress(num_blocks_to_generate,
                                                  self.nodes[0].get_deterministic_priv_key().address)

-        self.log.info("Node0 current chain work: %s", self.nodes[0].getblockheader(hashes[-1])['chainwork'])
+        self.log.info(f"Node0 current chain work: {self.nodes[0].getblockheader(hashes[-1])['chainwork']}")

         # Sleep a few seconds and verify that node2 didn't get any new blocks
         # or headers. We sleep, rather than sync_blocks(node0, node1) because
@@ -63,7 +63,7 @@ class MinimumChainWorkTest(BitcoinTestFramework):
         time.sleep(3)

         self.log.info("Verifying node 2 has no more blocks than before")
-        self.log.info("Blockcounts: %s", [n.getblockcount() for n in self.nodes])
+        self.log.info(f"Blockcounts: {[n.getblockcount() for n in self.nodes]}")
         # Node2 shouldn't have any new headers yet, because node1 should not
         # have relayed anything.
         assert_equal(len(self.nodes[2].getchaintips()), 1)
@@ -84,7 +84,7 @@ class MinimumChainWorkTest(BitcoinTestFramework):
         # continue the test.

         self.sync_all()
-        self.log.info("Blockcounts: %s", [n.getblockcount() for n in self.nodes])
+        self.log.info(f"Blockcounts: {[n.getblockcount() for n in self.nodes]}")

 if __name__ == '__main__':
     MinimumChainWorkTest().main()

test/functional/feature_notifications.py
@@ -20,7 +20,7 @@ FILE_CHARS_DISALLOWED = '/\\?%*:|"<>' if os.name == 'nt' else '/'
 UNCONFIRMED_HASH_STRING = 'unconfirmed'

 def notify_outputname(walletname, txid):
-    return txid if os.name == 'nt' else '{}_{}'.format(walletname, txid)
+    return txid if os.name == 'nt' else f'{walletname}_{txid}'


 class NotificationsTest(BitcoinTestFramework):
@@ -39,11 +39,11 @@ class NotificationsTest(BitcoinTestFramework):

         # -alertnotify and -blocknotify on node0, walletnotify on node1
         self.extra_args = [[
-            "-alertnotify=echo > {}".format(os.path.join(self.alertnotify_dir, '%s')),
-            "-blocknotify=echo > {}".format(os.path.join(self.blocknotify_dir, '%s')),
+            f"-alertnotify=echo > {os.path.join(self.alertnotify_dir, '%s')}",
+            f"-blocknotify=echo > {os.path.join(self.blocknotify_dir, '%s')}",
         ], [
             "-rescan",
-            "-walletnotify=echo %h_%b > {}".format(os.path.join(self.walletnotify_dir, notify_outputname('%w', '%s'))),
+            f"-walletnotify=echo %h_%b > {os.path.join(self.walletnotify_dir, notify_outputname('%w', '%s'))}",
         ]]
         self.wallet_names = [self.default_wallet_name, self.wallet]
         super().setup_network()
@@ -54,12 +54,12 @@ class NotificationsTest(BitcoinTestFramework):
         seed = "cTdGmKFWpbvpKQ7ejrdzqYT2hhjyb3GPHnLAK7wdi5Em67YLwSm9"
         xpriv = "tprv8ZgxMBicQKsPfHCsTwkiM1KT56RXbGGTqvc2hgqzycpwbHqqpcajQeMRZoBD35kW4RtyCemu6j34Ku5DEspmgjKdt2qe4SvRch5Kk8B8A2v"
         desc_imports = [{
-            "desc": descsum_create("wpkh(" + xpriv + "/0/*)"),
+            "desc": descsum_create(f"wpkh({xpriv}/0/*)"),
             "timestamp": 0,
             "active": True,
             "keypool": True,
         },{
-            "desc": descsum_create("wpkh(" + xpriv + "/1/*)"),
+            "desc": descsum_create(f"wpkh({xpriv}/1/*)"),
             "timestamp": 0,
             "active": True,
             "keypool": True,
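
Worth noting in the feature_notifications.py hunks above: the %w, %s, %h and %b tokens are placeholders that bitcoind itself substitutes when it runs the -alertnotify/-blocknotify/-walletnotify command, so they must survive as literal text. f-strings only interpret {...}, which makes the conversion safe here; a minimal sketch with a hypothetical directory:

    notify_dir = "/tmp/walletnotify"   # hypothetical path, not from the test
    arg = f"-walletnotify=echo %h_%b > {notify_dir}/%w_%s"
    assert arg == "-walletnotify=echo %h_%b > /tmp/walletnotify/%w_%s"

(With %-formatting, by contrast, every literal percent sign would have had to be written as %%.)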

test/functional/feature_proxy.py
@@ -97,14 +97,14 @@ class ProxyTest(BitcoinTestFramework):
         # Note: proxies are not used to connect to local nodes. This is because the proxy to
         # use is based on CService.GetNetwork(), which returns NET_UNROUTABLE for localhost.
         args = [
-            ['-listen', '-proxy=%s:%i' % (self.conf1.addr),'-proxyrandomize=1'],
-            ['-listen', '-proxy=%s:%i' % (self.conf1.addr),'-onion=%s:%i' % (self.conf2.addr),
-                '-i2psam=%s:%i' % (self.i2p_sam), '-i2pacceptincoming=0', '-proxyrandomize=0'],
-            ['-listen', '-proxy=%s:%i' % (self.conf2.addr),'-proxyrandomize=1'],
+            ['-listen', f'-proxy={self.conf1.addr[0]}:{self.conf1.addr[1]}','-proxyrandomize=1'],
+            ['-listen', f'-proxy={self.conf1.addr[0]}:{self.conf1.addr[1]}',f'-onion={self.conf2.addr[0]}:{self.conf2.addr[1]}',
+                f'-i2psam={self.i2p_sam[0]}:{self.i2p_sam[1]}', '-i2pacceptincoming=0', '-proxyrandomize=0'],
+            ['-listen', f'-proxy={self.conf2.addr[0]}:{self.conf2.addr[1]}','-proxyrandomize=1'],
             []
         ]
         if self.have_ipv6:
-            args[3] = ['-listen', '-proxy=[%s]:%i' % (self.conf3.addr),'-proxyrandomize=0', '-noonion']
+            args[3] = ['-listen', f'-proxy=[{self.conf3.addr[0]}]:{self.conf3.addr[1]}','-proxyrandomize=0', '-noonion']
         self.add_nodes(self.num_nodes, extra_args=args)
         self.start_nodes()

@@ -116,7 +116,7 @@ class ProxyTest(BitcoinTestFramework):
     def node_test(self, node, proxies, auth, test_onion=True):
         rv = []
         addr = "15.61.23.23:1234"
-        self.log.debug("Test: outgoing IPv4 connection through node for address {}".format(addr))
+        self.log.debug(f"Test: outgoing IPv4 connection through node for address {addr}")
         node.addnode(addr, "onetry")
         cmd = proxies[0].queue.get()
         assert isinstance(cmd, Socks5Command)
@@ -132,7 +132,7 @@ class ProxyTest(BitcoinTestFramework):

         if self.have_ipv6:
             addr = "[1233:3432:2434:2343:3234:2345:6546:4534]:5443"
-            self.log.debug("Test: outgoing IPv6 connection through node for address {}".format(addr))
+            self.log.debug(f"Test: outgoing IPv6 connection through node for address {addr}")
             node.addnode(addr, "onetry")
             cmd = proxies[1].queue.get()
             assert isinstance(cmd, Socks5Command)
@@ -148,7 +148,7 @@ class ProxyTest(BitcoinTestFramework):

         if test_onion:
             addr = "pg6mmjiyjmcrsslvykfwnntlaru7p5svn6y2ymmju6nubxndf4pscryd.onion:8333"
-            self.log.debug("Test: outgoing onion connection through node for address {}".format(addr))
+            self.log.debug(f"Test: outgoing onion connection through node for address {addr}")
             node.addnode(addr, "onetry")
             cmd = proxies[2].queue.get()
             assert isinstance(cmd, Socks5Command)
@@ -162,7 +162,7 @@ class ProxyTest(BitcoinTestFramework):
             self.network_test(node, addr, network=NET_ONION)

         addr = "node.noumenon:8333"
-        self.log.debug("Test: outgoing DNS name connection through node for address {}".format(addr))
+        self.log.debug(f"Test: outgoing DNS name connection through node for address {addr}")
         node.addnode(addr, "onetry")
         cmd = proxies[3].queue.get()
         assert isinstance(cmd, Socks5Command)
@@ -218,12 +218,12 @@ class ProxyTest(BitcoinTestFramework):
         n1 = networks_dict(self.nodes[1].getnetworkinfo())
         assert_equal(NETWORKS, n1.keys())
         for net in ['ipv4', 'ipv6']:
-            assert_equal(n1[net]['proxy'], '%s:%i' % (self.conf1.addr))
+            assert_equal(n1[net]['proxy'], f'{self.conf1.addr[0]}:{self.conf1.addr[1]}')
             assert_equal(n1[net]['proxy_randomize_credentials'], False)
-        assert_equal(n1['onion']['proxy'], '%s:%i' % (self.conf2.addr))
+        assert_equal(n1['onion']['proxy'], f'{self.conf2.addr[0]}:{self.conf2.addr[1]}')
         assert_equal(n1['onion']['proxy_randomize_credentials'], False)
         assert_equal(n1['onion']['reachable'], True)
-        assert_equal(n1['i2p']['proxy'], '%s:%i' % (self.i2p_sam))
+        assert_equal(n1['i2p']['proxy'], f'{self.i2p_sam[0]}:{self.i2p_sam[1]}')
         assert_equal(n1['i2p']['proxy_randomize_credentials'], False)
         assert_equal(n1['i2p']['reachable'], True)

@@ -234,7 +234,7 @@ class ProxyTest(BitcoinTestFramework):
                 expected_proxy = ''
                 expected_randomize = False
             else:
-                expected_proxy = '%s:%i' % (self.conf2.addr)
+                expected_proxy = f'{self.conf2.addr[0]}:{self.conf2.addr[1]}'
                 expected_randomize = True
             assert_equal(n2[net]['proxy'], expected_proxy)
             assert_equal(n2[net]['proxy_randomize_credentials'], expected_randomize)
@@ -248,7 +248,7 @@ class ProxyTest(BitcoinTestFramework):
             if net == NET_I2P:
                 expected_proxy = ''
             else:
-                expected_proxy = '[%s]:%i' % (self.conf3.addr)
+                expected_proxy = f'[{self.conf3.addr[0]}]:{self.conf3.addr[1]}'
             assert_equal(n3[net]['proxy'], expected_proxy)
             assert_equal(n3[net]['proxy_randomize_credentials'], False)
         assert_equal(n3['onion']['reachable'], False)
@@ -148,7 +148,7 @@ class PruneTest(BitcoinTestFramework):
def test_height_min(self):
assert os.path.isfile(os.path.join(self.prunedir, "blk00000.dat")), "blk00000.dat is missing, pruning too early"
self.log.info("Success")
-self.log.info("Though we're already using more than 550MiB, current usage: %d" % calc_usage(self.prunedir))
+self.log.info(f"Though we're already using more than 550MiB, current usage: {calc_usage(self.prunedir)}")
self.log.info("Mining 25 more blocks should cause the first block file to be pruned")
# Pruning doesn't run until we're allocating another chunk, 20 full blocks past the height cutoff will ensure this
mine_large_blocks(self.nodes[0], 25)
@@ -158,7 +158,7 @@ class PruneTest(BitcoinTestFramework):

self.log.info("Success")
usage = calc_usage(self.prunedir)
-self.log.info("Usage should be below target: %d" % usage)
+self.log.info(f"Usage should be below target: {usage}")
assert_greater_than(550, usage)

def create_chain_with_staleblocks(self):
@@ -181,18 +181,18 @@ class PruneTest(BitcoinTestFramework):
self.connect_nodes(0, 2)
self.sync_blocks(self.nodes[0:3])

-self.log.info("Usage can be over target because of high stale rate: %d" % calc_usage(self.prunedir))
+self.log.info(f"Usage can be over target because of high stale rate: {calc_usage(self.prunedir)}")

def reorg_test(self):
# Node 1 will mine a 300 block chain starting 287 blocks back from Node 0 and Node 2's tip
# This will cause Node 2 to do a reorg requiring 288 blocks of undo data to the reorg_test chain

height = self.nodes[1].getblockcount()
-self.log.info("Current block height: %d" % height)
+self.log.info(f"Current block height: {height}")

self.forkheight = height - 287
self.forkhash = self.nodes[1].getblockhash(self.forkheight)
-self.log.info("Invalidating block %s at height %d" % (self.forkhash, self.forkheight))
+self.log.info(f"Invalidating block {self.forkhash} at height {self.forkheight}")
self.nodes[1].invalidateblock(self.forkhash)

# We've now switched to our previously mined-24 block fork on node 1, but that's not what we want
@@ -204,7 +204,7 @@ class PruneTest(BitcoinTestFramework):
curhash = self.nodes[1].getblockhash(self.forkheight - 1)

assert self.nodes[1].getblockcount() == self.forkheight - 1
-self.log.info("New best height: %d" % self.nodes[1].getblockcount())
+self.log.info(f"New best height: {self.nodes[1].getblockcount()}")

# Disconnect node1 and generate the new chain
self.disconnect_nodes(0, 1)
@@ -218,8 +218,8 @@ class PruneTest(BitcoinTestFramework):
self.connect_nodes(1, 2)
self.sync_blocks(self.nodes[0:3], timeout=120)

-self.log.info("Verify height on node 2: %d" % self.nodes[2].getblockcount())
+self.log.info(f"Verify height on node 2: {self.nodes[2].getblockcount()}")
-self.log.info("Usage possibly still high because of stale blocks in block files: %d" % calc_usage(self.prunedir))
+self.log.info(f"Usage possibly still high because of stale blocks in block files: {calc_usage(self.prunedir)}")

self.log.info("Mine 220 more large blocks so we have requisite history")

@@ -227,7 +227,7 @@ class PruneTest(BitcoinTestFramework):
self.sync_blocks(self.nodes[0:3], timeout=120)

usage = calc_usage(self.prunedir)
-self.log.info("Usage should be below target: %d" % usage)
+self.log.info(f"Usage should be below target: {usage}")
assert_greater_than(550, usage)

def reorg_back(self):
@@ -235,7 +235,7 @@ class PruneTest(BitcoinTestFramework):
assert_raises_rpc_error(-1, "Block not available (pruned data)", self.nodes[2].getblock, self.forkhash)
with self.nodes[2].assert_debug_log(expected_msgs=['block verification stopping at height', '(pruning, no data)']):
self.nodes[2].verifychain(checklevel=4, nblocks=0)
-self.log.info("Will need to redownload block %d" % self.forkheight)
+self.log.info(f"Will need to redownload block {self.forkheight}")

# Verify that we have enough history to reorg back to the fork point
# Although this is more than 288 blocks, because this chain was written more recently
@@ -259,7 +259,7 @@ class PruneTest(BitcoinTestFramework):
# At this point node 2 is within 288 blocks of the fork point so it will preserve its ability to reorg
if self.nodes[2].getblockcount() < self.mainchainheight:
blocks_to_mine = first_reorg_height + 1 - self.mainchainheight
-self.log.info("Rewind node 0 to prev main chain to mine longer chain to trigger redownload. Blocks needed: %d" % blocks_to_mine)
+self.log.info(f"Rewind node 0 to prev main chain to mine longer chain to trigger redownload. Blocks needed: {blocks_to_mine}")
self.nodes[0].invalidateblock(curchainhash)
assert_equal(self.nodes[0].getblockcount(), self.mainchainheight)
assert_equal(self.nodes[0].getbestblockhash(), self.mainchainhash2)
@@ -296,7 +296,7 @@ class PruneTest(BitcoinTestFramework):
assert_equal(ret, node.getblockchaininfo()['pruneheight'])

def has_block(index):
-return os.path.isfile(os.path.join(self.nodes[node_number].datadir, self.chain, "blocks", "blk{:05}.dat".format(index)))
+return os.path.isfile(os.path.join(self.nodes[node_number].datadir, self.chain, "blocks", f"blk{index:05}.dat"))

# should not prune because chain tip of node 3 (995) < PruneAfterHeight (1000)
assert_raises_rpc_error(-1, "Blockchain is too short for pruning", node.pruneblockchain, height(500))

@@ -65,7 +65,7 @@ def find_spendable_utxo(node, min_value):
if utxo['spendable']:
return utxo

-raise AssertionError("Unspent output equal or higher than %s not found" % min_value)
+raise AssertionError(f"Unspent output equal or higher than {min_value} not found")

txs_mined = {} # txindex from txid to blockhash

@@ -83,7 +83,7 @@ class SettingsTest(BitcoinTestFramework):
with altsettings.open("w") as fp:
fp.write('{"key": "value"}')
with node.assert_debug_log(expected_msgs=['Setting file arg: key = "value"']):
-self.start_node(0, extra_args=["-settings={}".format(altsettings)])
+self.start_node(0, extra_args=[f"-settings={altsettings}"])
self.stop_node(0)

@@ -21,7 +21,7 @@ VB_TOP_BITS = 0x20000000
VB_UNKNOWN_BIT = 27 # Choose a bit unassigned to any deployment
VB_UNKNOWN_VERSION = VB_TOP_BITS | (1 << VB_UNKNOWN_BIT)

-WARN_UNKNOWN_RULES_ACTIVE = "Unknown new rules activated (versionbit {})".format(VB_UNKNOWN_BIT)
+WARN_UNKNOWN_RULES_ACTIVE = f"Unknown new rules activated (versionbit {VB_UNKNOWN_BIT})"
VB_PATTERN = re.compile("Unknown new rules activated.*versionbit")

class VersionBitsWarningTest(BitcoinTestFramework):
@@ -34,7 +34,7 @@ class VersionBitsWarningTest(BitcoinTestFramework):
# Open and close to create zero-length file
with open(self.alert_filename, 'w', encoding='utf8'):
pass
-self.extra_args = [["-alertnotify=echo %s >> \"" + self.alert_filename + "\""]]
+self.extra_args = [[f"-alertnotify=echo %s >> \"{self.alert_filename}\""]]
self.setup_nodes()

def send_blocks_with_version(self, peer, numblocks, version):

@@ -87,12 +87,12 @@ class TestBitcoinCli(BitcoinTestFramework):
user, password = get_auth_cookie(self.nodes[0].datadir, self.chain)

self.log.info("Test -stdinrpcpass option")
-assert_equal(BLOCKS, self.nodes[0].cli('-rpcuser={}'.format(user), '-stdinrpcpass', input=password).getblockcount())
+assert_equal(BLOCKS, self.nodes[0].cli(f'-rpcuser={user}', '-stdinrpcpass', input=password).getblockcount())
-assert_raises_process_error(1, 'Incorrect rpcuser or rpcpassword', self.nodes[0].cli('-rpcuser={}'.format(user), '-stdinrpcpass', input='foo').echo)
+assert_raises_process_error(1, 'Incorrect rpcuser or rpcpassword', self.nodes[0].cli(f'-rpcuser={user}', '-stdinrpcpass', input='foo').echo)

self.log.info("Test -stdin and -stdinrpcpass")
-assert_equal(['foo', 'bar'], self.nodes[0].cli('-rpcuser={}'.format(user), '-stdin', '-stdinrpcpass', input=password + '\nfoo\nbar').echo())
+assert_equal(['foo', 'bar'], self.nodes[0].cli(f'-rpcuser={user}', '-stdin', '-stdinrpcpass', input=f'{password}\nfoo\nbar').echo())
-assert_raises_process_error(1, 'Incorrect rpcuser or rpcpassword', self.nodes[0].cli('-rpcuser={}'.format(user), '-stdin', '-stdinrpcpass', input='foo').echo)
+assert_raises_process_error(1, 'Incorrect rpcuser or rpcpassword', self.nodes[0].cli(f'-rpcuser={user}', '-stdin', '-stdinrpcpass', input='foo').echo)

self.log.info("Test connecting to a non-existing server")
assert_raises_process_error(1, "Could not connect to the server", self.nodes[0].cli('-rpcport=1').echo)
@@ -150,8 +150,8 @@ class TestBitcoinCli(BitcoinTestFramework):
w1 = self.nodes[0].get_wallet_rpc(wallets[0])
w2 = self.nodes[0].get_wallet_rpc(wallets[1])
w3 = self.nodes[0].get_wallet_rpc(wallets[2])
-rpcwallet2 = '-rpcwallet={}'.format(wallets[1])
+rpcwallet2 = f'-rpcwallet={wallets[1]}'
-rpcwallet3 = '-rpcwallet={}'.format(wallets[2])
+rpcwallet3 = f'-rpcwallet={wallets[2]}'
w1.walletpassphrase(password, self.rpc_timeout)
w2.encryptwallet(password)
w1.sendtoaddress(w2.getnewaddress(), amounts[1])
@@ -162,7 +162,7 @@ class TestBitcoinCli(BitcoinTestFramework):

self.log.info("Test -getinfo with multiple wallets and -rpcwallet returns specified wallet balance")
for i in range(len(wallets)):
-cli_get_info_string = self.nodes[0].cli('-getinfo', '-rpcwallet={}'.format(wallets[i])).send_cli()
+cli_get_info_string = self.nodes[0].cli('-getinfo', f'-rpcwallet={wallets[i]}').send_cli()
cli_get_info = cli_get_info_string_to_dict(cli_get_info_string)
assert 'Balances' not in cli_get_info_string
assert_equal(cli_get_info["Wallet"], wallets[i])
@@ -296,7 +296,7 @@ class TestBitcoinCli(BitcoinTestFramework):
self.log.info("Test -version with node stopped")
self.stop_node(0)
cli_response = self.nodes[0].cli('-version').send_cli()
-assert "{} RPC client version".format(self.config['environment']['PACKAGE_NAME']) in cli_response
+assert f"{self.config['environment']['PACKAGE_NAME']} RPC client version" in cli_response

self.log.info("Test -rpcwait option successfully waits for RPC connection")
self.nodes[0].start() # start node without RPC connection

@@ -24,8 +24,8 @@ class HTTPBasicsTest (BitcoinTestFramework):
# lowlevel check for http persistent connection #
#################################################
url = urllib.parse.urlparse(self.nodes[0].url)
-authpair = url.username + ':' + url.password
+authpair = f'{url.username}:{url.password}'
-headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
+headers = {"Authorization": f"Basic {str_to_b64str(authpair)}"}

conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
@@ -42,7 +42,7 @@ class HTTPBasicsTest (BitcoinTestFramework):
conn.close()

#same should be if we add keep-alive because this should be the std. behaviour
-headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection": "keep-alive"}
+headers = {"Authorization": f"Basic {str_to_b64str(authpair)}", "Connection": "keep-alive"}

conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
@@ -59,7 +59,7 @@ class HTTPBasicsTest (BitcoinTestFramework):
conn.close()

#now do the same with "Connection: close"
-headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection":"close"}
+headers = {"Authorization": f"Basic {str_to_b64str(authpair)}", "Connection":"close"}

conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
@@ -70,8 +70,8 @@ class HTTPBasicsTest (BitcoinTestFramework):

#node1 (2nd node) is running with disabled keep-alive option
urlNode1 = urllib.parse.urlparse(self.nodes[1].url)
-authpair = urlNode1.username + ':' + urlNode1.password
+authpair = f'{urlNode1.username}:{urlNode1.password}'
-headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
+headers = {"Authorization": f"Basic {str_to_b64str(authpair)}"}

conn = http.client.HTTPConnection(urlNode1.hostname, urlNode1.port)
conn.connect()
@@ -81,8 +81,8 @@ class HTTPBasicsTest (BitcoinTestFramework):

#node2 (third node) is running with standard keep-alive parameters which means keep-alive is on
urlNode2 = urllib.parse.urlparse(self.nodes[2].url)
-authpair = urlNode2.username + ':' + urlNode2.password
+authpair = f'{urlNode2.username}:{urlNode2.password}'
-headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
+headers = {"Authorization": f"Basic {str_to_b64str(authpair)}"}

conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
@@ -94,13 +94,13 @@ class HTTPBasicsTest (BitcoinTestFramework):
# Check excessive request size
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
-conn.request('GET', '/' + ('x'*1000), '', headers)
+conn.request('GET', f'/{"x"*1000}', '', headers)
out1 = conn.getresponse()
assert_equal(out1.status, http.client.NOT_FOUND)

conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
-conn.request('GET', '/' + ('x'*10000), '', headers)
+conn.request('GET', f'/{"x"*10000}', '', headers)
out1 = conn.getresponse()
assert_equal(out1.status, http.client.BAD_REQUEST)

@@ -57,7 +57,7 @@ class RESTTest (BitcoinTestFramework):
rest_uri += '.hex'

conn = http.client.HTTPConnection(self.url.hostname, self.url.port)
-self.log.debug('%s %s %s', http_method, rest_uri, body)
+self.log.debug(f'{http_method} {rest_uri} {body}')
if http_method == 'GET':
conn.request('GET', rest_uri)
elif http_method == 'POST':
@@ -92,11 +92,11 @@ class RESTTest (BitcoinTestFramework):

self.log.info("Test the /tx URI")

-json_obj = self.test_rest_request("/tx/{}".format(txid))
+json_obj = self.test_rest_request(f"/tx/{txid}")
assert_equal(json_obj['txid'], txid)

# Check hex format response
-hex_response = self.test_rest_request("/tx/{}".format(txid), req_type=ReqType.HEX, ret_type=RetType.OBJ)
+hex_response = self.test_rest_request(f"/tx/{txid}", req_type=ReqType.HEX, ret_type=RetType.OBJ)
assert_greater_than_or_equal(int(hex_response.getheader('content-length')),
json_obj['size']*2)

@@ -114,7 +114,7 @@ class RESTTest (BitcoinTestFramework):
assert_equal(self.nodes[1].getbalance(), Decimal("0.1"))

# Check chainTip response
-json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending))
+json_obj = self.test_rest_request(f"/getutxos/{spending[0]}-{spending[1]}")
assert_equal(json_obj['chaintipHash'], bb_hash)

# Make sure there is one utxo
@@ -123,7 +123,7 @@ class RESTTest (BitcoinTestFramework):

self.log.info("Query a spent TXO using the /getutxos URI")

-json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spent))
+json_obj = self.test_rest_request(f"/getutxos/{spent[0]}-{spent[1]}")

# Check chainTip response
assert_equal(json_obj['chaintipHash'], bb_hash)
@@ -136,7 +136,7 @@ class RESTTest (BitcoinTestFramework):

self.log.info("Query two TXOs using the /getutxos URI")

-json_obj = self.test_rest_request("/getutxos/{}-{}/{}-{}".format(*(spending + spent)))
+json_obj = self.test_rest_request(f"/getutxos/{spending[0]}-{spending[1]}/{spent[0]}-{spent[1]}")

assert_equal(len(json_obj['utxos']), 1)
assert_equal(json_obj['bitmap'], "10")
@@ -163,32 +163,32 @@ class RESTTest (BitcoinTestFramework):

# do a tx and don't sync
txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
-json_obj = self.test_rest_request("/tx/{}".format(txid))
+json_obj = self.test_rest_request(f"/tx/{txid}")
# get the spent output to later check for utxo (should be spent by then)
spent = (json_obj['vin'][0]['txid'], json_obj['vin'][0]['vout'])
# get n of 0.1 outpoint
n, = filter_output_indices_by_value(json_obj['vout'], Decimal('0.1'))
spending = (txid, n)

-json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending))
+json_obj = self.test_rest_request(f"/getutxos/{spending[0]}-{spending[1]}")
assert_equal(len(json_obj['utxos']), 0)

-json_obj = self.test_rest_request("/getutxos/checkmempool/{}-{}".format(*spending))
+json_obj = self.test_rest_request(f"/getutxos/checkmempool/{spending[0]}-{spending[1]}")
assert_equal(len(json_obj['utxos']), 1)

-json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spent))
+json_obj = self.test_rest_request(f"/getutxos/{spent[0]}-{spent[1]}")
assert_equal(len(json_obj['utxos']), 1)

-json_obj = self.test_rest_request("/getutxos/checkmempool/{}-{}".format(*spent))
+json_obj = self.test_rest_request(f"/getutxos/checkmempool/{spent[0]}-{spent[1]}")
assert_equal(len(json_obj['utxos']), 0)

self.nodes[0].generate(1)
self.sync_all()

-json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending))
+json_obj = self.test_rest_request(f"/getutxos/{spending[0]}-{spending[1]}")
assert_equal(len(json_obj['utxos']), 1)

-json_obj = self.test_rest_request("/getutxos/checkmempool/{}-{}".format(*spending))
+json_obj = self.test_rest_request(f"/getutxos/checkmempool/{spending[0]}-{spending[1]}")
assert_equal(len(json_obj['utxos']), 1)

# Do some invalid requests
@@ -197,11 +197,11 @@ class RESTTest (BitcoinTestFramework):
self.test_rest_request("/getutxos/checkmempool", http_method='POST', req_type=ReqType.JSON, status=400, ret_type=RetType.OBJ)

# Test limits
-long_uri = '/'.join(["{}-{}".format(txid, n_) for n_ in range(20)])
+long_uri = '/'.join([f"{txid}-{n_}" for n_ in range(20)])
-self.test_rest_request("/getutxos/checkmempool/{}".format(long_uri), http_method='POST', status=400, ret_type=RetType.OBJ)
+self.test_rest_request(f"/getutxos/checkmempool/{long_uri}", http_method='POST', status=400, ret_type=RetType.OBJ)

-long_uri = '/'.join(['{}-{}'.format(txid, n_) for n_ in range(15)])
+long_uri = '/'.join([f'{txid}-{n_}' for n_ in range(15)])
-self.test_rest_request("/getutxos/checkmempool/{}".format(long_uri), http_method='POST', status=200)
+self.test_rest_request(f"/getutxos/checkmempool/{long_uri}", http_method='POST', status=200)

self.nodes[0].generate(1) # generate block to not affect upcoming tests
self.sync_all()
@@ -215,42 +215,42 @@ class RESTTest (BitcoinTestFramework):

# Check result if block is not in the active chain
self.nodes[0].invalidateblock(bb_hash)
-assert_equal(self.test_rest_request('/headers/1/{}'.format(bb_hash)), [])
+assert_equal(self.test_rest_request(f'/headers/1/{bb_hash}'), [])
-self.test_rest_request('/block/{}'.format(bb_hash))
+self.test_rest_request(f'/block/{bb_hash}')
self.nodes[0].reconsiderblock(bb_hash)

# Check binary format
-response = self.test_rest_request("/block/{}".format(bb_hash), req_type=ReqType.BIN, ret_type=RetType.OBJ)
+response = self.test_rest_request(f"/block/{bb_hash}", req_type=ReqType.BIN, ret_type=RetType.OBJ)
assert_greater_than(int(response.getheader('content-length')), BLOCK_HEADER_SIZE)
response_bytes = response.read()

# Compare with block header
-response_header = self.test_rest_request("/headers/1/{}".format(bb_hash), req_type=ReqType.BIN, ret_type=RetType.OBJ)
+response_header = self.test_rest_request(f"/headers/1/{bb_hash}", req_type=ReqType.BIN, ret_type=RetType.OBJ)
assert_equal(int(response_header.getheader('content-length')), BLOCK_HEADER_SIZE)
response_header_bytes = response_header.read()
assert_equal(response_bytes[:BLOCK_HEADER_SIZE], response_header_bytes)

# Check block hex format
-response_hex = self.test_rest_request("/block/{}".format(bb_hash), req_type=ReqType.HEX, ret_type=RetType.OBJ)
+response_hex = self.test_rest_request(f"/block/{bb_hash}", req_type=ReqType.HEX, ret_type=RetType.OBJ)
assert_greater_than(int(response_hex.getheader('content-length')), BLOCK_HEADER_SIZE*2)
response_hex_bytes = response_hex.read().strip(b'\n')
assert_equal(response_bytes.hex().encode(), response_hex_bytes)

# Compare with hex block header
-response_header_hex = self.test_rest_request("/headers/1/{}".format(bb_hash), req_type=ReqType.HEX, ret_type=RetType.OBJ)
+response_header_hex = self.test_rest_request(f"/headers/1/{bb_hash}", req_type=ReqType.HEX, ret_type=RetType.OBJ)
assert_greater_than(int(response_header_hex.getheader('content-length')), BLOCK_HEADER_SIZE*2)
response_header_hex_bytes = response_header_hex.read(BLOCK_HEADER_SIZE*2)
assert_equal(response_bytes[:BLOCK_HEADER_SIZE].hex().encode(), response_header_hex_bytes)

# Check json format
-block_json_obj = self.test_rest_request("/block/{}".format(bb_hash))
+block_json_obj = self.test_rest_request(f"/block/{bb_hash}")
assert_equal(block_json_obj['hash'], bb_hash)
-assert_equal(self.test_rest_request("/blockhashbyheight/{}".format(block_json_obj['height']))['blockhash'], bb_hash)
+assert_equal(self.test_rest_request(f"/blockhashbyheight/{block_json_obj['height']}")['blockhash'], bb_hash)

# Check hex/bin format
-resp_hex = self.test_rest_request("/blockhashbyheight/{}".format(block_json_obj['height']), req_type=ReqType.HEX, ret_type=RetType.OBJ)
+resp_hex = self.test_rest_request(f"/blockhashbyheight/{block_json_obj['height']}", req_type=ReqType.HEX, ret_type=RetType.OBJ)
assert_equal(resp_hex.read().decode('utf-8').rstrip(), bb_hash)
-resp_bytes = self.test_rest_request("/blockhashbyheight/{}".format(block_json_obj['height']), req_type=ReqType.BIN, ret_type=RetType.BYTES)
+resp_bytes = self.test_rest_request(f"/blockhashbyheight/{block_json_obj['height']}", req_type=ReqType.BIN, ret_type=RetType.BYTES)
blockhash = resp_bytes[::-1].hex()
assert_equal(blockhash, bb_hash)

@@ -264,7 +264,7 @@ class RESTTest (BitcoinTestFramework):
self.test_rest_request("/blockhashbyheight/", ret_type=RetType.OBJ, status=400)

# Compare with json block header
-json_obj = self.test_rest_request("/headers/1/{}".format(bb_hash))
+json_obj = self.test_rest_request(f"/headers/1/{bb_hash}")
assert_equal(len(json_obj), 1) # ensure that there is one header in the json response
assert_equal(json_obj[0]['hash'], bb_hash) # request/response hash should be the same

@@ -276,7 +276,7 @@ class RESTTest (BitcoinTestFramework):
# See if we can get 5 headers in one response
self.nodes[1].generate(5)
self.sync_all()
-json_obj = self.test_rest_request("/headers/5/{}".format(bb_hash))
+json_obj = self.test_rest_request(f"/headers/5/{bb_hash}")
assert_equal(len(json_obj), 5) # now we should have 5 header objects

self.log.info("Test tx inclusion in the /mempool and /block URIs")
@@ -306,13 +306,13 @@ class RESTTest (BitcoinTestFramework):
self.sync_all()

# Check if the 3 tx show up in the new block
-json_obj = self.test_rest_request("/block/{}".format(newblockhash[0]))
+json_obj = self.test_rest_request(f"/block/{newblockhash[0]}")
non_coinbase_txs = {tx['txid'] for tx in json_obj['tx']
if 'coinbase' not in tx['vin'][0]}
assert_equal(non_coinbase_txs, set(txs))

# Check the same but without tx details
-json_obj = self.test_rest_request("/block/notxdetails/{}".format(newblockhash[0]))
+json_obj = self.test_rest_request(f"/block/notxdetails/{newblockhash[0]}")
for tx in txs:
assert tx in json_obj['tx']

@@ -16,7 +16,7 @@ def expect_http_status(expected_http_status, expected_rpc_code,
fcn, *args):
try:
fcn(*args)
-raise AssertionError("Expected RPC error %d, got none" % expected_rpc_code)
+raise AssertionError(f"Expected RPC error {expected_rpc_code}, got none")
except JSONRPCException as exc:
assert_equal(exc.error["code"], expected_rpc_code)
assert_equal(exc.http_status, expected_http_status)

@@ -132,7 +132,7 @@ class ZMQTest (BitcoinTestFramework):
socket = self.ctx.socket(zmq.SUB)
subscribers.append(ZMQSubscriber(socket, topic.encode()))

-self.restart_node(0, ["-zmqpub%s=%s" % (topic, address) for topic, address in services] +
+self.restart_node(0, [f"-zmqpub{topic}={address}" for topic, address in services] +
self.extra_args[0])

for i, sub in enumerate(subscribers):
@@ -184,7 +184,7 @@ class ZMQTest (BitcoinTestFramework):
rawtx = subs[3]

num_blocks = 5
-self.log.info("Generate %(n)d blocks (and %(n)d coinbase txes)" % {"n": num_blocks})
+self.log.info(f"Generate {num_blocks} blocks (and {num_blocks} coinbase txes)")
genhashes = self.nodes[0].generatetoaddress(num_blocks, ADDRESS_BCRT1_UNSPENDABLE)

self.sync_all()
@@ -504,7 +504,7 @@ class ZMQTest (BitcoinTestFramework):
if mempool_sequence is not None:
zmq_mem_seq = mempool_sequence
if zmq_mem_seq > get_raw_seq:
-raise Exception("We somehow jumped mempool sequence numbers! zmq_mem_seq: {} > get_raw_seq: {}".format(zmq_mem_seq, get_raw_seq))
+raise Exception(f"We somehow jumped mempool sequence numbers! zmq_mem_seq: {zmq_mem_seq} > get_raw_seq: {get_raw_seq}")

# 4) Moving forward, we apply the delta to our local view
# remaining txs(5) + 1 rbf(A+R) + 1 block connect + 1 final tx
@@ -520,7 +520,7 @@ class ZMQTest (BitcoinTestFramework):
assert mempool_sequence > expected_sequence
r_gap += mempool_sequence - expected_sequence
else:
-raise Exception("WARNING: txhash has unexpected mempool sequence value: {} vs expected {}".format(mempool_sequence, expected_sequence))
+raise Exception(f"WARNING: txhash has unexpected mempool sequence value: {mempool_sequence} vs expected {expected_sequence}")
if label == "A":
assert hash_str not in mempool_view
mempool_view.add(hash_str)

@@ -65,10 +65,10 @@ class MiningTest(BitcoinTestFramework):
assert_equal(mining_info['currentblockweight'], 4000)

self.log.info('test blockversion')
-self.restart_node(0, extra_args=['-mocktime={}'.format(t), '-blockversion=1337'])
+self.restart_node(0, extra_args=[f'-mocktime={t}', '-blockversion=1337'])
self.connect_nodes(0, 1)
assert_equal(1337, self.nodes[0].getblocktemplate(NORMAL_GBT_REQUEST_PARAMS)['version'])
-self.restart_node(0, extra_args=['-mocktime={}'.format(t)])
+self.restart_node(0, extra_args=[f'-mocktime={t}'])
self.connect_nodes(0, 1)
assert_equal(VERSIONBITS_TOP_BITS + (1 << VERSIONBITS_DEPLOYMENT_TESTDUMMY_BIT), self.nodes[0].getblocktemplate(NORMAL_GBT_REQUEST_PARAMS)['version'])
self.restart_node(0)

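For reference, a minimal standalone sketch of the conversion pattern applied throughout this series. The addr string is taken from the feature_proxy.py hunk above; the usage value is a made-up placeholder, not output from a running node. It shows that the old %-formatting and str.format() calls and the new f-strings produce identical messages:

# Illustration only: hypothetical values, not taken from the test suite's runtime state.
addr = "node.noumenon:8333"
usage = 123

# Old styles being replaced across the functional tests:
old_percent = "Usage should be below target: %d" % usage
old_format = "Test: outgoing DNS name connection through node for address {}".format(addr)

# Equivalent f-strings (Python 3.6+):
new_percent = f"Usage should be below target: {usage}"
new_format = f"Test: outgoing DNS name connection through node for address {addr}"

assert new_percent == old_percent
assert new_format == old_format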