mirror of
https://github.com/bitcoin/bitcoin.git
synced 2024-11-19 01:42:58 +01:00
Merge bitcoin/bitcoin#31000: bench: add support for custom data directory
fa66e0887c
bench: add support for custom data directory (furszy) ad9c2cceda
test, bench: specialize working directory name (furszy) Pull request description: Expands the benchmark framework with the existing `-testdatadir` arg, enabling the ability to change the benchmark data directory. This is useful for running benchmarks on different storage devices, and not just under the OS `/tmp/` directory. A good use case is #28574, where we are benchmarking the wallet migration process on an HDD. ACKs for top commit: maflcko: re-ACK fa66e0887c
achow101: ACK fa66e0887c
tdb3: re ACK fa66e0887c
hodlinator: re-ACK fa66e0887c
pablomartin4btc: re-ACK fa66e0887c
Tree-SHA512: 4e87206c07e26fe193c07074ae9eb0cc9c70a58aeea8cf27d18fb5425d77e4b00dbe0e6d6a75c17b427744e9066458b9a84e5ef7b0420f02a4fccb9c5ef4dacc
This commit is contained in:
commit
4228259294
@ -27,9 +27,26 @@ using util::Join;
|
|||||||
|
|
||||||
const std::function<void(const std::string&)> G_TEST_LOG_FUN{};
|
const std::function<void(const std::string&)> G_TEST_LOG_FUN{};
|
||||||
|
|
||||||
const std::function<std::vector<const char*>()> G_TEST_COMMAND_LINE_ARGUMENTS{};
|
/**
|
||||||
|
* Retrieves the available test setup command line arguments that may be used
|
||||||
|
* in the benchmark. They will be used only if the benchmark utilizes a
|
||||||
|
* 'BasicTestingSetup' or any child of it.
|
||||||
|
*/
|
||||||
|
static std::function<std::vector<const char*>()> g_bench_command_line_args{};
|
||||||
|
const std::function<std::vector<const char*>()> G_TEST_COMMAND_LINE_ARGUMENTS = []() {
|
||||||
|
return g_bench_command_line_args();
|
||||||
|
};
|
||||||
|
|
||||||
const std::function<std::string()> G_TEST_GET_FULL_NAME{};
|
/**
|
||||||
|
* Retrieve the name of the currently in-use benchmark.
|
||||||
|
* This is applicable only to benchmarks that utilize the unit test
|
||||||
|
* framework context setup (e.g. ones using 'MakeNoLogFileContext<TestingSetup>()').
|
||||||
|
* It places the datadir of each benchmark run within their respective benchmark name.
|
||||||
|
*/
|
||||||
|
static std::string g_running_benchmark_name;
|
||||||
|
const std::function<std::string()> G_TEST_GET_FULL_NAME = []() {
|
||||||
|
return g_running_benchmark_name;
|
||||||
|
};
|
||||||
|
|
||||||
namespace {
|
namespace {
|
||||||
|
|
||||||
@ -94,6 +111,14 @@ void BenchRunner::RunAll(const Args& args)
|
|||||||
std::cout << "Running with -sanity-check option, output is being suppressed as benchmark results will be useless." << std::endl;
|
std::cout << "Running with -sanity-check option, output is being suppressed as benchmark results will be useless." << std::endl;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Load inner test setup args
|
||||||
|
g_bench_command_line_args = [&args]() {
|
||||||
|
std::vector<const char*> ret;
|
||||||
|
ret.reserve(args.setup_args.size());
|
||||||
|
for (const auto& arg : args.setup_args) ret.emplace_back(arg.c_str());
|
||||||
|
return ret;
|
||||||
|
};
|
||||||
|
|
||||||
std::vector<ankerl::nanobench::Result> benchmarkResults;
|
std::vector<ankerl::nanobench::Result> benchmarkResults;
|
||||||
for (const auto& [name, bench_func] : benchmarks()) {
|
for (const auto& [name, bench_func] : benchmarks()) {
|
||||||
const auto& [func, priority_level] = bench_func;
|
const auto& [func, priority_level] = bench_func;
|
||||||
@ -117,6 +142,7 @@ void BenchRunner::RunAll(const Args& args)
|
|||||||
bench.output(nullptr);
|
bench.output(nullptr);
|
||||||
}
|
}
|
||||||
bench.name(name);
|
bench.name(name);
|
||||||
|
g_running_benchmark_name = name;
|
||||||
if (args.min_time > 0ms) {
|
if (args.min_time > 0ms) {
|
||||||
// convert to nanos before dividing to reduce rounding errors
|
// convert to nanos before dividing to reduce rounding errors
|
||||||
std::chrono::nanoseconds min_time_ns = args.min_time;
|
std::chrono::nanoseconds min_time_ns = args.min_time;
|
||||||
|
@ -61,6 +61,7 @@ struct Args {
|
|||||||
fs::path output_json;
|
fs::path output_json;
|
||||||
std::string regex_filter;
|
std::string regex_filter;
|
||||||
uint8_t priority;
|
uint8_t priority;
|
||||||
|
std::vector<std::string> setup_args;
|
||||||
};
|
};
|
||||||
|
|
||||||
class BenchRunner
|
class BenchRunner
|
||||||
|
@ -8,6 +8,7 @@
|
|||||||
#include <tinyformat.h>
|
#include <tinyformat.h>
|
||||||
#include <util/fs.h>
|
#include <util/fs.h>
|
||||||
#include <util/string.h>
|
#include <util/string.h>
|
||||||
|
#include <test/util/setup_common.h>
|
||||||
|
|
||||||
#include <chrono>
|
#include <chrono>
|
||||||
#include <cstdint>
|
#include <cstdint>
|
||||||
@ -27,6 +28,7 @@ static const std::string DEFAULT_PRIORITY{"all"};
|
|||||||
static void SetupBenchArgs(ArgsManager& argsman)
|
static void SetupBenchArgs(ArgsManager& argsman)
|
||||||
{
|
{
|
||||||
SetupHelpOptions(argsman);
|
SetupHelpOptions(argsman);
|
||||||
|
SetupCommonTestArgs(argsman);
|
||||||
|
|
||||||
argsman.AddArg("-asymptote=<n1,n2,n3,...>", "Test asymptotic growth of the runtime of an algorithm, if supported by the benchmark", ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS);
|
argsman.AddArg("-asymptote=<n1,n2,n3,...>", "Test asymptotic growth of the runtime of an algorithm, if supported by the benchmark", ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS);
|
||||||
argsman.AddArg("-filter=<regex>", strprintf("Regular expression filter to select benchmark by name (default: %s)", DEFAULT_BENCH_FILTER), ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS);
|
argsman.AddArg("-filter=<regex>", strprintf("Regular expression filter to select benchmark by name (default: %s)", DEFAULT_BENCH_FILTER), ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS);
|
||||||
@ -60,6 +62,18 @@ static uint8_t parsePriorityLevel(const std::string& str) {
|
|||||||
return levels;
|
return levels;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
static std::vector<std::string> parseTestSetupArgs(const ArgsManager& argsman)
|
||||||
|
{
|
||||||
|
// Parses unit test framework arguments supported by the benchmark framework.
|
||||||
|
std::vector<std::string> args;
|
||||||
|
static std::vector<std::string> AVAILABLE_ARGS = {"-testdatadir"};
|
||||||
|
for (const std::string& arg_name : AVAILABLE_ARGS) {
|
||||||
|
auto op_arg = argsman.GetArg(arg_name);
|
||||||
|
if (op_arg) args.emplace_back(strprintf("%s=%s", arg_name, *op_arg));
|
||||||
|
}
|
||||||
|
return args;
|
||||||
|
}
|
||||||
|
|
||||||
int main(int argc, char** argv)
|
int main(int argc, char** argv)
|
||||||
{
|
{
|
||||||
ArgsManager argsman;
|
ArgsManager argsman;
|
||||||
@ -131,6 +145,7 @@ int main(int argc, char** argv)
|
|||||||
args.regex_filter = argsman.GetArg("-filter", DEFAULT_BENCH_FILTER);
|
args.regex_filter = argsman.GetArg("-filter", DEFAULT_BENCH_FILTER);
|
||||||
args.sanity_check = argsman.GetBoolArg("-sanity-check", false);
|
args.sanity_check = argsman.GetBoolArg("-sanity-check", false);
|
||||||
args.priority = parsePriorityLevel(argsman.GetArg("-priority-level", DEFAULT_PRIORITY));
|
args.priority = parsePriorityLevel(argsman.GetArg("-priority-level", DEFAULT_PRIORITY));
|
||||||
|
args.setup_args = parseTestSetupArgs(argsman);
|
||||||
|
|
||||||
benchmark::BenchRunner::RunAll(args);
|
benchmark::BenchRunner::RunAll(args);
|
||||||
|
|
||||||
|
@ -75,8 +75,6 @@ using node::VerifyLoadedChainstate;
|
|||||||
const std::function<std::string(const char*)> G_TRANSLATION_FUN = nullptr;
|
const std::function<std::string(const char*)> G_TRANSLATION_FUN = nullptr;
|
||||||
|
|
||||||
constexpr inline auto TEST_DIR_PATH_ELEMENT{"test_common bitcoin"}; // Includes a space to catch possible path escape issues.
|
constexpr inline auto TEST_DIR_PATH_ELEMENT{"test_common bitcoin"}; // Includes a space to catch possible path escape issues.
|
||||||
/** Random context to get unique temp data dirs. Separate from m_rng, which can be seeded from a const env var */
|
|
||||||
static FastRandomContext g_rng_temp_path;
|
|
||||||
|
|
||||||
struct NetworkSetup
|
struct NetworkSetup
|
||||||
{
|
{
|
||||||
@ -87,8 +85,7 @@ struct NetworkSetup
|
|||||||
};
|
};
|
||||||
static NetworkSetup g_networksetup_instance;
|
static NetworkSetup g_networksetup_instance;
|
||||||
|
|
||||||
/** Register test-only arguments */
|
void SetupCommonTestArgs(ArgsManager& argsman)
|
||||||
static void SetupUnitTestArgs(ArgsManager& argsman)
|
|
||||||
{
|
{
|
||||||
argsman.AddArg("-testdatadir", strprintf("Custom data directory (default: %s<random_string>)", fs::PathToString(fs::temp_directory_path() / TEST_DIR_PATH_ELEMENT / "")),
|
argsman.AddArg("-testdatadir", strprintf("Custom data directory (default: %s<random_string>)", fs::PathToString(fs::temp_directory_path() / TEST_DIR_PATH_ELEMENT / "")),
|
||||||
ArgsManager::ALLOW_ANY, OptionsCategory::DEBUG_TEST);
|
ArgsManager::ALLOW_ANY, OptionsCategory::DEBUG_TEST);
|
||||||
@ -127,7 +124,7 @@ BasicTestingSetup::BasicTestingSetup(const ChainType chainType, TestOpts opts)
|
|||||||
gArgs.ClearPathCache();
|
gArgs.ClearPathCache();
|
||||||
{
|
{
|
||||||
SetupServerArgs(*m_node.args);
|
SetupServerArgs(*m_node.args);
|
||||||
SetupUnitTestArgs(*m_node.args);
|
SetupCommonTestArgs(*m_node.args);
|
||||||
std::string error;
|
std::string error;
|
||||||
if (!m_node.args->ParseParameters(arguments.size(), arguments.data(), error)) {
|
if (!m_node.args->ParseParameters(arguments.size(), arguments.data(), error)) {
|
||||||
m_node.args->ClearArgs();
|
m_node.args->ClearArgs();
|
||||||
@ -139,10 +136,10 @@ BasicTestingSetup::BasicTestingSetup(const ChainType chainType, TestOpts opts)
|
|||||||
// data directories use a random name that doesn't overlap with other tests.
|
// data directories use a random name that doesn't overlap with other tests.
|
||||||
SeedRandomForTest(SeedRand::FIXED_SEED);
|
SeedRandomForTest(SeedRand::FIXED_SEED);
|
||||||
|
|
||||||
|
const std::string test_name{G_TEST_GET_FULL_NAME ? G_TEST_GET_FULL_NAME() : ""};
|
||||||
if (!m_node.args->IsArgSet("-testdatadir")) {
|
if (!m_node.args->IsArgSet("-testdatadir")) {
|
||||||
// By default, the data directory has a random name
|
const auto now{TicksSinceEpoch<std::chrono::nanoseconds>(SystemClock::now())};
|
||||||
const auto rand_str{g_rng_temp_path.rand256().ToString()};
|
m_path_root = fs::temp_directory_path() / TEST_DIR_PATH_ELEMENT / test_name / util::ToString(now);
|
||||||
m_path_root = fs::temp_directory_path() / TEST_DIR_PATH_ELEMENT / rand_str;
|
|
||||||
TryCreateDirectories(m_path_root);
|
TryCreateDirectories(m_path_root);
|
||||||
} else {
|
} else {
|
||||||
// Custom data directory
|
// Custom data directory
|
||||||
@ -151,8 +148,7 @@ BasicTestingSetup::BasicTestingSetup(const ChainType chainType, TestOpts opts)
|
|||||||
if (root_dir.empty()) ExitFailure("-testdatadir argument is empty, please specify a path");
|
if (root_dir.empty()) ExitFailure("-testdatadir argument is empty, please specify a path");
|
||||||
|
|
||||||
root_dir = fs::absolute(root_dir);
|
root_dir = fs::absolute(root_dir);
|
||||||
const std::string test_path{G_TEST_GET_FULL_NAME ? G_TEST_GET_FULL_NAME() : ""};
|
m_path_lock = root_dir / TEST_DIR_PATH_ELEMENT / fs::PathFromString(test_name);
|
||||||
m_path_lock = root_dir / TEST_DIR_PATH_ELEMENT / fs::PathFromString(test_path);
|
|
||||||
m_path_root = m_path_lock / "datadir";
|
m_path_root = m_path_lock / "datadir";
|
||||||
|
|
||||||
// Try to obtain the lock; if unsuccessful don't disturb the existing test.
|
// Try to obtain the lock; if unsuccessful don't disturb the existing test.
|
||||||
|
@ -45,6 +45,9 @@ extern const std::function<std::string()> G_TEST_GET_FULL_NAME;
|
|||||||
|
|
||||||
static constexpr CAmount CENT{1000000};
|
static constexpr CAmount CENT{1000000};
|
||||||
|
|
||||||
|
/** Register common test args. Shared across binaries that rely on the test framework. */
|
||||||
|
void SetupCommonTestArgs(ArgsManager& argsman);
|
||||||
|
|
||||||
struct TestOpts {
|
struct TestOpts {
|
||||||
std::vector<const char*> extra_args{};
|
std::vector<const char*> extra_args{};
|
||||||
bool coins_db_in_memory{true};
|
bool coins_db_in_memory{true};
|
||||||
|
Loading…
Reference in New Issue
Block a user