mirror of https://github.com/OpenRCT2/OpenRCT2.git
Allow specifying more than one input file for sprite sort benchmark
This commit is contained in:
parent
b38e600b15
commit
cba2dd96ec
|
@ -159,24 +159,6 @@ static void BM_paint_session_arrange(benchmark::State& state, const std::vector<
|
||||||
|
|
||||||
static int cmdline_for_bench_sprite_sort(int argc, const char** argv)
|
static int cmdline_for_bench_sprite_sort(int argc, const char** argv)
|
||||||
{
|
{
|
||||||
const char* inputFileName = nullptr;
|
|
||||||
int argStart = 0;
|
|
||||||
if (argc >= 1 && platform_file_exists(argv[0]))
|
|
||||||
{
|
|
||||||
inputFileName = argv[0];
|
|
||||||
argStart = 1; // skip file name
|
|
||||||
}
|
|
||||||
// Google benchmark does stuff to argv. It doesn't modify the pointees,
|
|
||||||
// but it wants to reorder the pointers, so present a copy of them.
|
|
||||||
std::vector<char*> argv_for_benchmark;
|
|
||||||
// argv[0] is expected to contain the binary name. It's only for logging purposes, don't bother.
|
|
||||||
argv_for_benchmark.push_back(nullptr);
|
|
||||||
for (int i = argStart; i < argc; i++)
|
|
||||||
{
|
|
||||||
argv_for_benchmark.push_back((char*)argv[i]);
|
|
||||||
}
|
|
||||||
// Update argc with all the changes made
|
|
||||||
argc = (int)argv_for_benchmark.size();
|
|
||||||
{
|
{
|
||||||
// Register some basic "baseline" benchmark
|
// Register some basic "baseline" benchmark
|
||||||
std::vector<paint_session> sessions(1);
|
std::vector<paint_session> sessions(1);
|
||||||
|
@ -190,13 +172,31 @@ static int cmdline_for_bench_sprite_sort(int argc, const char** argv)
|
||||||
}
|
}
|
||||||
benchmark::RegisterBenchmark("baseline", BM_paint_session_arrange, sessions);
|
benchmark::RegisterBenchmark("baseline", BM_paint_session_arrange, sessions);
|
||||||
}
|
}
|
||||||
if (inputFileName != nullptr)
|
|
||||||
|
// Google benchmark does stuff to argv. It doesn't modify the pointees,
|
||||||
|
// but it wants to reorder the pointers, so present a copy of them.
|
||||||
|
std::vector<char*> argv_for_benchmark;
|
||||||
|
|
||||||
|
// argv[0] is expected to contain the binary name. It's only for logging purposes, don't bother.
|
||||||
|
argv_for_benchmark.push_back(nullptr);
|
||||||
|
|
||||||
|
// Extract file names from argument list. If there is no such file, consider it benchmark option.
|
||||||
|
for (int i = 0; i < argc; i++)
|
||||||
{
|
{
|
||||||
// Register benchmark for sv6 if valid
|
if (platform_file_exists(argv[i]))
|
||||||
std::vector<paint_session> sessions = extract_paint_session(inputFileName);
|
{
|
||||||
if (!sessions.empty())
|
// Register benchmark for sv6 if valid
|
||||||
benchmark::RegisterBenchmark(inputFileName, BM_paint_session_arrange, sessions);
|
std::vector<paint_session> sessions = extract_paint_session(argv[i]);
|
||||||
|
if (!sessions.empty())
|
||||||
|
benchmark::RegisterBenchmark(argv[i], BM_paint_session_arrange, sessions);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
argv_for_benchmark.push_back((char*)argv[i]);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
// Update argc with all the changes made
|
||||||
|
argc = (int)argv_for_benchmark.size();
|
||||||
::benchmark::Initialize(&argc, &argv_for_benchmark[0]);
|
::benchmark::Initialize(&argc, &argv_for_benchmark[0]);
|
||||||
if (::benchmark::ReportUnrecognizedArguments(argc, &argv_for_benchmark[0]))
|
if (::benchmark::ReportUnrecognizedArguments(argc, &argv_for_benchmark[0]))
|
||||||
return -1;
|
return -1;
|
||||||
|
@ -228,7 +228,7 @@ const CommandLineCommand CommandLine::BenchSpriteSortCommands[]{
|
||||||
#ifdef USE_BENCHMARK
|
#ifdef USE_BENCHMARK
|
||||||
DefineCommand(
|
DefineCommand(
|
||||||
"",
|
"",
|
||||||
"[<file>] [--benchmark_list_tests={true|false}] [--benchmark_filter=<regex>] [--benchmark_min_time=<min_time>] "
|
"[<file>]... [--benchmark_list_tests={true|false}] [--benchmark_filter=<regex>] [--benchmark_min_time=<min_time>] "
|
||||||
"[--benchmark_repetitions=<num_repetitions>] [--benchmark_report_aggregates_only={true|false}] "
|
"[--benchmark_repetitions=<num_repetitions>] [--benchmark_report_aggregates_only={true|false}] "
|
||||||
"[--benchmark_format=<console|json|csv>] [--benchmark_out=<filename>] [--benchmark_out_format=<json|console|csv>] "
|
"[--benchmark_format=<console|json|csv>] [--benchmark_out=<filename>] [--benchmark_out_format=<json|console|csv>] "
|
||||||
"[--benchmark_color={auto|true|false}] [--benchmark_counters_tabular={true|false}] [--v=<verbosity>]",
|
"[--benchmark_color={auto|true|false}] [--benchmark_counters_tabular={true|false}] [--v=<verbosity>]",
|
||||||
|
|
Loading…
Reference in New Issue