#ifndef CHEBYSHEV_BENCHMARK_H
#define CHEBYSHEV_BENCHMARK_H
/// Measure the total runtime of a function over the given input for a single run.
template<typename InputType, typename Function>
inline long double runtime(
	Function func,
	const std::vector<InputType>& input) {

	if (input.size() == 0)
		return 0.0;

	// Evaluate the function once into a volatile variable so the call cannot be optimized away.
	__volatile__ auto c = func(input[0]);

	// Time the evaluation of the function over the whole input set.
	timer t = timer();

	for (unsigned int j = 0; j < input.size(); ++j)
		c += func(input[j]);

	return t();
}
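The function above evaluates func once into a volatile variable so the compiler cannot discard the calls, then times a pass over the whole input vector. The following standalone sketch reproduces the same pattern with std::chrono instead of the framework's timer class; the helper name runtime_ms and the choice of steady_clock are illustrative assumptions, not part of the library.

#include <chrono>
#include <cmath>
#include <iostream>
#include <vector>

// Standalone sketch of the timing pattern above, using std::chrono.
template<typename InputType, typename Function>
long double runtime_ms(Function func, const std::vector<InputType>& input) {

	if (input.empty())
		return 0.0;

	// Warm-up call; the volatile sink keeps the work from being optimized away.
	volatile auto sink = func(input[0]);

	const auto start = std::chrono::steady_clock::now();

	for (std::size_t j = 0; j < input.size(); ++j)
		sink = sink + func(input[j]);

	const auto end = std::chrono::steady_clock::now();
	return std::chrono::duration<long double, std::milli>(end - start).count();
}

int main() {
	std::vector<double> input(100000, 1.2345);
	std::cout << runtime_ms([](double x) { return std::sqrt(x); }, input)
	          << " ms" << std::endl;
}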
88 "name",
"averageRuntime",
"stdevRuntime",
"runsPerSecond"
/// Results of the benchmarks, indexed by name.
std::map<std::string, std::vector<benchmark_result>> benchmarkResults {};

/// Mutex guarding concurrent access to the results.
std::mutex benchmarkMutex;

/// Threads used to run benchmarks concurrently.
std::vector<std::thread> benchmarkThreads {};

/// Whether the context has already been terminated.
bool wasTerminated {false};

/// Output module settings for the context (possibly shared between multiple contexts).
std::shared_ptr<output::output_context> output;

/// Random module settings for the context (possibly shared between multiple contexts).
std::shared_ptr<random::random_context> random;
/// Setup the benchmark environment.
inline void setup(
	std::string moduleName,
	int argc = 0,
	const char** argv = nullptr) {

	// ...

	output = std::make_shared<output::output_context>();
	random = std::make_shared<random::random_context>();

	// Parse the command line arguments, if any.
	for (int i = 1; i < argc; ++i) {
		// ...
	}

	std::cout << "Starting benchmarks of the ";
	std::cout << moduleName << " module ..." << std::endl;

	// ...

	benchmarkResults.clear();
	wasTerminated = false;
}
/// Terminate the benchmarking environment.
inline void terminate(bool exit = false) {

	// ...

	std::lock_guard<std::mutex> lock(benchmarkMutex);

	// Count the total and failed benchmarks.
	unsigned int totalBenchmarks = 0;
	unsigned int failedBenchmarks = 0;

	for (const auto& pair : benchmarkResults) {
		for (const auto& testCase : pair.second) {
			totalBenchmarks++;
			failedBenchmarks += testCase.failed ? 1 : 0;
		}
	}

	// Decide which files the results should be written to.
	if (output->settings.outputToFile &&
		!output->settings.outputFiles.size() /* && ... */) {
		// ...
	}

	std::vector<std::string> outputFiles;

	// ...

	// Print a summary of the results.
	std::cout << totalBenchmarks << " total tests, ";
	std::cout << failedBenchmarks << " failed";

	if (totalBenchmarks > 0) {
		const double percent = (failedBenchmarks / (double) totalBenchmarks) * 100;
		std::cout << " (" << std::setprecision(3) << percent << "%)" << std::endl;
	} else {
		std::cout << "\nNo benchmarks were run!" << std::endl;
	}

	// ...

	// Exit the program, using the number of failed benchmarks as exit code.
	if (exit)
		std::exit(failedBenchmarks);

	wasTerminated = true;
}
/// Default constructor setting up the context.
benchmark_context(std::string moduleName, int argc = 0, const char** argv = nullptr) {
	setup(moduleName, argc, argv);
}

/// Custom copy constructor to avoid copying std::mutex.
benchmark_context(const benchmark_context& other) {
	std::lock_guard<std::mutex> lock(benchmarkMutex);
	benchmarkResults = other.benchmarkResults;
	// ...
}

/// Custom assignment operator to avoid copying std::mutex.
benchmark_context& operator=(const benchmark_context& other) {
	std::lock_guard<std::mutex> lock(benchmarkMutex);
	benchmarkResults = other.benchmarkResults;
	// ...
	return *this;
}
/// Run a benchmark on a generic function, with the given input vector.
template<typename InputType = double, typename Function>
inline void benchmark(
	const std::string& name,
	Function func,
	const std::vector<InputType>& input,
	unsigned int runs = 0,
	bool quiet = false) {

	// ...

	auto task = [this, name, func, input, runs, quiet]() {

		// Running average of the runtime per input element.
		long double averageRuntime;

		// Running sum of squared deviations (Welford's online algorithm).
		long double sumSquares;

		// Total runtime over all runs.
		long double totalRuntime;

		// First run.
		totalRuntime = runtime(func, input);
		averageRuntime = totalRuntime / input.size();

		for (unsigned int i = 1; i < runs; ++i) {

			const long double currentRun = runtime(func, input);
			const long double currentAverage = currentRun / input.size();
			totalRuntime += currentRun;

			// Update the running mean and sum of squares.
			const long double tmp = averageRuntime;
			averageRuntime = tmp + (currentAverage - tmp) / (i + 1);
			sumSquares += (currentAverage - tmp)
			            * (currentAverage - averageRuntime);
		}

		// Fill in the result structure.
		benchmark_result res;
		res.name = name;
		res.iterations = input.size();
		res.totalRuntime = totalRuntime;
		res.averageRuntime = averageRuntime;
		res.runsPerSecond = 1000.0 / res.averageRuntime;

		// ...

		res.stdevRuntime = std::sqrt(sumSquares / (runs - 1));

		std::lock_guard<std::mutex> lock(benchmarkMutex);
		benchmarkResults[name].push_back(res);
	};

	// Run the task on a separate thread if multithreading is enabled.
	settings.multithreading ?
		benchmarkThreads.emplace_back(task) : task();
}
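Note that the task above does not store the individual runs: it keeps a running mean and a running sum of squared deviations (Welford's online algorithm) and derives the sample standard deviation at the end. A self-contained sketch of the same update on a few fixed sample values (illustrative numbers only):

#include <cmath>
#include <cstdio>
#include <vector>

int main() {

	// Illustrative per-run averages; in the benchmark these come from runtime().
	const std::vector<long double> samples = { 1.0L, 2.0L, 4.0L, 8.0L };

	long double average = samples[0];
	long double sumSquares = 0.0L;

	// Welford's online update, as in the loop above.
	for (std::size_t i = 1; i < samples.size(); ++i) {
		const long double tmp = average;
		average = tmp + (samples[i] - tmp) / (i + 1);
		sumSquares += (samples[i] - tmp) * (samples[i] - average);
	}

	// Sample standard deviation, as stored in res.stdevRuntime.
	const long double stdev = std::sqrt(sumSquares / (samples.size() - 1));
	std::printf("mean = %Lf, stdev = %Lf\n", average, stdev);
}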
/// Run a benchmark on a generic function, with the given options.
template<typename InputType = double, typename Function>
inline void benchmark(
	const std::string& name,
	Function func,
	benchmark_options<InputType> opt) {

	// ...

	// Generate the input set using the options' input generator.
	std::vector<InputType> input (opt.iterations);

	for (unsigned int i = 0; i < opt.iterations; ++i) {
		// ...
	}

	benchmarkThreads.emplace_back([this, input, name, func, opt, seed]() {

		// Running average of the runtime per input element.
		long double averageRuntime;

		// Running sum of squared deviations (Welford's online algorithm).
		long double sumSquares;

		// Total runtime over all runs.
		long double totalRuntime;

		// First run.
		totalRuntime = runtime(func, input);
		averageRuntime = totalRuntime / input.size();

		for (unsigned int i = 1; i < opt.runs; ++i) {

			const long double currentRun = runtime(func, input);
			const long double currentAverage = currentRun / input.size();
			totalRuntime += currentRun;

			// Update the running mean and sum of squares.
			const long double tmp = averageRuntime;
			averageRuntime = tmp + (currentAverage - tmp) / (i + 1);
			sumSquares += (currentAverage - tmp)
			            * (currentAverage - averageRuntime);
		}

		// Fill in the result structure.
		benchmark_result res;
		res.name = name;
		res.iterations = input.size();
		res.totalRuntime = totalRuntime;
		res.averageRuntime = averageRuntime;
		res.runsPerSecond = 1000.0 / res.averageRuntime;
		res.quiet = opt.quiet;

		// ...

		res.stdevRuntime = std::sqrt(sumSquares / (opt.runs - 1));

		std::lock_guard<std::mutex> lock(benchmarkMutex);
		benchmarkResults[name].push_back(res);
	});
}
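A possible call site for this options-based overload is sketched below, given an existing context ctx; it assumes the chebyshev::benchmark namespace, that benchmark_options is default-constructible with its documented fields publicly assignable, and it ignores the provided random source because random_source's sampling interface is not shown in this excerpt.

// Hedged usage sketch for the options-based overload.
chebyshev::benchmark::benchmark_options<double> opt;
opt.runs = 10;          // repeat the timed pass 10 times
opt.iterations = 10000; // generate 10000 input values
opt.quiet = false;

// InputGenerator<double> is a std::function<double(random::random_source&)>;
// the random source parameter is ignored here, since its sampling interface
// does not appear in the listing above.
opt.inputGenerator = [](chebyshev::random::random_source&) {
	return 1.2345;
};

ctx.benchmark("std::sqrt", [](double x) { return std::sqrt(x); }, opt);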
/// Run a benchmark on a generic function, with the given argument options.
template<typename InputType = double, typename Function>
inline void benchmark(
	const std::string& name,
	Function func,
	InputGenerator<InputType> inputGenerator,
	unsigned int runs = 0,
	unsigned int iterations = 0,
	bool quiet = false) {

	// ...
}
/// Wait for all concurrent benchmarks to finish execution.
inline void wait_results() {
	for (auto& t : benchmarkThreads)
		if (t.joinable())
			t.join();
	benchmarkThreads.clear();
}
/// Get a list of benchmark results associated to the given name or label.
inline std::vector<benchmark_result> get_benchmark(const std::string& name) {
	return benchmarkResults[name];
}

/// Get a benchmark result associated to the given name or label and index.
inline benchmark_result get_benchmark(const std::string& name, unsigned int i) {
	return benchmarkResults[name].at(i);
}
/// Construct a benchmarking context with the given parameters.
inline benchmark_context make_context(
	const std::string& moduleName, int argc = 0, const char** argv = nullptr) {
	return benchmark_context(moduleName, argc, argv);
}
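Putting the pieces together, a minimal benchmark program based on this header might look like the sketch below; the include path and the chebyshev::benchmark namespace are assumptions inferred from this excerpt, not confirmed by it.

#include <cmath>
#include <vector>
#include "chebyshev/benchmark.h"  // assumed include path

// A real-valued function to benchmark.
double f(double x) {
	return std::sqrt(std::abs(std::sin(x)));
}

int main() {

	using namespace chebyshev;

	// Set up a benchmarking context for the "example" module.
	benchmark::benchmark_context ctx = benchmark::make_context("example");

	// Benchmark f over a fixed input vector, averaged over 10 runs.
	std::vector<double> input(100000, 0.5);
	ctx.benchmark("f", f, input, 10);

	// Wait for concurrent benchmarks to finish, print the summary and
	// exit with the number of failed benchmarks as the status code.
	ctx.wait_results();
	ctx.terminate(true);
}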
Structures for the benchmark module.
benchmark_context
Benchmark module context, handling benchmark requests concurrently.
Definition benchmark.h:103
void wait_results()
Wait for all concurrent benchmarks to finish execution.
Definition benchmark.h:487
benchmark_context(std::string moduleName, int argc=0, const char **argv=nullptr)
Default constructor setting up the context.
Definition benchmark.h:237
benchmark_context(const benchmark_context &other)
Custom copy constructor to avoid copying std::mutex.
Definition benchmark.h:254
void benchmark(const std::string &name, Function func, benchmark_options< InputType > opt)
Run a benchmark on a generic function, with the given options.
Definition benchmark.h:369
benchmark_context & operator=(const benchmark_context &other)
Custom assignment operator to avoid copying std::mutex.
Definition benchmark.h:265
void setup(std::string moduleName, int argc=0, const char **argv=nullptr)
Setup the benchmark environment.
Definition benchmark.h:140
void benchmark(const std::string &name, Function func, InputGenerator< InputType > inputGenerator, unsigned int runs=0, unsigned int iterations=0, bool quiet=false)
Run a benchmark on a generic function, with the given argument options.
Definition benchmark.h:462
std::shared_ptr< random::random_context > random
Random module settings for the context, dynamically allocated and possibly shared between multiple co...
Definition benchmark.h:131
std::vector< benchmark_result > get_benchmark(const std::string &name)
Get a list of benchmark results associated to the given name or label.
Definition benchmark.h:503
benchmark_result get_benchmark(const std::string &name, unsigned int i)
Get a benchmark result associated to the given name or label and index.
Definition benchmark.h:516
void benchmark(const std::string &name, Function func, const std::vector< InputType > &input, unsigned int runs=0, bool quiet=false)
Run a benchmark on a generic function, with the given input vector.
Definition benchmark.h:288
benchmark_settings settings
Settings for the benchmark context.
Definition benchmark.h:123
~benchmark_context()
Terminate the benchmark module.
Definition benchmark.h:246
std::shared_ptr< output::output_context > output
Output module settings for the context, dynamically allocated and possibly shared between multiple co...
Definition benchmark.h:127
void terminate(bool exit=false)
Terminate the benchmarking environment.
Definition benchmark.h:168
timer
Timer class to measure elapsed time in milliseconds.
Definition timer.h:18
#define CHEBYSHEV_BENCHMARK_ITER
Default number of benchmark iterations.
Definition common.h:22
#define CHEBYSHEV_BENCHMARK_RUNS
Default number of benchmark runs.
Definition common.h:27
Input generators for benchmarks.
long double runtime(Function func, const std::vector< InputType > &input)
Measure the total runtime of a function over the given input for a single run.
Definition benchmark.h:44
std::function< InputType(random::random_source &)> InputGenerator
A function which takes in a random source and returns a generated input element.
Definition benchmark_structures.h:65
benchmark_context make_context(const std::string &moduleName, int argc=0, const char **argv=nullptr)
Construct a benchmarking context with the given parameters.
Definition benchmark.h:530
chebyshev
General namespace of the framework.
Definition benchmark.h:22
The output module, with formatting capabilities.
The pseudorandom number generation and sampling module.
benchmark_options
A structure holding the options of a benchmark.
Definition benchmark_structures.h:71
unsigned int iterations
Number of iterations.
Definition benchmark_structures.h:77
uint64_t seed
The seed to use for randomized input generation (by default, a random seed is generated using the ran...
Definition benchmark_structures.h:87
bool quiet
Whether to print to standard output or not.
Definition benchmark_structures.h:83
unsigned int runs
Number of runs (run with the same input values).
Definition benchmark_structures.h:74
InputGenerator< InputType > inputGenerator
The function to use to generate input for the benchmark.
Definition benchmark_structures.h:80
benchmark_result
Structure holding the results of a benchmark.
Definition benchmark_structures.h:23
std::string name
Identifying name of the function or test case.
Definition benchmark_structures.h:26
benchmark_settings
Global settings of the benchmark module, used in benchmark_context.
Definition benchmark.h:64
std::vector< std::string > benchmarkOutputFiles
The files to write benchmark results to (if empty, all results are output to a generic file).
Definition benchmark.h:84
unsigned int defaultRuns
Default number of runs.
Definition benchmark.h:73
std::map< std::string, bool > pickedBenchmarks
Target benchmarks marked for execution (all benchmarks will be executed if empty)
Definition benchmark.h:80
std::vector< std::string > benchmarkColumns
Default columns to print for benchmarks.
Definition benchmark.h:87
std::string moduleName
Name of the module currently being benchmarked.
Definition benchmark.h:67
bool multithreading
Whether to use multithreading for the execution of benchmarks (defaults to true).
Definition benchmark.h:96
unsigned int defaultIterations
Default number of iterations.
Definition benchmark.h:70
std::vector< std::string > outputFiles
The files to write all benchmark results to.
Definition benchmark.h:76
random::random_source
A source of pseudorandom numbers.
Definition random.h:39
uint64_t get_seed()
Get the seed used to generate the random source.
Definition random.h:59
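The settings listed above are exposed through the context's settings member (see benchmark_settings above); the following hedged sketch shows how they might be adjusted before queuing benchmarks, assuming the member is publicly accessible as documented.

// Hedged sketch: adjust the context's settings before running benchmarks.
chebyshev::benchmark::benchmark_context ctx =
	chebyshev::benchmark::make_context("example");

ctx.settings.defaultRuns = 25;           // runs per benchmark when unspecified
ctx.settings.defaultIterations = 100000; // inputs generated per run when unspecified
ctx.settings.multithreading = false;     // execute benchmarks on the calling thread
ctx.settings.benchmarkColumns = {
	"name", "averageRuntime", "stdevRuntime", "runsPerSecond"
};

// Only benchmarks marked here are executed (all of them if the map is empty).
ctx.settings.pickedBenchmarks["f"] = true;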