Theoretica
A C++ numerical and automatic mathematical library
benchmark.h
1 
5 
6 #ifndef CHEBYSHEV_BENCHMARK_H
7 #define CHEBYSHEV_BENCHMARK_H
8 
9 #include <ctime>
10 #include <iostream>
11 
12 #include "./core/random.h"
13 #include "./benchmark/timer.h"
14 #include "./benchmark/generator.h"
15 #include "./benchmark/benchmark_structures.h"
16 
17 
18 namespace chebyshev {
19 
20 
27  namespace benchmark {
28 
29 
 31  struct benchmark_settings {
 32 
35  bool quiet = false;
36 
 38  std::string moduleName = "unknown";
 39 
 41  unsigned int defaultIterations = CHEBYSHEV_BENCHMARK_ITER;
 42 
 44  unsigned int defaultRuns = CHEBYSHEV_BENCHMARK_RUNS;
 45 
47  bool outputToFile = true;
48 
50  std::vector<std::string> outputFiles {};
51 
54  std::map<std::string, bool> pickedBenchmarks {};
55 
58  std::vector<std::string> benchmarkOutputFiles {};
59 
61  std::vector<std::string> benchmarkColumns = {
62  "name", "averageRuntime", "stdevRuntime", "runsPerSecond"
63  };
64 
65  } settings;
66 
67 
 69  struct benchmark_results {
 70 
72  unsigned int totalBenchmarks = 0;
73 
75  unsigned int failedBenchmarks = 0;
76 
78  std::map<std::string, std::vector<benchmark_result>> benchmarkResults {};
79 
80  } results;
81 
82 
89  inline void setup(
90  std::string moduleName,
91  int argc = 0,
92  const char** argv = nullptr) {
93 
 94  // Initialize the list of picked benchmarks
95  if(argc && argv)
96  for (int i = 1; i < argc; ++i)
97  settings.pickedBenchmarks[argv[i]] = true;
98 
99  std::cout << "Starting benchmarks of the "
100  << moduleName << " module ..." << std::endl;
101 
102  settings.moduleName = moduleName;
103  results.totalBenchmarks = 0;
104  results.failedBenchmarks = 0;
105 
106  random::setup();
107  output::setup();
108  }
109 
110 
115  inline void terminate(bool exit = true) {
116 
117  output::settings.quiet = settings.quiet;
118 
 119  // If output to file is enabled but no specific files are specified, add a default output file.
120  if( settings.outputToFile &&
121  !output::settings.outputFiles.size() &&
122  !settings.benchmarkOutputFiles.size() &&
123  !settings.outputFiles.size()) {
124 
125  settings.outputFiles = { settings.moduleName + "_results" };
126  }
127 
128  std::vector<std::string> outputFiles;
129 
130  // Print benchmark results
131  outputFiles = settings.outputFiles;
132  outputFiles.insert(outputFiles.end(), settings.benchmarkOutputFiles.begin(), settings.benchmarkOutputFiles.end());
133 
134  output::print_results(results.benchmarkResults, settings.benchmarkColumns, outputFiles);
135 
136  std::cout << "Finished benchmarking " << settings.moduleName << '\n';
137  std::cout << results.totalBenchmarks << " total benchmarks, "
138  << results.failedBenchmarks << " failed (" << std::setprecision(3) <<
139  (results.failedBenchmarks / (double) results.totalBenchmarks) * 100 << "%)"
140  << '\n';
141 
 142  if(exit) {
 143  output::terminate();
 144  std::exit(results.failedBenchmarks);
 145  }
 146 
 147  // Discard previous results
 148  results = benchmark_results();
149  }
150 
151 
161  template<typename InputType, typename Function>
162  inline long double runtime(Function func, const std::vector<InputType>& input) {
163 
164  if (input.size() == 0)
165  return 0.0;
166 
 167  // Dummy variable that prevents the compiler from optimizing away the calls
 168  volatile auto c = func(input[0]);
169 
170  timer t = timer();
171 
172  for (unsigned int j = 0; j < input.size(); ++j)
173  c += func(input[j]);
174 
175  return t();
176  }
177 
178 
186  template<typename InputType = double, typename Function>
187  inline void benchmark(
188  const std::string& name,
189  Function func,
190  const std::vector<InputType>& input,
191  unsigned int runs = settings.defaultRuns,
192  bool quiet = false) {
193 
194  // Whether the benchmark failed because of an exception
195  bool failed = false;
196 
197  // Running average
 198  long double averageRuntime = 0;
199 
200  // Running total sum of squares
 201  long double sumSquares = 0;
202 
203  // Total runtime
 204  long double totalRuntime = 0;
205 
206  try {
207 
208  // Use Welford's algorithm to compute the average and the variance
209  totalRuntime = runtime(func, input);
210  averageRuntime = totalRuntime / input.size();
211  sumSquares = 0.0;
212 
213  for (unsigned int i = 1; i < runs; ++i) {
214 
215  // Compute the runtime for a single run
216  // and update the running estimates
217  const long double currentRun = runtime(func, input);
218  const long double currentAverage = currentRun / input.size();
219  totalRuntime += currentRun;
220 
221  const long double tmp = averageRuntime;
222  averageRuntime = tmp + (currentAverage - tmp) / (i + 1);
223  sumSquares += (currentAverage - tmp)
224  * (currentAverage - averageRuntime);
225  }
226 
227  } catch(...) {
228 
229  // Catch any exception and mark the benchmark as failed
230  failed = true;
231  }
232 
233  benchmark_result res {};
234  res.name = name;
235  res.runs = runs;
236  res.iterations = input.size();
237  res.totalRuntime = totalRuntime;
238  res.averageRuntime = averageRuntime;
239  res.runsPerSecond = 1000.0 / res.averageRuntime;
240  res.failed = failed;
241  res.quiet = quiet;
242 
243  if (runs > 1)
244  res.stdevRuntime = std::sqrt(sumSquares / (runs - 1));
245 
246  results.totalBenchmarks++;
247  if(failed)
248  results.failedBenchmarks++;
249 
250  results.benchmarkResults[name].push_back(res);
251  }
252 
253 
260  template<typename InputType = double, typename Function>
261  inline void benchmark(
262  const std::string& name,
263  Function func,
264  const benchmark_options<InputType>& opt) {
265 
266  // Generate input set
267  std::vector<InputType> input (opt.iterations);
268  for (unsigned int i = 0; i < opt.iterations; ++i)
269  input[i] = opt.inputGenerator(i);
270 
271  // Benchmark over input set
272  benchmark(name, func, input, opt.runs, opt.quiet);
273  }
274 
275 
284  template<typename InputType = double, typename Function>
285  inline void benchmark(
286  const std::string& name,
287  Function func,
288  unsigned int runs = settings.defaultRuns,
289  unsigned int iterations = settings.defaultIterations,
290  InputGenerator<InputType> inputGenerator = generator::uniform1D(0, 1),
291  bool quiet = false) {
 292 
 293  benchmark_options<InputType> opt {};
294  opt.runs = runs;
295  opt.iterations = iterations;
296  opt.inputGenerator = inputGenerator;
297  opt.quiet = quiet;
298 
299  benchmark(name, func, opt);
300  }
301  }
302 }
303 
304 #endif
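
The snippet below is a minimal usage sketch of the module above, not part of benchmark.h: it assumes the header is reachable as "chebyshev/benchmark.h" and benchmarks std::sqrt purely as a placeholder, relying on the default number of runs and iterations and the default uniform input generator over [0, 1].

// Hypothetical benchmark executable built on the API listed above.
// The include path, module name and benchmarked function are placeholders.
#include "chebyshev/benchmark.h"
#include <cmath>

using namespace chebyshev;

int main(int argc, char* argv[]) {

    // Initialize the benchmark module; forwarding argv marks specific
    // benchmarks for execution, as described by pickedBenchmarks.
    benchmark::setup("example", argc, (const char**) argv);

    // Benchmark a function with the default runs and iterations,
    // drawing inputs uniformly from [0, 1] with the default generator.
    benchmark::benchmark("std::sqrt", [](double x) { return std::sqrt(x); });

    // Print the results, write the output files and exit
    // with the number of failed benchmarks as status code.
    benchmark::terminate();
}
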
Structures for the benchmark module.
Timer class to measure elapsed time in milliseconds.
Definition: timer.h:18
#define CHEBYSHEV_BENCHMARK_ITER
Default number of benchmark iterations.
Definition: common.h:22
#define CHEBYSHEV_BENCHMARK_RUNS
Default number of benchmark runs.
Definition: common.h:27
Input generators for benchmarks.
auto uniform1D(long double a, long double b)
Uniform generator over a domain.
Definition: generator.h:21
long double runtime(Function func, const std::vector< InputType > &input)
Measure the total runtime of a function over the given input for a single run.
Definition: benchmark.h:162
void benchmark(const std::string &name, Function func, const std::vector< InputType > &input, unsigned int runs=settings.defaultRuns, bool quiet=false)
Run a benchmark on a generic function, with the given input vector.
Definition: benchmark.h:187
std::function< InputType(unsigned int)> InputGenerator
A function which takes in an index and returns a generated input element.
Definition: benchmark_structures.h:62
void setup(std::string moduleName, int argc=0, const char **argv=nullptr)
Setup the benchmark environment.
Definition: benchmark.h:89
void benchmark(const std::string &name, Function func, unsigned int runs=settings.defaultRuns, unsigned int iterations=settings.defaultIterations, InputGenerator< InputType > inputGenerator=generator::uniform1D(0, 1), bool quiet=false)
Run a benchmark on a generic function, with the given argument options.
Definition: benchmark.h:285
void terminate(bool exit=true)
Terminate the benchmarking environment.
Definition: benchmark.h:115
void print_results(const std::map< std::string, std::vector< ResultType >> &results, const std::vector< std::string > &fields, const std::vector< std::string > &filenames)
Print the test results to standard output and output files with their given formats,...
Definition: output.h:907
void setup()
Setup printing to the output stream with default options.
Definition: output.h:539
void terminate()
Terminate the output module by closing all output files and resetting its settings.
Definition: output.h:598
std::string string(size_t length)
Generate a random string made of human-readable ASCII characters.
Definition: random.h:102
void setup(uint64_t seed=0)
Initialize the random module.
Definition: random.h:32
General namespace of the framework.
Definition: benchmark_structures.h:16
Vector sqrt(const Vector &v)
Parallel element-wise evaluation of the sqrt function.
Definition: parallel.h:143
The pseudorandom number generation and sampling module.
A structure holding the options of a benchmark.
Definition: benchmark_structures.h:68
unsigned int iterations
Number of iterations.
Definition: benchmark_structures.h:74
bool quiet
Whether to print to standard output or not.
Definition: benchmark_structures.h:80
unsigned int runs
Number of runs (run with the same input values).
Definition: benchmark_structures.h:71
InputGenerator< InputType > inputGenerator
The function to use to generate input for the benchmark.
Definition: benchmark_structures.h:77
Structure holding the results of a benchmark.
Definition: benchmark_structures.h:23
std::string name
Identifying name of the function or test case.
Definition: benchmark_structures.h:26
Results of the benchmarks.
Definition: benchmark.h:69
std::map< std::string, std::vector< benchmark_result > > benchmarkResults
Results of the benchmarks.
Definition: benchmark.h:78
unsigned int failedBenchmarks
Number of failed benchmarks.
Definition: benchmark.h:75
unsigned int totalBenchmarks
Total number of benchmarks.
Definition: benchmark.h:72
Global settings of the benchmark module.
Definition: benchmark.h:31
std::vector< std::string > benchmarkOutputFiles
The files to write benchmark results to (if empty, all results are output to a generic file).
Definition: benchmark.h:58
unsigned int defaultRuns
Default number of runs.
Definition: benchmark.h:44
std::map< std::string, bool > pickedBenchmarks
Target benchmarks marked for execution (all benchmarks will be executed if empty)
Definition: benchmark.h:54
std::vector< std::string > benchmarkColumns
Default columns to print for benchmarks.
Definition: benchmark.h:61
std::string moduleName
Name of the module currently being benchmarked.
Definition: benchmark.h:38
bool outputToFile
Whether to output results to a file.
Definition: benchmark.h:47
unsigned int defaultIterations
Default number of iterations.
Definition: benchmark.h:41
std::vector< std::string > outputFiles
The files to write all benchmark results to.
Definition: benchmark.h:50
bool quiet
Whether to print benchmark results to standard output.
Definition: benchmark.h:35
A timer class to measure elapsed time in milliseconds.
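
As a further sketch, this time of the options-based overload: the example below assumes that benchmark_options and the generator namespace live in chebyshev::benchmark, as the unqualified uses in the listing suggest, and picks arbitrary run counts, iteration counts and input range purely for illustration.

// Hypothetical use of the benchmark_options overload documented above.
#include "chebyshev/benchmark.h"
#include <cmath>

using namespace chebyshev;

int main() {

    benchmark::setup("special_functions");

    // Configure runs, iterations and the input generator explicitly
    // instead of relying on the module defaults.
    benchmark::benchmark_options<double> opt {};
    opt.runs = 10;
    opt.iterations = 100000;
    opt.inputGenerator = benchmark::generator::uniform1D(1, 100);

    // Benchmark a placeholder function over inputs uniform in [1, 100].
    benchmark::benchmark("std::lgamma", [](double x) { return std::lgamma(x); }, opt);

    benchmark::terminate();
}
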