forked from DigitalInBlue/Celero
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathTestFixture.h
156 lines (138 loc) · 4.75 KB
/
TestFixture.h
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
#ifndef H_CELERO_TESTFIXTURE_H
#define H_CELERO_TESTFIXTURE_H
///
/// \author John Farrier
///
/// \copyright Copyright 2015, 2016, 2017 John Farrier
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
#include <cstddef>
#include <cstdint>
#include <limits>
#include <celero/Export.h>
#include <celero/ThreadLocal.h>
#include <celero/Timer.h>
#include <vector>
namespace celero
{
class Benchmark;
///
/// \class TestFixture
///
/// \author John Farrier
///
/// Base class for user-defined benchmark fixtures. Derive from this class to
/// supply per-experiment set-up/tear-down hooks, problem-space values, and the
/// user benchmark body executed by the framework.
///
class CELERO_EXPORT TestFixture
{
public:
	///
	/// Default Constructor.
	///
	TestFixture();

	///
	/// Virtual destructor for inheritance.
	///
	virtual ~TestFixture();

	enum class Constants : int64_t
	{
#ifdef _MSC_VER
#if(_MSC_VER < 1900)
		// Pre-VS2015 MSVC cannot use std::numeric_limits here; use a literal
		// sentinel (INT64_MIN + 1, since the INT64_MIN literal itself is not
		// representable as a positive token before negation).
		NoProblemSpaceValue = -9223372036854775807
#else
		NoProblemSpaceValue = std::numeric_limits<int64_t>::min()
#endif
#else
		NoProblemSpaceValue = std::numeric_limits<int64_t>::min()
#endif
	};

	///
	/// Allows a test fixture to supply values to use for experiments.
	///
	/// This is used to create multiple runs of the same experiment
	/// and varying the data set size, for example. The second value
	/// of the pair is an optional override for the number of iterations
	/// to be used. If zero is specified, then the default number of
	/// iterations is used.
	///
	/// It is only guaranteed that the constructor is called prior to this function being called.
	///
	/// \return A vector of (problem-space value, iteration override) pairs; empty by default.
	///
	virtual std::vector<std::pair<int64_t, uint64_t>> getExperimentValues() const
	{
		return std::vector<std::pair<int64_t, uint64_t>>();
	}

	///
	/// Provide a units result scale of each experiment value.
	///
	/// If the value is greater than 0 then additional statistic value will be printed
	/// in output - [ xxxx units/sec ]. For example for measure speed of
	/// file IO operations method might return 1024 * 1024 to get megabytes
	/// per second.
	///
	/// It is only guaranteed that the constructor is called prior to this function being called.
	///
	/// \return The scale factor applied to units-per-second output; 1.0 by default.
	///
	virtual double getExperimentValueResultScale() const
	{
		return 1.0;
	}

	///
	/// Allows the test fixture to run code that will be executed once immediately before the benchmark.
	///
	/// Unlike setUp, the evaluation of this function IS included in the total experiment execution
	/// time.
	///
	/// \param experimentValue The value for the experiment. This can be ignored if the test does not utilize experiment values.
	///
	virtual void onExperimentStart(int64_t experimentValue);

	///
	/// Allows the test fixture to run code that will be executed once immediately after the benchmark.
	/// Unlike tearDown, the evaluation of this function IS included in the total experiment execution
	/// time.
	///
	virtual void onExperimentEnd();

	///
	/// Set up the test fixture before benchmark execution.
	///
	/// This code is NOT included in the benchmark timing.
	/// It is executed once before all iterations are executed and between each Sample.
	/// Your experiment should NOT rely on "setUp" methods to be called before EACH experiment run, only between each sample.
	///
	/// \param experimentValue The value for the experiment. This can be ignored if the test does not utilize experiment values.
	///
	virtual void setUp(int64_t experimentValue);

	///
	/// Called after test completion to destroy the fixture.
	///
	/// This code is NOT included in the benchmark timing.
	/// It is executed once after all iterations are executed and between each Sample.
	/// Your experiment should NOT rely on "tearDown" methods to be called after EACH experiment run, only between each sample.
	///
	virtual void tearDown();

	///
	/// Runs the benchmark: setUp, the timed iteration loop, then tearDown.
	///
	/// \param threads The number of working threads.
	/// \param iterations The number of times to loop over the UserBenchmark function.
	/// \param experimentValue The experiment value to pass in setUp function.
	///
	/// \return Returns the number of microseconds the run took.
	///
	virtual uint64_t run(uint64_t threads, uint64_t iterations, int64_t experimentValue);

protected:
	/// Executed for each operation the benchmarking test is run.
	virtual void UserBenchmark();

	///
	/// Only used for baseline cases. Used to define a hard-coded execution time vs. actually making a measurement.
	///
	virtual uint64_t HardCodedMeasurement() const;
};
}
#endif