author      Eric Fiselier <eric@efcs.ca>    2016-05-23 19:24:56 -0600
committer   Eric Fiselier <eric@efcs.ca>    2016-05-23 19:24:56 -0600
commit      43017f8b1510a50855e6b0ea145a534d3d754068 (patch)
tree        2a40473d89a11a26cea9875a48f6ada74b4501d0 /test
parent      f434ce3fb650d40db186780ea3506269d5035ffd (diff)
download    google-benchmark-43017f8b1510a50855e6b0ea145a534d3d754068.tar.gz
Add SkipWithError(...) function.
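For context, SkipWithError(...) lets a benchmark mark the current run as failed with a message instead of reporting timings; the new tests below exercise it before, during, and after the KeepRunning() loop. A minimal usage sketch, mirroring the cases in skip_with_error_test.cc (the benchmark name and the setup_ok condition are illustrative placeholders, not part of this commit):

    void BM_with_error_handling(benchmark::State& state) {
      bool setup_ok = true;  // hypothetical setup step
      if (!setup_ok) {
        // Mark the run as failed; as BM_error_before_running below shows,
        // KeepRunning() then returns false and the loop body never executes.
        state.SkipWithError("setup failed");
      }
      while (state.KeepRunning()) {
        // ... code under measurement ...
      }
    }
    BENCHMARK(BM_with_error_handling);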
Diffstat (limited to 'test')
-rw-r--r--  test/CMakeLists.txt           |   6
-rw-r--r--  test/diagnostics_test.cc      |  61
-rw-r--r--  test/skip_with_error_test.cc  | 161
3 files changed, 228 insertions(+), 0 deletions(-)
diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt
index a10a53a..efbe7bb 100644
--- a/test/CMakeLists.txt
+++ b/test/CMakeLists.txt
@@ -36,6 +36,12 @@ add_test(options_benchmarks options_test --benchmark_min_time=0.01)
compile_benchmark_test(basic_test)
add_test(basic_benchmark basic_test --benchmark_min_time=0.01)
+compile_benchmark_test(diagnostics_test)
+add_test(diagnostics_test diagnostics_test --benchmark_min_time=0.01)
+
+compile_benchmark_test(skip_with_error_test)
+add_test(skip_with_error_test skip_with_error_test --benchmark_min_time=0.01)
+
compile_benchmark_test(fixture_test)
add_test(fixture_test fixture_test --benchmark_min_time=0.01)
diff --git a/test/diagnostics_test.cc b/test/diagnostics_test.cc
new file mode 100644
index 0000000..60fa3b1
--- /dev/null
+++ b/test/diagnostics_test.cc
@@ -0,0 +1,61 @@
+// Testing:
+// State::PauseTiming()
+// State::ResumeTiming()
+// Test that the CHECKs within these functions diagnose when they are called
+// outside of the KeepRunning() loop.
+//
+// NOTE: Users should NOT include or use src/check.h. This is only done in
+// order to test library internals.
+
+#include "benchmark/benchmark_api.h"
+#include "../src/check.h"
+#include <stdexcept>
+#include <cstdlib>
+
+#if defined(__GNUC__) && !defined(__EXCEPTIONS)
+#define TEST_HAS_NO_EXCEPTIONS
+#endif
+
+void TestHandler() {
+#ifndef TEST_HAS_NO_EXCEPTIONS
+ throw std::logic_error("");
+#else
+ std::abort();
+#endif
+}
+
+void try_invalid_pause_resume(benchmark::State& state) {
+#if !defined(NDEBUG) && !defined(TEST_HAS_NO_EXCEPTIONS)
+ try {
+ state.PauseTiming();
+ std::abort();
+ } catch (std::logic_error const&) {}
+ try {
+ state.ResumeTiming();
+ std::abort();
+ } catch (std::logic_error const&) {}
+#else
+ (void)state; // avoid unused warning
+#endif
+}
+
+void BM_diagnostic_test(benchmark::State& state) {
+ static bool called_once = false;
+
+ if (called_once == false) try_invalid_pause_resume(state);
+
+ while (state.KeepRunning()) {
+ benchmark::DoNotOptimize(state.iterations());
+ }
+
+ if (called_once == false) try_invalid_pause_resume(state);
+
+ called_once = true;
+}
+BENCHMARK(BM_diagnostic_test);
+
+int main(int argc, char** argv) {
+ benchmark::internal::GetAbortHandler() = &TestHandler;
+ benchmark::Initialize(&argc, argv);
+ benchmark::RunSpecifiedBenchmarks();
+}
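The diagnostics test above works by installing TestHandler through benchmark::internal::GetAbortHandler(), so a failed CHECK inside PauseTiming()/ResumeTiming() throws (or aborts when exceptions are disabled) rather than terminating the process outright. As a rough sketch of that handler-override pattern, assuming check.h routes its failure path through a replaceable function pointer (this is an illustration of the idea, not the library's actual check.h):

    #include <cstdlib>

    typedef void (AbortHandlerT)();

    inline AbortHandlerT*& GetAbortHandler() {
      // Defaults to std::abort; a test can assign a throwing handler instead.
      static AbortHandlerT* handler = &std::abort;
      return handler;
    }

    // A CHECK-style macro would invoke the current handler on failure,
    // letting diagnostics_test.cc catch the resulting std::logic_error.
    #define MY_CHECK(cond)                  \
      do {                                  \
        if (!(cond)) GetAbortHandler()();   \
      } while (false)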
diff --git a/test/skip_with_error_test.cc b/test/skip_with_error_test.cc
new file mode 100644
index 0000000..dafbd64
--- /dev/null
+++ b/test/skip_with_error_test.cc
@@ -0,0 +1,161 @@
+
+#undef NDEBUG
+#include "benchmark/benchmark.h"
+#include "../src/check.h" // NOTE: check.h is for internal use only!
+#include <cassert>
+#include <vector>
+
+namespace {
+
+class TestReporter : public benchmark::ConsoleReporter {
+ public:
+ virtual bool ReportContext(const Context& context) {
+ return ConsoleReporter::ReportContext(context);
+  }
+
+ virtual void ReportRuns(const std::vector<Run>& report) {
+ all_runs_.insert(all_runs_.end(), begin(report), end(report));
+ ConsoleReporter::ReportRuns(report);
+ }
+
+ TestReporter() {}
+ virtual ~TestReporter() {}
+
+ mutable std::vector<Run> all_runs_;
+};
+
+struct TestCase {
+ std::string name;
+ bool error_occurred;
+ std::string error_message;
+
+ typedef benchmark::BenchmarkReporter::Run Run;
+
+ void CheckRun(Run const& run) const {
+ CHECK(name == run.benchmark_name) << "expected " << name << " got " << run.benchmark_name;
+ CHECK(error_occurred == run.error_occurred);
+ CHECK(error_message == run.error_message);
+ if (error_occurred) {
+ //CHECK(run.iterations == 0);
+ } else {
+ CHECK(run.iterations != 0);
+ }
+ }
+};
+
+std::vector<TestCase> ExpectedResults;
+
+int AddCases(const char* base_name, std::initializer_list<TestCase> const& v) {
+ for (auto TC : v) {
+ TC.name = base_name + TC.name;
+ ExpectedResults.push_back(std::move(TC));
+ }
+ return 0;
+}
+
+#define CONCAT(x, y) CONCAT2(x, y)
+#define CONCAT2(x, y) x##y
+#define ADD_CASES(...) \
+int CONCAT(dummy, __LINE__) = AddCases(__VA_ARGS__)
+
+} // end namespace
+
+
+void BM_error_before_running(benchmark::State& state) {
+ state.SkipWithError("error message");
+ while (state.KeepRunning()) {
+ assert(false);
+ }
+}
+BENCHMARK(BM_error_before_running);
+ADD_CASES("BM_error_before_running",
+ {{"", true, "error message"}});
+
+void BM_error_during_running(benchmark::State& state) {
+  bool first_iter = true;
+ while (state.KeepRunning()) {
+ if (state.range_x() == 1 && state.thread_index <= (state.threads / 2)) {
+ assert(first_iter);
+ first_iter = false;
+ state.SkipWithError("error message");
+ } else {
+ state.PauseTiming();
+ state.ResumeTiming();
+ }
+ }
+}
+BENCHMARK(BM_error_during_running)->Arg(1)->Arg(2)->ThreadRange(1, 8);
+ADD_CASES(
+ "BM_error_during_running",
+ {{"/1/threads:1", true, "error message"},
+ {"/1/threads:2", true, "error message"},
+ {"/1/threads:4", true, "error message"},
+ {"/1/threads:8", true, "error message"},
+ {"/2/threads:1", false, ""},
+ {"/2/threads:2", false, ""},
+ {"/2/threads:4", false, ""},
+ {"/2/threads:8", false, ""}}
+);
+
+void BM_error_after_running(benchmark::State& state) {
+ while (state.KeepRunning()) {
+ benchmark::DoNotOptimize(state.iterations());
+ }
+ if (state.thread_index <= (state.threads / 2))
+ state.SkipWithError("error message");
+}
+BENCHMARK(BM_error_after_running)->ThreadRange(1, 8);
+ADD_CASES(
+ "BM_error_after_running",
+ {{"/threads:1", true, "error message"},
+ {"/threads:2", true, "error message"},
+ {"/threads:4", true, "error message"},
+ {"/threads:8", true, "error message"}}
+);
+
+void BM_error_while_paused(benchmark::State& state) {
+ bool first_iter = true;
+ while (state.KeepRunning()) {
+ if (state.range_x() == 1 && state.thread_index <= (state.threads / 2)) {
+ assert(first_iter);
+ first_iter = false;
+ state.PauseTiming();
+ state.SkipWithError("error message");
+ } else {
+ state.PauseTiming();
+ state.ResumeTiming();
+ }
+ }
+}
+BENCHMARK(BM_error_while_paused)->Arg(1)->Arg(2)->ThreadRange(1, 8);
+ADD_CASES(
+ "BM_error_while_paused",
+ {{"/1/threads:1", true, "error message"},
+ {"/1/threads:2", true, "error message"},
+ {"/1/threads:4", true, "error message"},
+ {"/1/threads:8", true, "error message"},
+ {"/2/threads:1", false, ""},
+ {"/2/threads:2", false, ""},
+ {"/2/threads:4", false, ""},
+ {"/2/threads:8", false, ""}}
+);
+
+
+int main(int argc, char* argv[]) {
+ benchmark::Initialize(&argc, argv);
+
+ TestReporter test_reporter;
+ benchmark::RunSpecifiedBenchmarks(&test_reporter);
+
+ typedef benchmark::BenchmarkReporter::Run Run;
+ auto EB = ExpectedResults.begin();
+
+ for (Run const& run : test_reporter.all_runs_) {
+ assert(EB != ExpectedResults.end());
+ EB->CheckRun(run);
+ ++EB;
+ }
+ assert(EB == ExpectedResults.end());
+
+ return 0;
+}