author    Roman Lebedev <lebedev.ri@gmail.com>  2018-08-29 14:58:54 +0300
committer GitHub <noreply@github.com>           2018-08-29 14:58:54 +0300
commit    d9cab612e40017af10bddaa5b60c7067032a9e1c
tree      4ac64e5197a84a484324ae55ccb974d4408b655d
parent    a0018c39310da35048083e4759774ba0e6cee468
[NFC] s/console_reporter/display_reporter/ (#663)
There are two destinations:

* display (console, terminal)
* file

Each destination can be populated with one of the reporters:

* console - human-friendly table-like display
* json
* csv (deprecated)

So the name 'console_reporter' is confusing: does it refer to the console reporter in the sense of the table-like reporter, or in the sense of the display destination?
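For illustration only (not part of this patch), a minimal sketch of how the two destinations map onto the renamed overload, assuming the library's ConsoleReporter and JSONReporter classes and the --benchmark_out flag:

#include <benchmark/benchmark.h>
#include <string>

// Toy benchmark, as in the library's README.
static void BM_StringCreation(benchmark::State& state) {
  for (auto _ : state) std::string empty_string;
}
BENCHMARK(BM_StringCreation);

int main(int argc, char** argv) {
  benchmark::Initialize(&argc, argv);
  // Display destination: human-friendly table printed to the terminal.
  benchmark::ConsoleReporter display_reporter;
  // File destination: machine-readable JSON; expects --benchmark_out=<file>
  // on the command line, since a custom file reporter without an output
  // file is rejected.
  benchmark::JSONReporter file_reporter;
  benchmark::RunSpecifiedBenchmarks(&display_reporter, &file_reporter);
  return 0;
}

Run as, e.g., ./mybench --benchmark_out=results.json.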
-rw-r--r--  include/benchmark/benchmark.h |  6
-rw-r--r--  src/benchmark.cc              | 36
2 files changed, 21 insertions, 21 deletions
diff --git a/include/benchmark/benchmark.h b/include/benchmark/benchmark.h
index efbbd72..c21ccf7 100644
--- a/include/benchmark/benchmark.h
+++ b/include/benchmark/benchmark.h
@@ -268,7 +268,7 @@ bool ReportUnrecognizedArguments(int argc, char** argv);
// of each matching benchmark. Otherwise run each matching benchmark and
// report the results.
//
-// The second and third overload use the specified 'console_reporter' and
+// The second and third overload use the specified 'display_reporter' and
// 'file_reporter' respectively. 'file_reporter' will write to the file
// specified
// by '--benchmark_output'. If '--benchmark_output' is not given the
@@ -276,8 +276,8 @@ bool ReportUnrecognizedArguments(int argc, char** argv);
//
// RETURNS: The number of matching benchmarks.
size_t RunSpecifiedBenchmarks();
-size_t RunSpecifiedBenchmarks(BenchmarkReporter* console_reporter);
-size_t RunSpecifiedBenchmarks(BenchmarkReporter* console_reporter,
+size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter);
+size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter,
BenchmarkReporter* file_reporter);
// Register a MemoryManager instance that will be used to collect and report
diff --git a/src/benchmark.cc b/src/benchmark.cc
index f9c8686..81a520c 100644
--- a/src/benchmark.cc
+++ b/src/benchmark.cc
@@ -424,10 +424,10 @@ namespace internal {
namespace {
void RunBenchmarks(const std::vector<Benchmark::Instance>& benchmarks,
- BenchmarkReporter* console_reporter,
+ BenchmarkReporter* display_reporter,
BenchmarkReporter* file_reporter) {
// Note the file_reporter can be null.
- CHECK(console_reporter != nullptr);
+ CHECK(display_reporter != nullptr);
// Determine the width of the name field using a minimum width of 10.
bool has_repetitions = FLAGS_benchmark_repetitions > 1;
@@ -458,22 +458,22 @@ void RunBenchmarks(const std::vector<Benchmark::Instance>& benchmarks,
std::flush(reporter->GetErrorStream());
};
- if (console_reporter->ReportContext(context) &&
+ if (display_reporter->ReportContext(context) &&
(!file_reporter || file_reporter->ReportContext(context))) {
- flushStreams(console_reporter);
+ flushStreams(display_reporter);
flushStreams(file_reporter);
for (const auto& benchmark : benchmarks) {
std::vector<BenchmarkReporter::Run> reports =
RunBenchmark(benchmark, &complexity_reports);
- console_reporter->ReportRuns(reports);
+ display_reporter->ReportRuns(reports);
if (file_reporter) file_reporter->ReportRuns(reports);
- flushStreams(console_reporter);
+ flushStreams(display_reporter);
flushStreams(file_reporter);
}
}
- console_reporter->Finalize();
+ display_reporter->Finalize();
if (file_reporter) file_reporter->Finalize();
- flushStreams(console_reporter);
+ flushStreams(display_reporter);
flushStreams(file_reporter);
}
@@ -523,11 +523,11 @@ size_t RunSpecifiedBenchmarks() {
return RunSpecifiedBenchmarks(nullptr, nullptr);
}
-size_t RunSpecifiedBenchmarks(BenchmarkReporter* console_reporter) {
- return RunSpecifiedBenchmarks(console_reporter, nullptr);
+size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter) {
+ return RunSpecifiedBenchmarks(display_reporter, nullptr);
}
-size_t RunSpecifiedBenchmarks(BenchmarkReporter* console_reporter,
+size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter,
BenchmarkReporter* file_reporter) {
std::string spec = FLAGS_benchmark_filter;
if (spec.empty() || spec == "all")
@@ -535,15 +535,15 @@ size_t RunSpecifiedBenchmarks(BenchmarkReporter* console_reporter,
// Setup the reporters
std::ofstream output_file;
- std::unique_ptr<BenchmarkReporter> default_console_reporter;
+ std::unique_ptr<BenchmarkReporter> default_display_reporter;
std::unique_ptr<BenchmarkReporter> default_file_reporter;
- if (!console_reporter) {
- default_console_reporter = internal::CreateReporter(
+ if (!display_reporter) {
+ default_display_reporter = internal::CreateReporter(
FLAGS_benchmark_format, internal::GetOutputOptions());
- console_reporter = default_console_reporter.get();
+ display_reporter = default_display_reporter.get();
}
- auto& Out = console_reporter->GetOutputStream();
- auto& Err = console_reporter->GetErrorStream();
+ auto& Out = display_reporter->GetOutputStream();
+ auto& Err = display_reporter->GetErrorStream();
std::string const& fname = FLAGS_benchmark_out;
if (fname.empty() && file_reporter) {
@@ -578,7 +578,7 @@ size_t RunSpecifiedBenchmarks(BenchmarkReporter* console_reporter,
if (FLAGS_benchmark_list_tests) {
for (auto const& benchmark : benchmarks) Out << benchmark.name << "\n";
} else {
- internal::RunBenchmarks(benchmarks, console_reporter, file_reporter);
+ internal::RunBenchmarks(benchmarks, display_reporter, file_reporter);
}
return benchmarks.size();