author     Haibo Huang <hhb@google.com>  2020-09-10 22:01:04 +0000
committer  Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>  2020-09-10 22:01:04 +0000
commit     b71e6302ffa6eba0943184b967b0c42ebf849079 (patch)
tree       591ff81464b0d795b61d5c5d0712e94197c4ecc6
parent     dda47259195b01a28520cd2101080d100dddcc2e (diff)
parent     6ee8394bc0e240b3332f5609af6933c405cd7b6b (diff)
Upgrade google-benchmark to beb360d03e2a1a2673d9c2cf408c13b69fdb5627 am: daca24e319 am: c4f8afab29 am: d737881367 am: b4855e3648 am: 6ee8394bc0
Original change: https://android-review.googlesource.com/c/platform/external/google-benchmark/+/1423976

Change-Id: If50b82f822eab0fff589de9780e77a1647b7a707
-rw-r--r--  .github/workflows/build-and-test.yml           38
-rw-r--r--  .github/workflows/pylint.yml                   26
-rw-r--r--  AUTHORS                                         1
-rw-r--r--  BUILD.bazel                                     4
-rw-r--r--  CMakeLists.txt                                 13
-rw-r--r--  CONTRIBUTORS                                    1
-rw-r--r--  METADATA                                        6
-rw-r--r--  README.md                                      57
-rw-r--r--  WORKSPACE                                       1
-rw-r--r--  bindings/python/google_benchmark/__init__.py  24
-rw-r--r--  bindings/python/google_benchmark/example.py   18
-rw-r--r--  cmake/CXXFeatureCheck.cmake                     5
-rw-r--r--  cmake/GoogleTest.cmake.in                       2
-rw-r--r--  include/benchmark/benchmark.h                   5
-rw-r--r--  setup.py                                      139
-rw-r--r--  src/benchmark_register.cc                      43
-rw-r--r--  src/commandlineflags.cc                         2
-rw-r--r--  test/CMakeLists.txt                             3
-rw-r--r--  test/args_product_test.cc                      77
-rw-r--r--  test/commandlineflags_gtest.cc                144
-rw-r--r--  tools/gbench/util.py                            5
21 files changed, 403 insertions(+), 211 deletions(-)
diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml
new file mode 100644
index 0000000..f0f0626
--- /dev/null
+++ b/.github/workflows/build-and-test.yml
@@ -0,0 +1,38 @@
+name: build-and-test
+
+on:
+ push:
+ branches: [ master ]
+ pull_request:
+ branches: [ master ]
+
+jobs:
+ job:
+ # TODO(dominic): Extend this to include compiler and set through env: CC/CXX.
+ name: ${{ matrix.os }}.${{ matrix.build_type }}
+ runs-on: ${{ matrix.os }}
+ strategy:
+ fail-fast: false
+ matrix:
+ os: [ubuntu-latest, ubuntu-16.04, ubuntu-20.04, macos-latest, windows-latest]
+ build_type: ['Release', 'Debug']
+ steps:
+ - uses: actions/checkout@v2
+
+ - name: create build environment
+ run: cmake -E make_directory ${{ runner.workspace }}/_build
+
+ - name: configure cmake
+ shell: bash
+ working-directory: ${{ runner.workspace }}/_build
+ run: cmake -DBENCHMARK_DOWNLOAD_DEPENDENCIES=ON $GITHUB_WORKSPACE -DCMAKE_BUILD_TYPE=${{ matrix.build_type }}
+
+ - name: build
+ shell: bash
+ working-directory: ${{ runner.workspace }}/_build
+ run: cmake --build . --config ${{ matrix.build_type }}
+
+ - name: test
+ shell: bash
+ working-directory: ${{ runner.workspace }}/_build
+ run: ctest -C ${{ matrix.build_type }}
diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml
new file mode 100644
index 0000000..c869674
--- /dev/null
+++ b/.github/workflows/pylint.yml
@@ -0,0 +1,26 @@
+name: pylint
+
+on:
+ push:
+ branches: [ master ]
+ pull_request:
+ branches: [ master ]
+
+jobs:
+ pylint:
+
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v2
+ - name: Set up Python 3.8
+ uses: actions/setup-python@v1
+ with:
+ python-version: 3.8
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install pylint pylint-exit conan
+ - name: Run pylint
+ run: |
+ pylint `find . -name '*.py'|xargs` || pylint-exit $?
diff --git a/AUTHORS b/AUTHORS
index 89205a1..e353b53 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -13,6 +13,7 @@ Alex Steele <steeleal123@gmail.com>
Andriy Berestovskyy <berestovskyy@gmail.com>
Arne Beer <arne@twobeer.de>
Carto
+Christian Wassermann <christian_wassermann@web.de>
Christopher Seymour <chris.j.seymour@hotmail.com>
Colin Braley <braley.colin@gmail.com>
Daniel Harvey <danielharvey458@gmail.com>
diff --git a/BUILD.bazel b/BUILD.bazel
index d97a019..eb35b62 100644
--- a/BUILD.bazel
+++ b/BUILD.bazel
@@ -1,3 +1,5 @@
+load("@rules_cc//cc:defs.bzl", "cc_library")
+
licenses(["notice"])
config_setting(
@@ -8,8 +10,6 @@ config_setting(
visibility = [":__subpackages__"],
)
-load("@rules_cc//cc:defs.bzl", "cc_library")
-
cc_library(
name = "benchmark",
srcs = glob(
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 67c0b70..a157666 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -245,11 +245,17 @@ if (BENCHMARK_USE_LIBCXX)
endif()
endif(BENCHMARK_USE_LIBCXX)
+set(EXTRA_CXX_FLAGS "")
+if (WIN32 AND "${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang")
+ # Clang on Windows fails to compile the regex feature check under C++11
+ set(EXTRA_CXX_FLAGS "-DCMAKE_CXX_STANDARD=14")
+endif()
+
# C++ feature checks
# Determine the correct regular expression engine to use
-cxx_feature_check(STD_REGEX)
-cxx_feature_check(GNU_POSIX_REGEX)
-cxx_feature_check(POSIX_REGEX)
+cxx_feature_check(STD_REGEX ${EXTRA_CXX_FLAGS})
+cxx_feature_check(GNU_POSIX_REGEX ${EXTRA_CXX_FLAGS})
+cxx_feature_check(POSIX_REGEX ${EXTRA_CXX_FLAGS})
if(NOT HAVE_STD_REGEX AND NOT HAVE_GNU_POSIX_REGEX AND NOT HAVE_POSIX_REGEX)
message(FATAL_ERROR "Failed to determine the source files for the regular expression backend")
endif()
@@ -257,6 +263,7 @@ if (NOT BENCHMARK_ENABLE_EXCEPTIONS AND HAVE_STD_REGEX
AND NOT HAVE_GNU_POSIX_REGEX AND NOT HAVE_POSIX_REGEX)
message(WARNING "Using std::regex with exceptions disabled is not fully supported")
endif()
+
cxx_feature_check(STEADY_CLOCK)
# Ensure we have pthreads
set(THREADS_PREFER_PTHREAD_FLAG ON)
diff --git a/CONTRIBUTORS b/CONTRIBUTORS
index 88f7eee..6beed71 100644
--- a/CONTRIBUTORS
+++ b/CONTRIBUTORS
@@ -28,6 +28,7 @@ Andriy Berestovskyy <berestovskyy@gmail.com>
Arne Beer <arne@twobeer.de>
Billy Robert O'Neal III <billy.oneal@gmail.com> <bion@microsoft.com>
Chris Kennelly <ckennelly@google.com> <ckennelly@ckennelly.com>
+Christian Wassermann <christian_wassermann@web.de>
Christopher Seymour <chris.j.seymour@hotmail.com>
Colin Braley <braley.colin@gmail.com>
Cyrille Faucheux <cyrille.faucheux@gmail.com>
diff --git a/METADATA b/METADATA
index 75ceba4..389d4c2 100644
--- a/METADATA
+++ b/METADATA
@@ -9,11 +9,11 @@ third_party {
type: GIT
value: "https://github.com/google/benchmark.git"
}
- version: "1302d2ce094a9753b0f81a81ea74c0fa71fae582"
+ version: "beb360d03e2a1a2673d9c2cf408c13b69fdb5627"
license_type: NOTICE
last_upgrade_date {
year: 2020
- month: 8
- day: 10
+ month: 9
+ day: 9
}
}
diff --git a/README.md b/README.md
index 02a3bfa..41a1bdf 100644
--- a/README.md
+++ b/README.md
@@ -70,13 +70,13 @@ $ git clone https://github.com/google/googletest.git benchmark/googletest
# Go to the library root directory
$ cd benchmark
# Make a build directory to place the build output.
-$ mkdir build && cd build
-# Generate a Makefile with cmake.
-# Use cmake -G <generator> to generate a different file type.
-$ cmake ../
+$ cmake -E make_directory "build"
+# Generate build system files with cmake.
+$ cmake -E chdir "build" cmake -DCMAKE_BUILD_TYPE=Release ../
+# or, starting with CMake 3.13, use a simpler form:
+# cmake -DCMAKE_BUILD_TYPE=Release -S . -B "build"
# Build the library.
-# Use make -j<number_of_parallel_jobs> to speed up the build process, e.g. make -j8 .
-$ make
+$ cmake --build "build" --config Release
```
This builds the `benchmark` and `benchmark_main` libraries and tests.
On a unix system, the build directory should now look something like this:
@@ -94,13 +94,13 @@ On a unix system, the build directory should now look something like this:
Next, you can run the tests to check the build.
```bash
-$ make test
+$ cmake -E chdir "build" ctest --build-config Release
```
If you want to install the library globally, also run:
```
-sudo make install
+sudo cmake --build "build" --config Release --target install
```
Note that Google Benchmark requires Google Test to build and run the tests. This
@@ -117,17 +117,14 @@ to `CMAKE_ARGS`.
### Debug vs Release
By default, benchmark builds as a debug library. You will see a warning in the
-output when this is the case. To build it as a release library instead, use:
+output when this is the case. To build it as a release library instead, add
+`-DCMAKE_BUILD_TYPE=Release` when generating the build system files, as shown
+above. The use of `--config Release` in build commands is needed to properly
+support multi-configuration tools (like Visual Studio for example) and can be
+skipped for other build systems (like Makefile).
-```
-cmake -DCMAKE_BUILD_TYPE=Release
-```
-
-To enable link-time optimisation, use
-
-```
-cmake -DCMAKE_BUILD_TYPE=Release -DBENCHMARK_ENABLE_LTO=true
-```
+To enable link-time optimisation, also add `-DBENCHMARK_ENABLE_LTO=true` when
+generating the build system files.
If you are using gcc, you might need to set `GCC_AR` and `GCC_RANLIB` cmake
cache variables, if autodetection fails.
@@ -135,7 +132,6 @@ cache variables, if autodetection fails.
If you are using clang, you may need to set `LLVMAR_EXECUTABLE`,
`LLVMNM_EXECUTABLE` and `LLVMRANLIB_EXECUTABLE` cmake cache variables.
-
### Stable and Experimental Library Versions
The main branch contains the latest stable version of the benchmarking library;
@@ -552,6 +548,29 @@ pair.
BENCHMARK(BM_SetInsert)->Ranges({{1<<10, 8<<10}, {128, 512}});
```
+Some benchmarks may require specific argument values that cannot be expressed
+with `Ranges`. In this case, `ArgsProduct` offers the ability to generate a
+benchmark input for each combination in the product of the supplied vectors.
+
+```c++
+BENCHMARK(BM_SetInsert)
+ ->ArgsProduct({{1<<10, 3<<10, 8<<10}, {20, 40, 60, 80}})
+// would generate the same benchmark arguments as
+BENCHMARK(BM_SetInsert)
+ ->Args({1<<10, 20})
+ ->Args({3<<10, 20})
+ ->Args({8<<10, 20})
+ ->Args({3<<10, 40})
+ ->Args({8<<10, 40})
+ ->Args({1<<10, 40})
+ ->Args({1<<10, 60})
+ ->Args({3<<10, 60})
+ ->Args({8<<10, 60})
+ ->Args({1<<10, 80})
+ ->Args({3<<10, 80})
+ ->Args({8<<10, 80});
+```
+
For more complex patterns of inputs, passing a custom function to `Apply` allows
programmatic specification of an arbitrary set of arguments on which to run the
benchmark. The following example enumerates a dense range on one parameter,
diff --git a/WORKSPACE b/WORKSPACE
index 5438ad3..c00d12c 100644
--- a/WORKSPACE
+++ b/WORKSPACE
@@ -34,4 +34,3 @@ new_local_repository(
build_file = "@//bindings/python:python_headers.BUILD",
path = "/usr/include/python3.6", # May be overwritten by setup.py.
)
-
diff --git a/bindings/python/google_benchmark/__init__.py b/bindings/python/google_benchmark/__init__.py
index c3a93bf..44531f9 100644
--- a/bindings/python/google_benchmark/__init__.py
+++ b/bindings/python/google_benchmark/__init__.py
@@ -39,27 +39,27 @@ __version__ = "0.1.0"
def register(f=None, *, name=None):
- if f is None:
- return lambda f: register(f, name=name)
- if name is None:
- name = f.__name__
- _benchmark.RegisterBenchmark(name, f)
- return f
+ if f is None:
+ return lambda f: register(f, name=name)
+ if name is None:
+ name = f.__name__
+ _benchmark.RegisterBenchmark(name, f)
+ return f
def _flags_parser(argv):
- argv = _benchmark.Initialize(argv)
- return app.parse_flags_with_usage(argv)
+ argv = _benchmark.Initialize(argv)
+ return app.parse_flags_with_usage(argv)
def _run_benchmarks(argv):
- if len(argv) > 1:
- raise app.UsageError('Too many command-line arguments.')
- return _benchmark.RunSpecifiedBenchmarks()
+ if len(argv) > 1:
+ raise app.UsageError('Too many command-line arguments.')
+ return _benchmark.RunSpecifiedBenchmarks()
def main(argv=None):
- return app.run(_run_benchmarks, argv=argv, flags_parser=_flags_parser)
+ return app.run(_run_benchmarks, argv=argv, flags_parser=_flags_parser)
# Methods for use with custom main function.
diff --git a/bindings/python/google_benchmark/example.py b/bindings/python/google_benchmark/example.py
index e968462..0dead75 100644
--- a/bindings/python/google_benchmark/example.py
+++ b/bindings/python/google_benchmark/example.py
@@ -25,24 +25,24 @@ import google_benchmark as benchmark
@benchmark.register
def empty(state):
- while state:
- pass
+ while state:
+ pass
@benchmark.register
def sum_million(state):
- while state:
- sum(range(1_000_000))
+ while state:
+ sum(range(1_000_000))
@benchmark.register
def skipped(state):
- if True: # Test some predicate here.
- state.skip_with_error('some error')
- return # NOTE: You must explicitly return, or benchmark will continue.
+ if True: # Test some predicate here.
+ state.skip_with_error('some error')
+ return # NOTE: You must explicitly return, or benchmark will continue.
- ... # Benchmark code would be here.
+ # Benchmark code would be here.
if __name__ == '__main__':
- benchmark.main()
+ benchmark.main()
diff --git a/cmake/CXXFeatureCheck.cmake b/cmake/CXXFeatureCheck.cmake
index 059d510..62e6741 100644
--- a/cmake/CXXFeatureCheck.cmake
+++ b/cmake/CXXFeatureCheck.cmake
@@ -27,6 +27,11 @@ function(cxx_feature_check FILE)
return()
endif()
+ if (ARGC GREATER 1)
+ message(STATUS "Enabling additional flags: ${ARGV1}")
+ list(APPEND BENCHMARK_CXX_LINKER_FLAGS ${ARGV1})
+ endif()
+
if (NOT DEFINED COMPILE_${FEATURE})
message(STATUS "Performing Test ${FEATURE}")
if(CMAKE_CROSSCOMPILING)
diff --git a/cmake/GoogleTest.cmake.in b/cmake/GoogleTest.cmake.in
index 28818ee..fd957ff 100644
--- a/cmake/GoogleTest.cmake.in
+++ b/cmake/GoogleTest.cmake.in
@@ -31,7 +31,7 @@ if(EXISTS "${GOOGLETEST_PATH}" AND IS_DIRECTORY "${GOOGLETEST_PATH}"
)
else()
if(NOT ALLOW_DOWNLOADING_GOOGLETEST)
- message(SEND_ERROR "Did not find Google Test sources! Either pass correct path in GOOGLETEST_PATH, or enable ALLOW_DOWNLOADING_GOOGLETEST, or disable BENCHMARK_ENABLE_GTEST_TESTS / BENCHMARK_ENABLE_TESTING.")
+ message(SEND_ERROR "Did not find Google Test sources! Either pass correct path in GOOGLETEST_PATH, or enable BENCHMARK_DOWNLOAD_DEPENDENCIES, or disable BENCHMARK_ENABLE_GTEST_TESTS / BENCHMARK_ENABLE_TESTING.")
else()
message(WARNING "Did not find Google Test sources! Fetching from web...")
ExternalProject_Add(
diff --git a/include/benchmark/benchmark.h b/include/benchmark/benchmark.h
index da638f9..01f1262 100644
--- a/include/benchmark/benchmark.h
+++ b/include/benchmark/benchmark.h
@@ -828,6 +828,11 @@ class Benchmark {
// REQUIRES: The function passed to the constructor must accept arg1, arg2 ...
Benchmark* Ranges(const std::vector<std::pair<int64_t, int64_t> >& ranges);
+ // Run this benchmark once for each combination of values in the (cartesian)
+ // product of the supplied argument lists.
+ // REQUIRES: The function passed to the constructor must accept arg1, arg2 ...
+ Benchmark* ArgsProduct(const std::vector<std::vector<int64_t> >& arglists);
+
// Equivalent to ArgNames({name})
Benchmark* ArgName(const std::string& name);
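The `ArgsProduct` declaration above is the headline API addition in this upgrade. A minimal usage sketch, following the README example earlier in this change; the body of `BM_SetInsert` here is a hypothetical stand-in, not the library's own:

```c++
#include "benchmark/benchmark.h"

// Hypothetical placeholder body; any function taking benchmark::State& works.
static void BM_SetInsert(benchmark::State& state) {
  for (auto _ : state) {
    benchmark::DoNotOptimize(state.range(0) + state.range(1));
  }
}

// Runs once per element of the cartesian product of the two lists:
// 3 x 4 = 12 argument pairs, equivalent to twelve chained ->Args({...}) calls.
BENCHMARK(BM_SetInsert)
    ->ArgsProduct({{1 << 10, 3 << 10, 8 << 10}, {20, 40, 60, 80}});

BENCHMARK_MAIN();
```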
diff --git a/setup.py b/setup.py
index a2b0b91..800a879 100644
--- a/setup.py
+++ b/setup.py
@@ -9,89 +9,91 @@ import setuptools
from setuptools.command import build_ext
-here = os.path.dirname(os.path.abspath(__file__))
+HERE = os.path.dirname(os.path.abspath(__file__))
IS_WINDOWS = sys.platform.startswith('win')
def _get_version():
- """Parse the version string from __init__.py."""
- with open(os.path.join(here, 'bindings', 'python', 'google_benchmark', '__init__.py')) as f:
- try:
- version_line = next(
- line for line in f if line.startswith('__version__'))
- except StopIteration:
- raise ValueError('__version__ not defined in __init__.py')
- else:
- ns = {}
- exec(version_line, ns) # pylint: disable=exec-used
- return ns['__version__']
+ """Parse the version string from __init__.py."""
+ with open(os.path.join(
+ HERE, 'bindings', 'python', 'google_benchmark', '__init__.py')) as init_file:
+ try:
+ version_line = next(
+ line for line in init_file if line.startswith('__version__'))
+ except StopIteration:
+ raise ValueError('__version__ not defined in __init__.py')
+ else:
+ namespace = {}
+ exec(version_line, namespace) # pylint: disable=exec-used
+ return namespace['__version__']
def _parse_requirements(path):
- with open(os.path.join(here, path)) as f:
- return [
- line.rstrip() for line in f
- if not (line.isspace() or line.startswith('#'))
- ]
+ with open(os.path.join(HERE, path)) as requirements:
+ return [
+ line.rstrip() for line in requirements
+ if not (line.isspace() or line.startswith('#'))
+ ]
class BazelExtension(setuptools.Extension):
- """A C/C++ extension that is defined as a Bazel BUILD target."""
+ """A C/C++ extension that is defined as a Bazel BUILD target."""
- def __init__(self, name, bazel_target):
- self.bazel_target = bazel_target
- self.relpath, self.target_name = (
- posixpath.relpath(bazel_target, '//').split(':'))
- setuptools.Extension.__init__(self, name, sources=[])
+ def __init__(self, name, bazel_target):
+ self.bazel_target = bazel_target
+ self.relpath, self.target_name = (
+ posixpath.relpath(bazel_target, '//').split(':'))
+ setuptools.Extension.__init__(self, name, sources=[])
class BuildBazelExtension(build_ext.build_ext):
- """A command that runs Bazel to build a C/C++ extension."""
-
- def run(self):
- for ext in self.extensions:
- self.bazel_build(ext)
- build_ext.build_ext.run(self)
-
- def bazel_build(self, ext):
- with open('WORKSPACE', 'r') as f:
- workspace_contents = f.read()
-
- with open('WORKSPACE', 'w') as f:
- f.write(re.sub(
- r'(?<=path = ").*(?=", # May be overwritten by setup\.py\.)',
- sysconfig.get_python_inc().replace(os.path.sep, posixpath.sep),
- workspace_contents))
-
- if not os.path.exists(self.build_temp):
- os.makedirs(self.build_temp)
-
- bazel_argv = [
- 'bazel',
- 'build',
- ext.bazel_target,
- '--symlink_prefix=' + os.path.join(self.build_temp, 'bazel-'),
- '--compilation_mode=' + ('dbg' if self.debug else 'opt'),
- ]
-
- if IS_WINDOWS:
- # Link with python*.lib.
- for library_dir in self.library_dirs:
- bazel_argv.append('--linkopt=/LIBPATH:' + library_dir)
-
- self.spawn(bazel_argv)
-
- shared_lib_suffix = '.dll' if IS_WINDOWS else '.so'
- ext_bazel_bin_path = os.path.join(
- self.build_temp, 'bazel-bin',
- ext.relpath, ext.target_name + shared_lib_suffix)
- ext_dest_path = self.get_ext_fullpath(ext.name)
- ext_dest_dir = os.path.dirname(ext_dest_path)
- if not os.path.exists(ext_dest_dir):
- os.makedirs(ext_dest_dir)
- shutil.copyfile(ext_bazel_bin_path, ext_dest_path)
+ """A command that runs Bazel to build a C/C++ extension."""
+
+ def run(self):
+ for ext in self.extensions:
+ self.bazel_build(ext)
+ build_ext.build_ext.run(self)
+
+ def bazel_build(self, ext):
+ """Runs the bazel build to create the package."""
+ with open('WORKSPACE', 'r') as workspace:
+ workspace_contents = workspace.read()
+
+ with open('WORKSPACE', 'w') as workspace:
+ workspace.write(re.sub(
+ r'(?<=path = ").*(?=", # May be overwritten by setup\.py\.)',
+ sysconfig.get_python_inc().replace(os.path.sep, posixpath.sep),
+ workspace_contents))
+
+ if not os.path.exists(self.build_temp):
+ os.makedirs(self.build_temp)
+
+ bazel_argv = [
+ 'bazel',
+ 'build',
+ ext.bazel_target,
+ '--symlink_prefix=' + os.path.join(self.build_temp, 'bazel-'),
+ '--compilation_mode=' + ('dbg' if self.debug else 'opt'),
+ ]
+
+ if IS_WINDOWS:
+ # Link with python*.lib.
+ for library_dir in self.library_dirs:
+ bazel_argv.append('--linkopt=/LIBPATH:' + library_dir)
+
+ self.spawn(bazel_argv)
+
+ shared_lib_suffix = '.dll' if IS_WINDOWS else '.so'
+ ext_bazel_bin_path = os.path.join(
+ self.build_temp, 'bazel-bin',
+ ext.relpath, ext.target_name + shared_lib_suffix)
+ ext_dest_path = self.get_ext_fullpath(ext.name)
+ ext_dest_dir = os.path.dirname(ext_dest_path)
+ if not os.path.exists(ext_dest_dir):
+ os.makedirs(ext_dest_dir)
+ shutil.copyfile(ext_bazel_bin_path, ext_dest_path)
setuptools.setup(
@@ -106,7 +108,8 @@ setuptools.setup(
packages=setuptools.find_packages('bindings/python'),
install_requires=_parse_requirements('bindings/python/requirements.txt'),
cmdclass=dict(build_ext=BuildBazelExtension),
- ext_modules=[BazelExtension('google_benchmark._benchmark', '//bindings/python/google_benchmark:_benchmark')],
+ ext_modules=[BazelExtension(
+ 'google_benchmark._benchmark', '//bindings/python/google_benchmark:_benchmark')],
zip_safe=False,
# PyPI package information.
classifiers=[
diff --git a/src/benchmark_register.cc b/src/benchmark_register.cc
index cca39b2..65d9944 100644
--- a/src/benchmark_register.cc
+++ b/src/benchmark_register.cc
@@ -31,6 +31,7 @@
#include <fstream>
#include <iostream>
#include <memory>
+#include <numeric>
#include <sstream>
#include <thread>
@@ -303,33 +304,41 @@ Benchmark* Benchmark::Ranges(
const std::vector<std::pair<int64_t, int64_t>>& ranges) {
CHECK(ArgsCnt() == -1 || ArgsCnt() == static_cast<int>(ranges.size()));
std::vector<std::vector<int64_t>> arglists(ranges.size());
- std::size_t total = 1;
for (std::size_t i = 0; i < ranges.size(); i++) {
AddRange(&arglists[i], ranges[i].first, ranges[i].second,
range_multiplier_);
- total *= arglists[i].size();
}
- std::vector<std::size_t> ctr(arglists.size(), 0);
+ ArgsProduct(arglists);
- for (std::size_t i = 0; i < total; i++) {
- std::vector<int64_t> tmp;
- tmp.reserve(arglists.size());
-
- for (std::size_t j = 0; j < arglists.size(); j++) {
- tmp.push_back(arglists[j].at(ctr[j]));
- }
+ return this;
+}
- args_.push_back(std::move(tmp));
+Benchmark* Benchmark::ArgsProduct(
+ const std::vector<std::vector<int64_t>>& arglists) {
+ CHECK(ArgsCnt() == -1 || ArgsCnt() == static_cast<int>(arglists.size()));
- for (std::size_t j = 0; j < arglists.size(); j++) {
- if (ctr[j] + 1 < arglists[j].size()) {
- ++ctr[j];
- break;
- }
- ctr[j] = 0;
+ std::vector<std::size_t> indices(arglists.size());
+ const std::size_t total = std::accumulate(
+ std::begin(arglists), std::end(arglists), std::size_t{1},
+ [](const std::size_t res, const std::vector<int64_t>& arglist) {
+ return res * arglist.size();
+ });
+ std::vector<int64_t> args;
+ args.reserve(arglists.size());
+ for (std::size_t i = 0; i < total; i++) {
+ for (std::size_t arg = 0; arg < arglists.size(); arg++) {
+ args.push_back(arglists[arg][indices[arg]]);
}
+ args_.push_back(args);
+ args.clear();
+
+ std::size_t arg = 0;
+ do {
+ indices[arg] = (indices[arg] + 1) % arglists[arg].size();
+ } while (indices[arg++] == 0 && arg < arglists.size());
}
+
return this;
}
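The loop in the new `ArgsProduct` implementation advances `indices` like a mixed-radix odometer: after each emitted combination, the first digit is incremented modulo its list size, and the increment carries into the next digit whenever the current one wraps to zero. A self-contained sketch of just that counter, with made-up argument lists:

```c++
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <vector>

int main() {
  const std::vector<std::vector<int64_t>> arglists = {{1, 2}, {10, 20, 30}};
  std::vector<std::size_t> indices(arglists.size(), 0);
  std::size_t total = 1;
  for (const auto& list : arglists) total *= list.size();

  for (std::size_t i = 0; i < total; i++) {
    // Emit the combination selected by the current digits.
    for (std::size_t arg = 0; arg < arglists.size(); arg++)
      std::cout << arglists[arg][indices[arg]] << ' ';
    std::cout << '\n';

    // Increment the counter, carrying while a digit wraps back to zero.
    std::size_t arg = 0;
    do {
      indices[arg] = (indices[arg] + 1) % arglists[arg].size();
    } while (indices[arg++] == 0 && arg < arglists.size());
  }
}
```

This prints all six combinations (1 10, 2 10, 1 20, 2 20, 1 30, 2 30), matching the order in which `ArgsProduct` pushes argument sets onto `args_`.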
diff --git a/src/commandlineflags.cc b/src/commandlineflags.cc
index 3380a12..0648fe3 100644
--- a/src/commandlineflags.cc
+++ b/src/commandlineflags.cc
@@ -88,7 +88,7 @@ static std::string FlagToEnvVar(const char* flag) {
for (size_t i = 0; i != flag_str.length(); ++i)
env_var += static_cast<char>(::toupper(flag_str.c_str()[i]));
- return "BENCHMARK_" + env_var;
+ return env_var;
}
} // namespace
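The one-line change to `FlagToEnvVar` removes a redundant prefix: flags in this library are already named `benchmark_...`, so the old code looked up `BENCHMARK_BENCHMARK_FORMAT` for the flag `benchmark_format`, while the new code looks up `BENCHMARK_FORMAT`. The test updates below, which drop the extra `BENCHMARK_` from every `setenv`/`unsetenv` call, follow from this. A minimal sketch of the resulting mapping:

```c++
#include <cctype>
#include <iostream>
#include <string>

// Mirrors the updated FlagToEnvVar: uppercase the flag name, no added prefix.
std::string FlagToEnvVar(const std::string& flag) {
  std::string env_var;
  for (char c : flag) env_var += static_cast<char>(::toupper(c));
  return env_var;
}

int main() {
  std::cout << FlagToEnvVar("benchmark_format") << '\n';  // BENCHMARK_FORMAT
  std::cout << FlagToEnvVar("in_env") << '\n';            // IN_ENV
}
```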
diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt
index 0d228b8..c1a3a3f 100644
--- a/test/CMakeLists.txt
+++ b/test/CMakeLists.txt
@@ -113,6 +113,9 @@ add_test(NAME map_test COMMAND map_test --benchmark_min_time=0.01)
compile_benchmark_test(multiple_ranges_test)
add_test(NAME multiple_ranges_test COMMAND multiple_ranges_test --benchmark_min_time=0.01)
+compile_benchmark_test(args_product_test)
+add_test(NAME args_product_test COMMAND args_product_test --benchmark_min_time=0.01)
+
compile_benchmark_test_with_main(link_main_test)
add_test(NAME link_main_test COMMAND link_main_test --benchmark_min_time=0.01)
diff --git a/test/args_product_test.cc b/test/args_product_test.cc
new file mode 100644
index 0000000..8a859f8
--- /dev/null
+++ b/test/args_product_test.cc
@@ -0,0 +1,77 @@
+#include "benchmark/benchmark.h"
+
+#include <cassert>
+#include <iostream>
+#include <set>
+#include <vector>
+
+class ArgsProductFixture : public ::benchmark::Fixture {
+ public:
+ ArgsProductFixture()
+ : expectedValues({{0, 100, 2000, 30000},
+ {1, 15, 3, 8},
+ {1, 15, 3, 9},
+ {1, 15, 7, 8},
+ {1, 15, 7, 9},
+ {1, 15, 10, 8},
+ {1, 15, 10, 9},
+ {2, 15, 3, 8},
+ {2, 15, 3, 9},
+ {2, 15, 7, 8},
+ {2, 15, 7, 9},
+ {2, 15, 10, 8},
+ {2, 15, 10, 9},
+ {4, 5, 6, 11}}) {}
+
+ void SetUp(const ::benchmark::State& state) {
+ std::vector<int64_t> ranges = {state.range(0), state.range(1),
+ state.range(2), state.range(3)};
+
+ assert(expectedValues.find(ranges) != expectedValues.end());
+
+ actualValues.insert(ranges);
+ }
+
+ // NOTE: This is not TearDown as we want to check after _all_ runs are
+ // complete.
+ virtual ~ArgsProductFixture() {
+ if (actualValues != expectedValues) {
+ std::cout << "EXPECTED\n";
+ for (auto v : expectedValues) {
+ std::cout << "{";
+ for (int64_t iv : v) {
+ std::cout << iv << ", ";
+ }
+ std::cout << "}\n";
+ }
+ std::cout << "ACTUAL\n";
+ for (auto v : actualValues) {
+ std::cout << "{";
+ for (int64_t iv : v) {
+ std::cout << iv << ", ";
+ }
+ std::cout << "}\n";
+ }
+ }
+ }
+
+ std::set<std::vector<int64_t>> expectedValues;
+ std::set<std::vector<int64_t>> actualValues;
+};
+
+BENCHMARK_DEFINE_F(ArgsProductFixture, Empty)(benchmark::State& state) {
+ for (auto _ : state) {
+ int64_t product =
+ state.range(0) * state.range(1) * state.range(2) * state.range(3);
+ for (int64_t x = 0; x < product; x++) {
+ benchmark::DoNotOptimize(x);
+ }
+ }
+}
+
+BENCHMARK_REGISTER_F(ArgsProductFixture, Empty)
+ ->Args({0, 100, 2000, 30000})
+ ->ArgsProduct({{1, 2}, {15}, {3, 7, 10}, {8, 9}})
+ ->Args({4, 5, 6, 11});
+
+BENCHMARK_MAIN();
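Note how the registration at the end of this test composes plain `Args` calls with `ArgsProduct`: each call appends to the same argument list, so the fixture's `expectedValues` holds 1 + (2 × 1 × 3 × 2) + 1 = 14 argument sets.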
diff --git a/test/commandlineflags_gtest.cc b/test/commandlineflags_gtest.cc
index 36bdb44..656020f 100644
--- a/test/commandlineflags_gtest.cc
+++ b/test/commandlineflags_gtest.cc
@@ -26,175 +26,175 @@ int unsetenv(const char* name) {
#endif // BENCHMARK_OS_WINDOWS
TEST(BoolFromEnv, Default) {
- ASSERT_EQ(unsetenv("BENCHMARK_NOT_IN_ENV"), 0);
+ ASSERT_EQ(unsetenv("NOT_IN_ENV"), 0);
EXPECT_EQ(BoolFromEnv("not_in_env", true), true);
}
TEST(BoolFromEnv, False) {
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "0", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "0", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", true), false);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "N", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "N", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", true), false);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "n", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "n", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", true), false);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "NO", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "NO", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", true), false);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "No", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "No", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", true), false);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "no", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "no", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", true), false);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "F", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "F", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", true), false);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "f", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "f", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", true), false);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "FALSE", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "FALSE", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", true), false);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "False", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "False", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", true), false);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "false", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "false", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", true), false);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "OFF", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "OFF", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", true), false);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "Off", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "Off", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", true), false);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "off", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "off", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", true), false);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
}
TEST(BoolFromEnv, True) {
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "1", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "1", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", false), true);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "Y", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "Y", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", false), true);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "y", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "y", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", false), true);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "YES", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "YES", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", false), true);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "Yes", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "Yes", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", false), true);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "yes", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "yes", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", false), true);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "T", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "T", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", false), true);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "t", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "t", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", false), true);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "TRUE", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "TRUE", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", false), true);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "True", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "True", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", false), true);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "true", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "true", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", false), true);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "ON", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "ON", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", false), true);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "On", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "On", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", false), true);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "on", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "on", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", false), true);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
#ifndef BENCHMARK_OS_WINDOWS
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "", 1), 0);
EXPECT_EQ(BoolFromEnv("in_env", false), true);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
#endif
}
TEST(Int32FromEnv, NotInEnv) {
- ASSERT_EQ(unsetenv("BENCHMARK_NOT_IN_ENV"), 0);
+ ASSERT_EQ(unsetenv("NOT_IN_ENV"), 0);
EXPECT_EQ(Int32FromEnv("not_in_env", 42), 42);
}
TEST(Int32FromEnv, InvalidInteger) {
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "foo", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "foo", 1), 0);
EXPECT_EQ(Int32FromEnv("in_env", 42), 42);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
}
TEST(Int32FromEnv, ValidInteger) {
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "42", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "42", 1), 0);
EXPECT_EQ(Int32FromEnv("in_env", 64), 42);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
}
TEST(DoubleFromEnv, NotInEnv) {
- ASSERT_EQ(unsetenv("BENCHMARK_NOT_IN_ENV"), 0);
+ ASSERT_EQ(unsetenv("NOT_IN_ENV"), 0);
EXPECT_EQ(DoubleFromEnv("not_in_env", 0.51), 0.51);
}
TEST(DoubleFromEnv, InvalidReal) {
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "foo", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "foo", 1), 0);
EXPECT_EQ(DoubleFromEnv("in_env", 0.51), 0.51);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
}
TEST(DoubleFromEnv, ValidReal) {
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "0.51", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "0.51", 1), 0);
EXPECT_EQ(DoubleFromEnv("in_env", 0.71), 0.51);
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
}
TEST(StringFromEnv, Default) {
- ASSERT_EQ(unsetenv("BENCHMARK_NOT_IN_ENV"), 0);
+ ASSERT_EQ(unsetenv("NOT_IN_ENV"), 0);
EXPECT_STREQ(StringFromEnv("not_in_env", "foo"), "foo");
}
TEST(StringFromEnv, Valid) {
- ASSERT_EQ(setenv("BENCHMARK_IN_ENV", "foo", 1), 0);
+ ASSERT_EQ(setenv("IN_ENV", "foo", 1), 0);
EXPECT_STREQ(StringFromEnv("in_env", "bar"), "foo");
- unsetenv("BENCHMARK_IN_ENV");
+ unsetenv("IN_ENV");
}
} // namespace
diff --git a/tools/gbench/util.py b/tools/gbench/util.py
index 1f8e8e2..661c4ba 100644
--- a/tools/gbench/util.py
+++ b/tools/gbench/util.py
@@ -158,7 +158,6 @@ def run_or_load_benchmark(filename, benchmark_flags):
ftype = check_input_file(filename)
if ftype == IT_JSON:
return load_benchmark_results(filename)
- elif ftype == IT_Executable:
+ if ftype == IT_Executable:
return run_benchmark(filename, benchmark_flags)
- else:
- assert False # This branch is unreachable
+ raise ValueError('Unknown file type %s' % ftype)