author     Evgeny Astigeevich <evgeny.astigeevich@linaro.org>  2020-03-04 12:48:42 +0000
committer  Evgeny Astigeevich <evgeny.astigeevich@linaro.org>  2020-04-08 13:47:42 +0100
commit     89dab669dca6d768a79101b1e480794fa46012d5 (patch)
tree       b9fcde8bdf4e527d0cbf728312d1984f6c7fa3c8
parent     9d2d671b13d82decdcebe852d7a8c50ed9906d65 (diff)
download   art-testing-89dab669dca6d768a79101b1e480794fa46012d5.tar.gz
Add ability to calibrate benchmark as separate task
Tasks such as collecting performance profiles need calibration to happen outside
the profiled run. This CL adds the ability to run calibration as a separate task.
The result of the calibration can be provided to RunBench; in that case RunBench
won't run calibration itself.

Test: scripts/benchmarks/perf_profile_benchmarks_target.sh --cpu big --single-event cpu-cycles benchmarks/algorithm/DeltaBlue
Test: scripts/benchmarks/perf_profile_benchmarks_target.sh --cpu little --single-event cpu-cycles benchmarks/specjvm2008/compress/CompressBench
Test: scripts/benchmarks/benchmarks_run_target.sh --mode 64 --cpu big --iterations 1
Change-Id: I771e9029b23345ba6130cdfb1af3161d50df8e94
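A sketch of the intended workflow, based on the help messages added in this patch (the ArrayAccess specification below is the example given in CalibrateBench's HELP_MESSAGE; actual iteration counts depend on the device):

    # Calibrate separately; CalibrateBench prints a benchmark specification.
    java org.linaro.bench.CalibrateBench --target_running_time 400 benchmarks/micro/ArrayAccess
    benchmarks.micro.ArrayAccess:timeAccessArrayConstants:10000:timeAccessArrayVariables:500

    # Feed the specification back to RunBench; calibration is then skipped.
    java org.linaro.bench.RunBench benchmarks.micro.ArrayAccess:timeAccessArrayConstants:10000:timeAccessArrayVariables:500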
-rw-r--r--  framework/org/linaro/bench/Benchmark.java       287
-rw-r--r--  framework/org/linaro/bench/CalibrateBench.java   60
-rw-r--r--  framework/org/linaro/bench/RunBench.java        124
3 files changed, 344 insertions(+), 127 deletions(-)
diff --git a/framework/org/linaro/bench/Benchmark.java b/framework/org/linaro/bench/Benchmark.java
index 30a7b5a..c7854aa 100644
--- a/framework/org/linaro/bench/Benchmark.java
+++ b/framework/org/linaro/bench/Benchmark.java
@@ -22,6 +22,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
+import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -50,26 +51,51 @@ public class Benchmark {
private static final int ITERATIONS_LIMIT = 0x400000;
+ // Default target running time for benchmarks.
+ public static final long DEFAULT_TARGET_RUNNING_TIME_MS = 400;
+
+ // Constant indicating that the calibration time is unknown.
+ private static final long UNKNOWN_CALIBRATION_TIME = -1;
+
/*
* BenchmarkMethod is a class to work with methods containing benchmarking code.
* Those methods run benchmarking code a number of iterations. The number of
- * iterations can either be provided by the methods via IterationsAnnotation
+ * iterations can be explicitly provided in the constructor or via the method's IterationsAnnotation
* or be calculated using calibration process.
*/
private static class BenchmarkMethod {
+ // Constant indicating that the iteration count is unknown.
+ private static final int UNKNOWN_ITERATION_COUNT = -1;
+
private Object parent;
private Method method;
private int iterationsCount;
private String id;
private boolean doWarmup;
- public BenchmarkMethod(Object parent, Method method) {
+ // Construct BenchmarkMethod with the provided iteration count.
+ // Arguments:
+ // parent - an instance of the class containing benchmarking methods.
+ // method - a method containing benchmarking code.
+ // iterationCount - the number of iterations for which the method runs
+ // the benchmarking code. UNKNOWN_ITERATION_COUNT can be used
+ // if the iteration count is provided later.
+ public BenchmarkMethod(Object parent, Method method, int iterationCount) {
this.parent = parent;
this.method = method;
this.id = benchmarkIdentifier(method);
this.doWarmup = true;
+ this.iterationsCount = iterationCount;
+ }
- this.iterationsCount = -1;
+ // Construct BenchmarkMethod based on the annotation if the method provides it.
+ // If the method does not have the annotation, the iteration count will be set to
+ // UNKNOWN_ITERATION_COUNT.
+ // Arguments:
+ // parent - an instance of the class containing benchmarking methods.
+ // method - a method containing benchmarking code.
+ public BenchmarkMethod(Object parent, Method method) {
+ this(parent, method, UNKNOWN_ITERATION_COUNT);
IterationsAnnotation annotation = method.getAnnotation(IterationsAnnotation.class);
if (annotation != null) {
this.doWarmup = !annotation.noWarmup();
@@ -83,22 +109,33 @@ public class Benchmark {
return id;
}
+ public String getName() {
+ return method.getName();
+ }
+
+ public int getIterationCount() {
+ return iterationsCount;
+ }
+
public boolean needsCalibration() {
- return iterationsCount == -1;
+ return iterationsCount == UNKNOWN_ITERATION_COUNT;
}
- public void calibrateIterations(long calibrationMinTimeNs, long targetRunTimeNs) {
+ public void calibrateIterations() {
// Estimate how long it takes to run one iteration.
long iterations = 1;
long duration = -1;
- while ((duration < calibrationMinTimeNs) && (iterations < ITERATIONS_LIMIT)) {
+ if (Benchmark.calibrationTimeNs == UNKNOWN_CALIBRATION_TIME) {
+ Benchmark.calibrationTimeNs = Benchmark.calculateCalibrationTimeNs();
+ }
+ while ((duration < calibrationTimeNs) && (iterations < ITERATIONS_LIMIT)) {
iterations *= 2;
duration = timeIterations((int) iterations);
}
// Estimate the number of iterations to run based on the calibration
// phase, and benchmark the function.
double iterTime = duration / (double) iterations;
- this.iterationsCount = (int) Math.max(1.0, targetRunTimeNs / iterTime);
+ this.iterationsCount = (int) Math.max(1.0, Benchmark.targetRunningTimeNs / iterTime);
}
public Result run() {
@@ -194,19 +231,6 @@ public class Benchmark {
}
}
- private static final class ParticularBenchmarkMethodSelector implements MethodSelector {
- private String particularBenchMethodName;
-
- public ParticularBenchmarkMethodSelector(String methodName) {
- particularBenchMethodName = methodName;
- }
-
- public boolean accept(Method method) {
- return method.getName().equals(particularBenchMethodName)
- && doesMethodHaveOneIntParam(method);
- }
- }
-
private static final class VerifyMethodSelector implements MethodSelector {
public boolean accept(Method method) {
return method.getName().startsWith(VERIFY_BENCH_METHOD_PREFIX)
@@ -215,12 +239,115 @@ public class Benchmark {
}
}
+ /*
+ * This class provides access to the information encoded in the benchmark specification.
+ * The benchmark specification format:
+ * <benchmark_class_name>[:<benchmark_method>:<iterations>]+
+ */
+ private static class BenchmarkSpecification {
+ String[] parts;
+
+ public BenchmarkSpecification(String str) {
+ parts = str.split(":");
+ if (parts.length < 3 || (parts.length % 2) != 1) {
+ throw new IllegalArgumentException("The provided benchmark specification is invalid.");
+ }
+ }
+
+ public String getClassName() {
+ return parts[0];
+ }
+
+ public int getMethodCount() {
+ // The method count is the number of parts, minus one for the class name,
+ // divided by two because each method is encoded as <method name>:<iteration count>.
+ return (parts.length - 1) / 2;
+ }
+
+ private int getMethodPartIndex(int index) {
+ return 2 * index + 1; // skipping the class name.
+ }
+
+ public String getMethodName(int index) {
+ return parts[getMethodPartIndex(index)];
+ }
+
+ public int getMethodIterationCount(int index) {
+ return Integer.parseInt(parts[getMethodPartIndex(index) + 1]);
+ }
+ }
+
+ // The target running time for benchmarks.
+ private static long targetRunningTimeNs =
+ TimeUnit.NANOSECONDS.convert(DEFAULT_TARGET_RUNNING_TIME_MS, TimeUnit.MILLISECONDS);
+ // The time used for calibration of benchmarks. It is usually less than the target running time
+ // to keep the calibration process as quick as possible. UNKNOWN_CALIBRATION_TIME means the time
+ // has not been provided by the user; it will be calculated based on the target running time.
+ private static long calibrationTimeNs = UNKNOWN_CALIBRATION_TIME;
+
private Object benchInstance;
- private List<Method> setupMethods;
- private List<BenchmarkMethod> benchMethods;
- private List<Method> verifyMethods;
+ private List<Method> setupMethods = new ArrayList<Method>();
+ private List<BenchmarkMethod> benchMethods = new ArrayList<BenchmarkMethod>();
+ private List<Method> verifyMethods = new ArrayList<Method>();
+
+ private void findSetupAndVerifyMethods() {
+ // Each method declared in benchmarkClass is checked to see whether it is
+ // one of the special methods ('setup', 'verify').
+ // Found methods are stored into the corresponding lists.
+ MethodSelector setupMethodsSelector = new SetupMethodSelector();
+ MethodSelector verifyMethodsSelector = new VerifyMethodSelector();
+
+ for (Method method : benchInstance.getClass().getDeclaredMethods()) {
+ if (setupMethodsSelector.accept(method)) {
+ setupMethods.add(method);
+ } else if (verifyMethodsSelector.accept(method)) {
+ verifyMethods.add(method);
+ }
+ }
+ }
+
+ private void findTimeBenchmarkMethods() {
+ // Each method declared in benchmarkClass is checked to see whether it is
+ // a special 'time' method.
+ MethodSelector benchMethodsSelector = new TimeBenchmarkMethodSelector();
+ for (Method method : benchInstance.getClass().getDeclaredMethods()) {
+ if (benchMethodsSelector.accept(method)) {
+ benchMethods.add(new BenchmarkMethod(benchInstance, method));
+ }
+ }
+ }
+
+ private void createBenchInstance(String className) {
+ try {
+ Class<?> clazz = Class.forName(className);
+ benchInstance = clazz.newInstance();
+ } catch (Exception e) {
+ throw new RuntimeException("Failed to create a benchmark instance: " + className, e);
+ }
+ }
+
+ // Construct Benchmark based on BenchmarkSpecification, without calibration.
+ private Benchmark(BenchmarkSpecification benchmarkSpecification) {
+ createBenchInstance(benchmarkSpecification.getClassName());
+ findSetupAndVerifyMethods();
+ for (int i = 0; i < benchmarkSpecification.getMethodCount(); ++i) {
+ String methodName = benchmarkSpecification.getMethodName(i);
+ int iterationCount = benchmarkSpecification.getMethodIterationCount(i);
+ try {
+ Method method = benchInstance.getClass().getDeclaredMethod(methodName, int.class);
+ benchMethods.add(new BenchmarkMethod(benchInstance, method, iterationCount));
+ } catch (Exception e) {
+ throw new RuntimeException("Failed to get the benchmark method: " + methodName, e);
+ }
+ }
+ // After all methods are processed, the benchmark is set up.
+ setup();
+ }
- public Benchmark(String benchName, long calibrationMinTimeNs, long benchmarkTargetRunTimeNs) {
+ // Construct Benchmark based on the name format:
+ // path/to/BenchmarkClass(.Benchmark)?
+ // All benchmarking methods of Benchmark are calibrated if needed.
+ private Benchmark(String benchName) {
if (benchName == null) {
throw new NullPointerException("The provided benchmark name is null.");
}
@@ -245,44 +372,28 @@ public class Benchmark {
String benchmarkClass = matcher.group(2);
String benchmarkMethodName = matcher.group(3);
- // Each method declared in benchmarkClass is checked whether it is
- // one of special methods ('time', 'setup', 'verify').
- // Found methods are stored into corresponding lists.
- setupMethods = new ArrayList<Method>();
- benchMethods = new ArrayList<BenchmarkMethod>();
- verifyMethods = new ArrayList<Method>();
- try {
- Class<?> clazz = Class.forName(benchmarkClassPath + benchmarkClass);
- benchInstance = clazz.newInstance();
-
- MethodSelector setupMethodsSelector = new SetupMethodSelector();
- MethodSelector benchMethodsSelector =
- (benchmarkMethodName != null)
- ? new ParticularBenchmarkMethodSelector(
- TIME_BENCH_METHOD_PREFIX + benchmarkMethodName)
- : new TimeBenchmarkMethodSelector();
- MethodSelector verifyMethodsSelector = new VerifyMethodSelector();
-
- for (Method method : clazz.getDeclaredMethods()) {
- if (setupMethodsSelector.accept(method)) {
- setupMethods.add(method);
- } else if (benchMethodsSelector.accept(method)) {
- benchMethods.add(new BenchmarkMethod(benchInstance, method));
- } else if (verifyMethodsSelector.accept(method)) {
- verifyMethods.add(method);
- }
- }
+ createBenchInstance(benchmarkClassPath + benchmarkClass);
+ findSetupAndVerifyMethods();
- if (benchMethods.isEmpty()) {
- throw new RuntimeException("No benchmark method in the benchmark: " + benchName);
+ try {
+ if (benchmarkMethodName == null) {
+ findTimeBenchmarkMethods();
+ } else {
+ Method method = benchInstance.getClass().getDeclaredMethod(TIME_BENCH_METHOD_PREFIX
+ + benchmarkMethodName, int.class);
+ benchMethods.add(new BenchmarkMethod(benchInstance, method));
}
-
- // After all methods are processed a benchmarks is setup. This includes
- // the iteration calibration process if it is needed.
- setup(calibrationMinTimeNs, benchmarkTargetRunTimeNs);
} catch (Exception e) {
throw new RuntimeException("Failed to create a benchmark: " + benchName, e);
}
+
+ if (benchMethods.isEmpty()) {
+ throw new RuntimeException("No benchmark method in the benchmark: " + benchName);
+ }
+
+ // After all methods are processed, the benchmark is set up and calibrated.
+ setup();
+ calibrate();
}
public Result[] run() {
@@ -322,15 +433,21 @@ public class Benchmark {
return verifyFailures;
}
- private void setup(long calibrationMinTimeNs, long benchmarkTargetRunTimeNs) {
+ private void setup() {
try {
for (Method method : setupMethods) {
method.invoke(benchInstance);
}
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+ private void calibrate() {
+ try {
for (BenchmarkMethod method : benchMethods) {
if (method.needsCalibration()) {
- method.calibrateIterations(calibrationMinTimeNs, benchmarkTargetRunTimeNs);
+ method.calibrateIterations();
}
}
} catch (Exception e) {
@@ -351,4 +468,56 @@ public class Benchmark {
String benchName = method.getName().substring(4);
return path + className + "." + benchName;
}
+
+ // Return the string representation of the benchmark.
+ // Its format:
+ // <benchmark_class_name>[:<benchmark_method>:<iterations>]+
+ public String toString() {
+ StringBuilder resultBuilder = new StringBuilder();
+ resultBuilder.append(benchInstance.getClass().getName());
+ for (BenchmarkMethod method : benchMethods) {
+ resultBuilder.append(':').append(method.getName());
+ resultBuilder.append(':').append(method.getIterationCount());
+ }
+ return resultBuilder.toString();
+ }
+
+ // Return an instance of Benchmark based on the provided string.
+ public static Benchmark fromString(String str) {
+ if (str == null) {
+ throw new NullPointerException("The provided string is null.");
+ }
+ if (str.isEmpty()) {
+ throw new IllegalArgumentException("The provided string is empty.");
+ }
+ if (str.indexOf(':') != -1) {
+ // Provided str has the benchmark specification format:
+ // <benchmark_class_name>[:<benchmark_method>:<iterations>]+
+ return new Benchmark(new BenchmarkSpecification(str));
+ } else {
+ // Assume that the provided str has the benchmark name format:
+ // path/to/BenchmarkClass(.Benchmark)?
+ return new Benchmark(str);
+ }
+ }
+
+ public static void setTargetRunningTime(long time) {
+ targetRunningTimeNs =
+ TimeUnit.NANOSECONDS.convert(time, TimeUnit.MILLISECONDS);
+ }
+
+ public static long getTargetRunningTimeNs() {
+ return targetRunningTimeNs;
+ }
+
+ public static void setCalibrationTime(long time) {
+ calibrationTimeNs =
+ TimeUnit.NANOSECONDS.convert(time, TimeUnit.MILLISECONDS);
+ }
+
+ public static long calculateCalibrationTimeNs() {
+ // As we want the calibration process to be quick, the calibration time is chosen
+ // to be one tenth of the target running time.
+ return targetRunningTimeNs / 10;
+ }
}
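A worked example of the calibration math above, under the defaults (the 48 ms measurement is hypothetical):

    // Defaults: targetRunningTimeNs = 400 ms, calibrationTimeNs = 400 / 10 = 40 ms.
    // calibrateIterations() doubles the iteration count until one timed run
    // lasts at least calibrationTimeNs:
    //   1, 2, 4, ..., 64 iterations; suppose 64 iterations take 48 ms.
    // iterTime        = 48 ms / 64               = 0.75 ms per iteration
    // iterationsCount = max(1, 400 ms / 0.75 ms) = 533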
diff --git a/framework/org/linaro/bench/CalibrateBench.java b/framework/org/linaro/bench/CalibrateBench.java
new file mode 100644
index 0000000..b80ce30
--- /dev/null
+++ b/framework/org/linaro/bench/CalibrateBench.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2020 Linaro Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.linaro.bench;
+
+public class CalibrateBench {
+ private static final String HELP_MESSAGE =
+ "Usage: java org.linaro.bench.CalibrateBench --target_running_time <time in ms> Benchmark\n"
+ + "Identifies a number of iterations needed for the specified benchmark methods to have\n"
+ + "the target running time.\n"
+ + "The result is output in the format:\n"
+ + "<benchmark_class_name>[:<benchmark_method>:<iterations>]+\n"
+ + "Examples of results:\n"
+ + "benchmarks.caffeinemark.FloatAtom:timeFloatAtom@100\n"
+ + "benchmarks.micro.ArrayAccess:timeAccessArrayConstants:10000:timeAccessArrayVariables:500\n"
+ + "OPTIONS:\n"
+ + "\t--target_running_time <time in ms>\n"
+ + "\t The target running time for benchmark methods.\n";
+
+ private String benchmarkName;
+
+ private void run() {
+ System.out.println(Benchmark.fromString(benchmarkName).toString());
+ }
+
+ private void parseArgs(String[] args) {
+ if (args.length != 3 ||
+ !args[0].equals("--target_running_time")) {
+ System.out.println(HELP_MESSAGE);
+ System.exit(1);
+ }
+
+ Benchmark.setTargetRunningTime(Long.valueOf(args[1]));
+ this.benchmarkName = args[2];
+ if (this.benchmarkName.isEmpty()) {
+ System.out.println("ERROR: benchmark name is not provided.");
+ System.exit(1);
+ }
+ }
+
+ public static void main(String[] args) {
+ CalibrateBench calibrateBench = new CalibrateBench();
+ calibrateBench.parseArgs(args);
+ calibrateBench.run();
+ }
+}
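CalibrateBench simply prints Benchmark.fromString(benchmarkName).toString(), so its output round-trips through Benchmark.fromString on the RunBench side. A minimal sketch of that round trip (the DeltaBlue method name and iteration count are hypothetical):

    // Name format: the constructor calibrates every benchmarking method.
    Benchmark calibrated = Benchmark.fromString("benchmarks/algorithm/DeltaBlue");
    String spec = calibrated.toString();
    // e.g. "benchmarks.algorithm.DeltaBlue:timeDeltaBlue:1500"

    // Specification format: the constructor skips calibration.
    Benchmark ready = Benchmark.fromString(spec);
    Benchmark.Result[] results = ready.run();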
diff --git a/framework/org/linaro/bench/RunBench.java b/framework/org/linaro/bench/RunBench.java
index 2dae5da..3da4013 100644
--- a/framework/org/linaro/bench/RunBench.java
+++ b/framework/org/linaro/bench/RunBench.java
@@ -22,36 +22,18 @@ import java.util.List;
import java.util.concurrent.TimeUnit;
public class RunBench {
- // Minimum valid calibration time.
- public static final long DEFAULT_CALIBRATION_MIN_TIME_NS =
- TimeUnit.NANOSECONDS.convert(50, TimeUnit.MILLISECONDS);
- // The target benchmark running time.
- public static final long DEFAULT_BENCH_TARGET_RUN_TIME_NS =
- TimeUnit.NANOSECONDS.convert(400, TimeUnit.MILLISECONDS);
-
private SimpleLogger log;
- private long calibrationMinTimeNs;
- private long benchmarkTargetRunTimeNs;
public RunBench() {
this.log = SimpleLogger.getInstance();
- calibrationMinTimeNs = DEFAULT_CALIBRATION_MIN_TIME_NS;
- benchmarkTargetRunTimeNs = DEFAULT_BENCH_TARGET_RUN_TIME_NS;
}
public void setLogLevel(SimpleLogger.LogLevel level) {
- this.log = SimpleLogger.getInstance();
- log.setLogLevel(level);
+ this.log.setLogLevel(level);
}
- public int runBenchSet(String target, boolean verify) {
- if (target == null) {
- return 1;
- }
-
+ public int runBenchSet(Benchmark benchmark, boolean verify) {
try {
- Benchmark benchmark = new Benchmark(target, calibrationMinTimeNs,
- benchmarkTargetRunTimeNs);
Benchmark.Result[] results = benchmark.run();
int verifyFailures = 0;
if (verify) {
@@ -72,78 +54,84 @@ public class RunBench {
}
public static final String helpMessage =
- "Usage: java org.linaro.bench.RunBench [OPTIONS] [Benchmark...]\n"
+ "Usage: java org.linaro.bench.RunBench [OPTIONS] [Benchmark_name|Benchmark_specification]*\n"
+ + "\n"
+ + "Benchmark_specification: <benchmark_class_name>[:<benchmark_method>:<iterations>]+\n"
+ "OPTIONS:\n"
+ "\t--help Print this error message.\n"
+ "\t--verbose Be verbose.\n"
+ "\t--debug Be more verbose than the verbose mode.\n"
+ "\t--list_benchmarks List available benchmarks and exit.\n"
/* TODO: Add a `--list_sub_benchmarks` option. */
- + "\t--benchmark_run_time <time in ms>\n"
+ + "\t--target_running_time <time in ms>\n"
+ "\t Set the target running time for benchmarks.\n"
+ "\t (default: "
- + TimeUnit.MILLISECONDS.convert(DEFAULT_BENCH_TARGET_RUN_TIME_NS, TimeUnit.NANOSECONDS)
+ + Benchmark.DEFAULT_TARGET_RUNNING_TIME_MS
+ ")\n"
- + "\t--calibration_min_time <time in ms>\n"
- + "\t Set the minimum running time for benchmark calibration.\n"
- + "\t (default: "
- + TimeUnit.MILLISECONDS.convert(DEFAULT_CALIBRATION_MIN_TIME_NS, TimeUnit.NANOSECONDS)
- + ")\n";
+ + "\t--calibration_time <time in ms>\n"
+ + "\t Set the time for benchmark calibration.\n"
+ + "\t If it is not provided the time is calculated based on the target\n"
+ + "\t running time. With the target running time "
+ + "\t "
+ + TimeUnit.NANOSECONDS.convert(Benchmark.getTargetRunningTimeNs(), TimeUnit.MILLISECONDS)
+ + "\n"
+ + "\t it is:\n"
+ + "\t "
+ + TimeUnit.NANOSECONDS.convert(Benchmark.calculateCalibrationTimeNs(), TimeUnit.MILLISECONDS)
+ + ".\n";
public int parseCmdlineAndRun(String[] args) {
int errors = 0;
- String subtest = null;
boolean verify = true; // Verify all benchmark results by default.
- List<String> benchmarks = new ArrayList<String>();
+ List<Benchmark> benchmarks = new ArrayList<Benchmark>();
- for (int argIndex = 0; argIndex < args.length; argIndex++) {
- if (args[argIndex].startsWith("--")) {
- String option = args[argIndex].substring(2);
- if (option.equals("help")) {
- System.out.println(helpMessage);
- System.exit(0);
- } else if (option.equals("verbose")) {
- setLogLevel(SimpleLogger.LogLevel.INFO);
- } else if (option.equals("debug")) {
- setLogLevel(SimpleLogger.LogLevel.DEBUG);
- } else if (option.equals("list_benchmarks")) {
- for (int i = 0; i < BenchmarkList.benchmarkList.length; i++) {
- System.out.println(BenchmarkList.benchmarkList[i]);
- }
- System.exit(0);
- } else if (option.equals("benchmark_run_time")) {
- argIndex++;
- if (argIndex < args.length) {
- this.benchmarkTargetRunTimeNs =
- TimeUnit.NANOSECONDS.convert(Long.valueOf(args[argIndex]), TimeUnit.MILLISECONDS);
- } else {
- log.fatal("Require time.");
- }
- } else if (option.equals("calibration_min_time")) {
- argIndex++;
- if (argIndex < args.length) {
- this.calibrationMinTimeNs =
- TimeUnit.NANOSECONDS.convert(Long.valueOf(args[argIndex]), TimeUnit.MILLISECONDS);
- } else {
- log.fatal("Require time.");
- }
- } else if (option.equals("noverify")) {
- verify = false;
+ int argIndex = 0;
+ for (argIndex = 0; argIndex < args.length && args[argIndex].startsWith("--"); ++argIndex) {
+ String option = args[argIndex].substring(2);
+ if (option.equals("help")) {
+ System.out.println(helpMessage);
+ System.exit(0);
+ } else if (option.equals("verbose")) {
+ setLogLevel(SimpleLogger.LogLevel.INFO);
+ } else if (option.equals("debug")) {
+ setLogLevel(SimpleLogger.LogLevel.DEBUG);
+ } else if (option.equals("list_benchmarks")) {
+ for (int i = 0; i < BenchmarkList.benchmarkList.length; i++) {
+ System.out.println(BenchmarkList.benchmarkList[i]);
+ }
+ System.exit(0);
+ } else if (option.equals("target_running_time")) {
+ argIndex++;
+ if (argIndex < args.length) {
+ Benchmark.setTargetRunningTime(Long.valueOf(args[argIndex]));
+ } else {
+ log.fatal("Require time.");
+ }
+ } else if (option.equals("calibration_time")) {
+ argIndex++;
+ if (argIndex < args.length) {
+ Benchmark.setCalibrationTime(Long.valueOf(args[argIndex]));
} else {
- log.error("Unknown option `--" + option + "`.");
- System.out.println(helpMessage);
- System.exit(1);
+ log.fatal("Require time.");
}
+ } else if (option.equals("noverify")) {
+ verify = false;
} else {
- benchmarks.add(args[argIndex]);
+ log.error("Unknown option `--" + option + "`.");
+ System.out.println(helpMessage);
+ System.exit(1);
}
}
+ for (; argIndex < args.length; ++argIndex) {
+ benchmarks.add(Benchmark.fromString(args[argIndex]));
+ }
+
if (benchmarks.size() == 0) {
// No benchmarks were specified on the command line. Run all
// benchmarks available.
for (int i = 0; i < BenchmarkList.benchmarkList.length; i++) {
- benchmarks.add(BenchmarkList.benchmarkList[i]);
+ benchmarks.add(Benchmark.fromString(BenchmarkList.benchmarkList[i]));
}
}
// Run the benchmarks.
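Note that the reworked parsing loop consumes options only up to the first argument that does not start with "--", so options must now precede benchmark names; a usage sketch (values hypothetical):

    # Options first, then benchmark names or specifications:
    java org.linaro.bench.RunBench --noverify --target_running_time 400 benchmarks/micro/ArrayAccess
    # An option placed after a benchmark would be parsed as another benchmark string.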