author     Evgeny Astigeevich <evgeny.astigeevich@linaro.org>   2019-06-20 12:02:38 +0100
committer  Evgeny Astigeevich <evgeny.astigeevich@linaro.org>   2019-08-23 11:50:52 +0000
commit     5e5566a0483d6e58b5ac1725f992759a64f3cf85 (patch)
tree       4edaadc7ef345d1973789334790e1c64830410c1
parent     e847a32c7da6d40a012e100af2082ec831745401 (diff)
download   art-testing-5e5566a0483d6e58b5ac1725f992759a64f3cf85.tar.gz
Introduce Benchmark class
This CL contains:

1. A new class, Benchmark, which is a bridge to the actual code doing
   benchmarking. The Benchmark class code is based on the code from RunBench.
2. Benchmark supports 'setup' methods to initialize benchmarks.
3. Benchmark supports 'verify' methods to verify benchmarks.
4. Benchmark does a warm-up before measuring a benchmark's execution time,
   unless it is disabled by IterationsAnnotation.noWarmup.

Test: benchmarks_run_target.sh --iterations 10
Change-Id: Ib993595ee8aebd1c8b65b08fc7d9f6af9536aac1
-rw-r--r--  README.md                                              47
-rw-r--r--  framework/org/linaro/bench/Benchmark.java             354
-rw-r--r--  framework/org/linaro/bench/IterationsAnnotation.java    2
-rw-r--r--  framework/org/linaro/bench/RunBench.java              149
4 files changed, 404 insertions, 148 deletions
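
For orientation, here is a minimal sketch of a benchmark class following the contract this CL introduces. The class, package, and method names are hypothetical; the 'setup'/'time'/'verify' prefixes and signatures are the ones the new Benchmark class discovers:

    package benchmarks.micro;  // hypothetical package under benchmarks/

    public class Fibonacci {
        private int expected;

        // Matched by the "setup" prefix (no parameters); invoked once,
        // before calibration, timing, and verification.
        public void setupExpected() {
            expected = fib(20);
        }

        // Matched by the "time" prefix (single int parameter); the framework
        // fills in the iteration count, here via calibration since there is
        // no IterationsAnnotation.
        public void timeFib(int iterations) {
            for (int i = 0; i < iterations; ++i) {
                fib(20);
            }
        }

        // Matched by the "verify" prefix (boolean, no parameters); must not
        // depend on timeFib having run.
        public boolean verifyFib() {
            return fib(20) == expected;
        }

        private int fib(int n) {
            return n < 2 ? n : fib(n - 1) + fib(n - 2);
        }
    }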
diff --git a/README.md b/README.md
index 50befc3..795b385 100644
--- a/README.md
+++ b/README.md
@@ -158,35 +158,66 @@ existing benchmark. Besides, developers should also notice:
### Rules
-1. Test method names start with "time" -- Test launcher will find all timeXXX()
+1. Init/setup method names start with "setup" -- all found setupXXX() methods
+ will be used to initialize data needed for benchmarks. As the data is
+ initialized once, it must not be changed in "time"/"verify" methods.
+2. Test method names start with "time" -- Test launcher will find all timeXXX()
methods and run them.
-2. Verify methods start with "verify" -- all boolean verifyXXX() methods will
+3. Verify methods start with "verify" -- all boolean verifyXXX() methods will
be run to check the benchmark is working correctly.
`verify` methods should *not* depend on the benchmark having run before it is
called.
-3. Leave iterations as parameter -- Test launcher will fill it with a value
+4. Leave iterations as a parameter -- Test launcher will fill it with a value
to make sure it runs in a reasonable duration.
-4. Without auto-calibration benchmarks should run for a reasonable amount of
+5. Without auto-calibration, benchmarks should run for a reasonable amount of
time on target. Between 1 and 10 seconds is acceptable.
(`tools/benchmarks/run.py --target --dont-auto-calibrate`)
### Example
public class MyBenchmark {
+ private final static int N = 1000;
+ private int[] a;
public static void main(String [] args) {
MyBenchmark b = new MyBenchmark();
+ b.setupArray();
long before = System.currentTimeMillis();
- b.timeMethod0(1000);
- b.timeMethod1(1000);
+ b.timeSumArray(1000);
+ b.timeTestAdd(1000);
+ b.timeSfib(600);
long after = System.currentTimeMillis();
System.out.println("MyBenchmark: " + (after - before));
}
+ public void setupArray() {
+ a = new int[N];
+ for (int i = 0; i < N; ++i) {
+ a[i] = i;
+ }
+ }
+
+ private int sumArray(int[] a) {
+ int n = a.length;
+ int result = 0;
+ for (int i = 0; i < n; ++i) {
+ result += a[i];
+ }
+ return result;
+ }
+
+ public int timeSumArray(int iters) {
+ int result = 0;
+ for (int i = 0; i < iters; ++i) {
+ result += sumArray(a);
+ }
+ return result;
+ }
+
// +----> test method prefix should be "time..."
// |
// ignored <---+ | +-------> No need to set iterations. Test
- | | | framework will try to fill a
- | | | reasonable value automatically.
+ // | | | framework will try to fill a
+ // | | | reasonable value automatically.
// | | |
public int timeTestAdd(int iters) {
int result = 0;
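
Rule 3 above asks for boolean verify methods, which the README example does not show; a minimal sketch of one that would pair with timeSumArray (the expected value is the sum 0 + 1 + ... + 999 for N = 1000):

    public boolean verifySumArray() {
        // setupArray() fills a[i] = i for i in [0, N), so the sum is
        // N * (N - 1) / 2 = 499500 for N = 1000.
        return sumArray(a) == 499500;
    }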
diff --git a/framework/org/linaro/bench/Benchmark.java b/framework/org/linaro/bench/Benchmark.java
new file mode 100644
index 0000000..d9abc42
--- /dev/null
+++ b/framework/org/linaro/bench/Benchmark.java
@@ -0,0 +1,354 @@
+/*
+ * Copyright 2019 Linaro Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.linaro.bench;
+
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/*
+ * Benchmark is a class that is a bridge to the actual code doing benchmarking.
+ * Benchmark supports code following the format:
+ *
+ * - Instance methods with the prefix 'time' are benchmark code. They
+ * must have one parameter of type 'int': the number of iterations. A
+ * method can have an IterationsAnnotation which specifies the number of
+ * iterations. If the method does not have one, a calibration process is
+ * used to find the needed number of iterations.
+ *
+ * - Instance methods with the prefix 'setup' are used to initialize
+ * benchmark data.
+ *
+ * - Instance methods with the prefix 'verify' are used to check that
+ * 'time' methods produce correct results.
+ */
+public class Benchmark {
+ private static final String TIME_BENCH_METHOD_PREFIX = "time";
+
+ private static final String SETUP_METHOD_PREFIX = "setup";
+
+ private static final String VERIFY_BENCH_METHOD_PREFIX = "verify";
+
+ private static final int ITERATIONS_LIMIT = 0x400000;
+
+ /*
+ * BenchmarkMethod is a class to work with methods containing benchmarking code.
+ * Those methods run benchmarking code for a number of iterations. The number
+ * of iterations can either be provided by the method via IterationsAnnotation
+ * or be calculated using a calibration process.
+ */
+ private static class BenchmarkMethod {
+ private Object parent;
+ private Method method;
+ private int iterationsCount;
+ private String id;
+ private boolean doWarmup;
+
+ public BenchmarkMethod(Object parent, Method method) {
+ this.parent = parent;
+ this.method = method;
+ this.id = benchmarkIdentifier(method);
+ this.doWarmup = true;
+
+ this.iterationsCount = -1;
+ IterationsAnnotation annotation = method.getAnnotation(IterationsAnnotation.class);
+ if (annotation != null) {
+ this.doWarmup = !annotation.noWarmup();
+ if (annotation.iterations() > 0) {
+ this.iterationsCount = annotation.iterations();
+ }
+ }
+ }
+
+ public String getID() {
+ return id;
+ }
+
+ public boolean needsCalibration() {
+ return iterationsCount == -1;
+ }
+
+ public void calibrateIterations(long calibrationMinTimeNs, long targetRunTimeNs) {
+ // Estimate how long it takes to run one iteration.
+ long iterations = 1;
+ long duration = -1;
+ while ((duration < calibrationMinTimeNs) && (iterations < ITERATIONS_LIMIT)) {
+ iterations *= 2;
+ duration = timeIterations((int) iterations);
+ }
+ // Estimate the number of iterations needed to reach the target run
+ // time, based on the calibration phase.
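+ // Worked example with hypothetical timings: if the doubling loop above
+ // first exceeds calibrationMinTimeNs at iterations = 1024 with
+ // duration = 60 ms, then iterTime is about 58.6 us; with a 400 ms
+ // target run time this gives iterationsCount = 6826.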
+ double iterTime = duration / (double) iterations;
+ this.iterationsCount = (int) Math.max(1.0, targetRunTimeNs / iterTime);
+ }
+
+ public Result run() {
+ SimpleLogger log = SimpleLogger.getInstance();
+ log.debug("Running method: " + method.toString());
+
+ if (this.doWarmup) {
+ warmup();
+ }
+
+ long duration = timeIterations();
+ log.info(id + ": " + duration + " ns for " + iterationsCount + " iterations");
+
+ return new Result(this, duration, iterationsCount);
+ }
+
+ private long timeIterations() {
+ int iterations = this.iterationsCount;
+ if (needsCalibration()) {
+ SimpleLogger log = SimpleLogger.getInstance();
+ log.error(id + " is not calibrated. Using an iterations count of 1.");
+ iterations = 1;
+ }
+
+ return timeIterations(iterations);
+ }
+
+ private void warmup() {
+ int iterations = this.iterationsCount / 10;
+ if (iterations == 0) iterations = 1;
+ timeIterations(iterations);
+ }
+
+ private long timeIterations(int iterationsCount) {
+ long start = 0;
+ long end = 0;
+ try {
+ start = System.nanoTime();
+ method.invoke(parent, iterationsCount);
+ end = System.nanoTime();
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ return end - start;
+ }
+ }
+
+ /*
+ * Result represents a result of a benchmarking method.
+ */
+ public static class Result {
+ private BenchmarkMethod benchMethod;
+ private long duration;
+ private int iterations;
+
+ private Result(BenchmarkMethod benchMethod, long duration, int iterations) {
+ this.benchMethod = benchMethod;
+ this.duration = duration;
+ this.iterations = iterations;
+ }
+
+ public String toString() {
+ return String.format(
+ "%-40s%.2f ns per iteration", benchMethod.getID() + ": ", duration / (double) iterations);
+ }
+
+ public boolean isBetterThan(Result result) {
+ if (result == null) {
+ return true;
+ }
+ // Compare average ns/iteration in floating point to avoid truncation.
+ return (double) this.duration / this.iterations
+ < (double) result.duration / result.iterations;
+ }
+ }
+
+ private interface MethodSelector {
+ public boolean accept(Method method);
+ }
+
+ private static final class SetupMethodSelector implements MethodSelector {
+ public boolean accept(Method method) {
+ return method.getName().startsWith(SETUP_METHOD_PREFIX) && method.getParameterCount() == 0;
+ }
+ }
+
+ private static boolean doesMethodHaveOneIntParam(Method method) {
+ return method.getParameterCount() == 1 && method.getParameterTypes()[0] == int.class;
+ }
+
+ private static final class TimeBenchmarkMethodSelector implements MethodSelector {
+ public boolean accept(Method method) {
+ return method.getName().startsWith(TIME_BENCH_METHOD_PREFIX)
+ && doesMethodHaveOneIntParam(method);
+ }
+ }
+
+ private static final class ParticularBenchmarkMethodSelector implements MethodSelector {
+ private String particularBenchMethodName;
+
+ public ParticularBenchmarkMethodSelector(String methodName) {
+ particularBenchMethodName = methodName;
+ }
+
+ public boolean accept(Method method) {
+ return method.getName().equals(particularBenchMethodName)
+ && doesMethodHaveOneIntParam(method);
+ }
+ }
+
+ private static final class VerifyMethodSelector implements MethodSelector {
+ public boolean accept(Method method) {
+ return method.getName().startsWith(VERIFY_BENCH_METHOD_PREFIX)
+ && method.getReturnType() == boolean.class
+ && method.getParameterCount() == 0;
+ }
+ }
+
+ private Object benchInstance;
+ private List<Method> setupMethods;
+ private List<BenchmarkMethod> benchMethods;
+ private List<Method> verifyMethods;
+
+ public Benchmark(String benchName, long calibrationMinTimeNs, long benchmarkTargetRunTimeNs) {
+ if (benchName == null) {
+ throw new NullPointerException("The provided benchmark name is null.");
+ }
+
+ if (benchName.isEmpty()) {
+ throw new IllegalArgumentException("The provided benchmark name is an empty string");
+ }
+
+ // The benchmark name format is:
+ // path/to/BenchmarkClass(.Benchmark)?
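+ // For example (hypothetical names): "micro/Fibonacci" selects all
+ // time* benchmark methods of benchmarks.micro.Fibonacci, while
+ // "micro/Fibonacci.Fib" selects only timeFib.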
+ Pattern format = Pattern.compile("((?:\\w+\\/)*)(\\w+)(?:\\.(\\w+))?$");
+ Matcher matcher = format.matcher(benchName);
+ if (!matcher.matches()) {
+ throw new IllegalArgumentException(
+ "The provided benchmark name has an unexpected format: " + benchName);
+ }
+ String benchmarkClassPath = matcher.group(1);
+ if (!benchmarkClassPath.startsWith("benchmarks/")) {
+ benchmarkClassPath = "benchmarks/" + benchmarkClassPath;
+ }
+ benchmarkClassPath = benchmarkClassPath.replace('/', '.');
+ String benchmarkClass = matcher.group(2);
+ String benchmarkMethodName = matcher.group(3);
+
+ // Each method declared in benchmarkClass is checked to see whether it is
+ // one of the special methods ('time', 'setup', 'verify').
+ // Found methods are stored in the corresponding lists.
+ setupMethods = new ArrayList<Method>();
+ benchMethods = new ArrayList<BenchmarkMethod>();
+ verifyMethods = new ArrayList<Method>();
+ try {
+ Class<?> clazz = Class.forName(benchmarkClassPath + benchmarkClass);
+ benchInstance = clazz.newInstance();
+
+ MethodSelector setupMethodsSelector = new SetupMethodSelector();
+ MethodSelector benchMethodsSelector =
+ (benchmarkMethodName != null)
+ ? new ParticularBenchmarkMethodSelector(
+ TIME_BENCH_METHOD_PREFIX + benchmarkMethodName)
+ : new TimeBenchmarkMethodSelector();
+ MethodSelector verifyMethodsSelector = new VerifyMethodSelector();
+
+ for (Method method : clazz.getDeclaredMethods()) {
+ if (setupMethodsSelector.accept(method)) {
+ setupMethods.add(method);
+ } else if (benchMethodsSelector.accept(method)) {
+ benchMethods.add(new BenchmarkMethod(benchInstance, method));
+ } else if (verifyMethodsSelector.accept(method)) {
+ verifyMethods.add(method);
+ }
+ }
+
+ if (benchMethods.isEmpty()) {
+ throw new RuntimeException("No benchmark method in the benchmark: " + benchName);
+ }
+
+ // After all methods are processed, the benchmark is set up. This
+ // includes the iterations calibration process if it is needed.
+ setup(calibrationMinTimeNs, benchmarkTargetRunTimeNs);
+ } catch (Exception e) {
+ throw new RuntimeException("Failed to create a benchmark: " + benchName, e);
+ }
+ }
+
+ public Result[] run() {
+ Result[] results = new Result[benchMethods.size()];
+
+ int i = 0;
+ for (BenchmarkMethod method : benchMethods) {
+ results[i++] = method.run();
+ }
+
+ // Sort results by method's name.
+ Arrays.sort(
+ results,
+ new Comparator<Result>() {
+ @Override
+ public int compare(Result r1, Result r2) {
+ return r1.benchMethod.getID().compareTo(r2.benchMethod.getID());
+ }
+ });
+
+ return results;
+ }
+
+ public int verify() {
+ SimpleLogger log = SimpleLogger.getInstance();
+ int verifyFailures = 0;
+ try {
+ for (Method verifyMethod : verifyMethods) {
+ if (!(Boolean) verifyMethod.invoke(benchInstance)) {
+ log.error(verifyMethod.getName() + " failed.");
+ ++verifyFailures;
+ }
+ }
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ return verifyFailures;
+ }
+
+ private void setup(long calibrationMinTimeNs, long benchmarkTargetRunTimeNs) {
+ try {
+ for (Method method : setupMethods) {
+ method.invoke(benchInstance);
+ }
+
+ for (BenchmarkMethod method : benchMethods) {
+ if (method.needsCalibration()) {
+ method.calibrateIterations(calibrationMinTimeNs, benchmarkTargetRunTimeNs);
+ }
+ }
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ private static String benchmarkIdentifier(Method method) {
+ Pattern format = Pattern.compile("((?:\\w+\\.)*)(\\w+)");
+ Matcher matcher = format.matcher(method.getDeclaringClass().getName());
+ if (!matcher.matches()) {
+ return null;
+ }
+ String path = matcher.group(1);
+ path = path.replace('.', '/');
+ String className = matcher.group(2);
+ // Strip the "time" prefix.
+ String benchName = method.getName().substring(4);
+ return path + className + "." + benchName;
+ }
+}
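
A minimal sketch of driving the new class directly, as the reworked RunBench below does. The benchmark path and the calibration minimum are assumed values; the 400 ms target mirrors DEFAULT_BENCH_TARGET_RUN_TIME_NS in RunBench:

    import java.util.concurrent.TimeUnit;
    import org.linaro.bench.Benchmark;

    public class Example {
        public static void main(String[] args) {
            // Assumed values; RunBench supplies its own defaults.
            long calibrationMinTimeNs =
                TimeUnit.NANOSECONDS.convert(50, TimeUnit.MILLISECONDS);
            long targetRunTimeNs =
                TimeUnit.NANOSECONDS.convert(400, TimeUnit.MILLISECONDS);

            // "micro/Fibonacci" is a hypothetical benchmark; the constructor
            // resolves it to benchmarks.micro.Fibonacci, runs its setup*
            // methods, and calibrates any uncalibrated time* methods.
            Benchmark benchmark =
                new Benchmark("micro/Fibonacci", calibrationMinTimeNs, targetRunTimeNs);

            Benchmark.Result[] results = benchmark.run();
            int failures = benchmark.verify();  // count of failing verify* methods

            for (Benchmark.Result result : results) {
                System.out.println(result);  // "<id>: <X> ns per iteration"
            }
            System.exit(failures > 0 ? 1 : 0);
        }
    }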
diff --git a/framework/org/linaro/bench/IterationsAnnotation.java b/framework/org/linaro/bench/IterationsAnnotation.java
index c717f48..462e161 100644
--- a/framework/org/linaro/bench/IterationsAnnotation.java
+++ b/framework/org/linaro/bench/IterationsAnnotation.java
@@ -22,7 +22,7 @@ import java.lang.annotation.RetentionPolicy;
@Retention(RetentionPolicy.RUNTIME)
public @interface IterationsAnnotation {
- // false: need to warm up. Only valid when calibration is needed.
+ // false: need to warm up.
boolean noWarmup() default false;
// <=0: means we need to calibrate, others: no calibration and use this as iteration count
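
A usage sketch for the annotation (the benchmark method is hypothetical): a positive iterations value skips calibration, and after this change noWarmup = true skips the warm-up pass regardless of calibration:

    import org.linaro.bench.IterationsAnnotation;

    public class MyBenchmark {
        // Fixed iteration count, so no calibration; warm-up disabled too.
        @IterationsAnnotation(noWarmup = true, iterations = 100000)
        public void timeAddInts(int iterations) {
            int sum = 0;
            for (int i = 0; i < iterations; ++i) {
                sum += i;
            }
        }
    }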
diff --git a/framework/org/linaro/bench/RunBench.java b/framework/org/linaro/bench/RunBench.java
index 8d595bb..11517f0 100644
--- a/framework/org/linaro/bench/RunBench.java
+++ b/framework/org/linaro/bench/RunBench.java
@@ -17,14 +17,9 @@
package org.linaro.bench;
-import java.lang.reflect.Method;
import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
import java.util.List;
import java.util.concurrent.TimeUnit;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
public class RunBench {
// Minimum valid calibration time.
@@ -33,10 +28,6 @@ public class RunBench {
// The target benchmark running time.
public static final long DEFAULT_BENCH_TARGET_RUN_TIME_NS =
TimeUnit.NANOSECONDS.convert(400, TimeUnit.MILLISECONDS);
- public static final int ITERATIONS_LIMIT = 0x400000;
-
- // A method with this name will be executed as a microbenchmark.
- public static final String TESTNAME_PREFIX = "time";
private SimpleLogger log;
private long calibrationMinTimeNs;
@@ -53,142 +44,22 @@ public class RunBench {
log.setLogLevel(level);
}
- public IterationsAnnotation getTestProperties(Method method) {
- IterationsAnnotation it = method.getAnnotation(IterationsAnnotation.class);
- return it;
- }
-
- /*
- * Returns duration of given iterations in nano seconds.
- */
- public static long timeIterations(Object object, Method method, int iters) {
- long start = 0;
- long end = 0;
- try {
- start = System.nanoTime();
- method.invoke(object, iters);
- end = System.nanoTime();
- } catch (Exception e) {
- return -1;
- }
- return end - start;
- }
-
- static String benchmarkIdentifier(Method method) {
- Pattern format = Pattern.compile("((?:\\w+\\.)*)(\\w+)");
- Matcher matcher = format.matcher(method.getDeclaringClass().getName());
- if (! matcher.matches()) {
- return null;
- }
- String path = matcher.group(1);
- path = path.replace('.', '/');
- String className = matcher.group(2);
- // Filter the "time" prefix.
- String benchName = method.getName().substring(4);
- return path + className + "." + benchName;
- }
-
- /*
- * Run one benchmark. May have auto-calibration depends on method's IterationsAnnotation.
- */
- public void runOneBench(Object instance, Method method) throws Exception {
- log.debug("Running method: " + method.toString());
-
- IterationsAnnotation anno = getTestProperties(method);
- long iterations;
- long duration = -1;
- double time;
- double iterationTime;
-
- if (anno != null && anno.iterations() > 0) {
- iterations = anno.iterations();
- duration = timeIterations(instance, method, (int) iterations);
- } else {
- // Estimate how long it takes to run one iteration.
- iterations = 1;
- while ((duration < calibrationMinTimeNs) && (iterations < ITERATIONS_LIMIT)) {
- iterations *= 2;
- duration = timeIterations(instance, method, (int) iterations);
- }
- // Estimate the number of iterations to run based on the calibration
- // phase, and benchmark the function.
- double iterTime = duration / (double) iterations;
- iterations = (int) Math.max(1.0, benchmarkTargetRunTimeNs / iterTime);
- duration = timeIterations(instance, method, (int) iterations);
- }
-
- iterationTime = duration / (float) iterations;
-
- log.info(benchmarkIdentifier(method) + ": "
- + duration + " ns for " + iterations + " iterations");
- // The runner expects each output line to end with "per iteration"
- System.out.printf("%-40s%.2f ns per iteration\n",
- benchmarkIdentifier(method) + ":", iterationTime);
- }
-
public int runBenchSet(String target, boolean verify) {
if (target == null) {
return 1;
}
- // The target format is:
- // path/to/BenchmarkClass(.Benchmark)?
- Pattern format = Pattern.compile("((?:\\w+\\/)*)(\\w+)(?:\\.(\\w+))?$");
- Matcher matcher = format.matcher(target);
- if (! matcher.matches()) {
- return 1;
- }
- String benchmarkClassPath = matcher.group(1);
- if (!benchmarkClassPath.startsWith("benchmarks/")) {
- benchmarkClassPath = "benchmarks/" + benchmarkClassPath;
- }
- benchmarkClassPath = benchmarkClassPath.replace('/', '.');
- String benchmarkClass = matcher.group(2);
- String benchmark = matcher.group(3);
-
- List<Method> benchMethods = new ArrayList<Method>(5);
- List<Method> verifyMethods = new ArrayList<Method>(2);
try {
- Class<?> clazz = Class.forName(benchmarkClassPath + benchmarkClass);
- Object instance = clazz.newInstance();
- if (benchmark != null) {
- Method method = clazz.getMethod(TESTNAME_PREFIX + benchmark, int.class);
- benchMethods.add(method);
- } else {
- for (Method method : clazz.getDeclaredMethods()) {
- if (method.getName().startsWith(TESTNAME_PREFIX)) {
- benchMethods.add(method);
- } else if (method.getName().startsWith("verify") &&
- method.getReturnType() == boolean.class) {
- verifyMethods.add(method);
- }
- }
- }
- // Sort benchMethods by name.
- Collections.sort(benchMethods, new Comparator<Method>() {
- @Override
- public int compare(Method m1, Method m2) {
- return m1.getName().compareTo(m2.getName());
- }
- });
-
- for (Method method : benchMethods) {
- // Run each method as a benchmark.
- runOneBench(instance, method);
- }
-
- // Optionally run all verify* methods to check benchmark's work.
- if (verify) {
- int verifyFailures = 0;
- for (Method verifyMethod : verifyMethods) {
- if (!(Boolean)verifyMethod.invoke(instance)) {
- log.error(verifyMethod.getName() + " failed.");
- verifyFailures++;
- }
- }
- if (verifyFailures > 0) {
- return 1;
- }
+ Benchmark benchmark = new Benchmark(target, calibrationMinTimeNs,
+ benchmarkTargetRunTimeNs);
+ Benchmark.Result[] results = benchmark.run();
+ int verifyFailures = 0;
+ if (verify) {
+ verifyFailures = benchmark.verify();
+ }
+ for (Benchmark.Result result : results) {
+ System.out.println(result.toString());
+ }
+ if (verifyFailures > 0) {
+ return 1;
}
} catch (Exception e) {
// TODO: filter exceptions.