author    Ting-Yuan Huang <laszio@google.com>    2016-12-09 11:04:46 -0800
committer chrome-bot <chrome-bot@chromium.org>  2016-12-09 15:18:17 -0800
commit    7a6bb17630ea86f12c1045c2c47339b02e79bdc2 (patch)
tree      1cd0fabc0fc8fdd7c49fd537b1a64031eac2ac1f /crosperf/experiment_factory.py
parent    c0194f9187813b63b31ad9e54649ee259c2231e5 (diff)
download  toolchain-utils-7a6bb17630ea86f12c1045c2c47339b02e79bdc2.tar.gz
crosperf: Add all_graphics_perf and all_crosbolt_perf
BUG=none
TEST=Run them on samus.

Change-Id: I2d66351d80ce87f84924fe3bf55abf903fc54829
Reviewed-on: https://chrome-internal-review.googlesource.com/311196
Commit-Ready: Ting-Yuan Huang <laszio@google.com>
Tested-by: Ting-Yuan Huang <laszio@google.com>
Reviewed-by: Ting-Yuan Huang <laszio@google.com>
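In practice, the two new names can be listed in a crosperf experiment file like any other benchmark. The snippet below is only a sketch: the board, remote address, and image label are placeholders, and it assumes the usual experiment-file layout of global settings followed by benchmark and image blocks.

    # Placeholder board/remote; substitute your own device.
    board: samus
    remote: 192.168.0.10

    benchmark: all_graphics_perf {
      iterations: 1
    }

    benchmark: all_crosbolt_perf {
      iterations: 1
    }

    test_image {
      chromeos_image: /path/to/chromiumos_test_image.bin
    }

Each composite name expands into the corresponding list of individual autotest or telemetry_Crosperf benchmarks defined in experiment_factory.py.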
Diffstat (limited to 'crosperf/experiment_factory.py')
-rw-r--r--    crosperf/experiment_factory.py    73
1 file changed, 58 insertions(+), 15 deletions(-)
diff --git a/crosperf/experiment_factory.py b/crosperf/experiment_factory.py
index 0fdaea03..05a78d65 100644
--- a/crosperf/experiment_factory.py
+++ b/crosperf/experiment_factory.py
@@ -54,8 +54,35 @@ telemetry_toolchain_perf_tests = [
'dromaeo.domcoremodify',
'smoothness.tough_webgl_cases',
]
-
-# 'page_cycler_v2.typical_25']
+graphics_perf_tests = [
+ 'graphics_GLBench',
+ 'graphics_GLMark2',
+ 'graphics_SanAngeles',
+ 'graphics_WebGLAquarium',
+ 'graphics_WebGLPerformance',
+]
+telemetry_crosbolt_perf_tests = [
+ 'octane',
+ 'kraken',
+ 'speedometer',
+ 'jetstream',
+ 'startup.cold.blank_page',
+ 'smoothness.top_25_smooth',
+]
+crosbolt_perf_tests = [
+ 'graphics_WebGLAquarium',
+ 'video_PlaybackPerf.h264',
+ 'video_PlaybackPerf.vp9',
+ 'video_WebRtcPerf',
+ 'BootPerfServerCrosPerf',
+ 'power_Resume',
+ 'video_PlaybackPerf.h264',
+ 'build_RootFilesystemSize',
+ 'cheets_AntutuTest',
+ 'cheets_PerfBootServer',
+ 'cheets_CandyCrushTest',
+ 'cheets_LinpackTest',
+]
class ExperimentFactory(object):
@@ -182,19 +209,35 @@ class ExperimentFactory(object):
show_all_results, retries, run_local)
benchmarks.append(benchmark)
else:
- # Add the single benchmark.
- benchmark = Benchmark(
- benchmark_name,
- test_name,
- test_args,
- iterations,
- rm_chroot_tmp,
- perf_args,
- suite,
- show_all_results,
- retries,
- run_local=False)
- benchmarks.append(benchmark)
+ if test_name == 'all_graphics_perf':
+ self.AppendBenchmarkSet(benchmarks,
+ graphics_perf_tests, '',
+ iterations, rm_chroot_tmp, perf_args, '',
+ show_all_results, retries, run_local=False)
+ elif test_name == 'all_crosbolt_perf':
+ self.AppendBenchmarkSet(benchmarks,
+ telemetry_crosbolt_perf_tests, test_args,
+ iterations, rm_chroot_tmp, perf_args,
+ 'telemetry_Crosperf', show_all_results,
+ retries, run_local)
+ self.AppendBenchmarkSet(benchmarks,
+ crosbolt_perf_tests, '',
+ iterations, rm_chroot_tmp, perf_args, '',
+ show_all_results, retries, run_local=False)
+ else:
+ # Add the single benchmark.
+ benchmark = Benchmark(
+ benchmark_name,
+ test_name,
+ test_args,
+ iterations,
+ rm_chroot_tmp,
+ perf_args,
+ suite,
+ show_all_results,
+ retries,
+ run_local=False)
+ benchmarks.append(benchmark)
if not benchmarks:
raise RuntimeError('No benchmarks specified')
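The new branches delegate to AppendBenchmarkSet, which is defined elsewhere in experiment_factory.py and is not shown in this diff. Judging from the call sites above, it creates one Benchmark per entry in the given list, reusing the shared experiment settings; a minimal sketch of that behavior (an assumption, not the exact upstream method) would be:

    def AppendBenchmarkSet(self, benchmarks, benchmark_list, test_args,
                           iterations, rm_chroot_tmp, perf_args, suite,
                           show_all_results, retries, run_local):
      """Add one Benchmark object per test in benchmark_list (sketch)."""
      for test_name in benchmark_list:
        # Each test in the set reuses the experiment-wide settings; the
        # benchmark name and the underlying test name are the same.
        benchmark = Benchmark(test_name, test_name, test_args, iterations,
                              rm_chroot_tmp, perf_args, suite,
                              show_all_results, retries, run_local)
        benchmarks.append(benchmark)

With suite='' the entries (e.g. graphics_GLBench or cheets_AntutuTest) are scheduled as plain autotest tests with run_local=False, while suite='telemetry_Crosperf' routes the telemetry names (octane, kraken, speedometer, ...) through the telemetry_Crosperf wrapper, matching the two AppendBenchmarkSet calls added for all_crosbolt_perf.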