-rw-r--r--  crosperf/experiment_files/README | 33
-rw-r--r--  crosperf/experiment_files/aes_perf | 7
-rw-r--r--  crosperf/experiment_files/aes_perf.exp | 21
-rw-r--r--  crosperf/experiment_files/bloat_perf | 6
-rw-r--r--  crosperf/experiment_files/bloat_perf.exp | 25
-rw-r--r--  crosperf/experiment_files/morejs_perf | 6
-rw-r--r--  crosperf/experiment_files/morejs_perf.exp | 25
-rw-r--r--  crosperf/experiment_files/non-telemetry-tests.exp | 31
-rw-r--r--  crosperf/experiment_files/official-image.exp | 40
-rw-r--r--  crosperf/experiment_files/page_cycler | 6
-rw-r--r--  crosperf/experiment_files/page_cycler.exp | 28
-rw-r--r--  crosperf/experiment_files/page_cycler_perf | 43
-rw-r--r--  crosperf/experiment_files/page_cycler_perf.exp | 45
-rw-r--r--  crosperf/experiment_files/telemetry-crosperf-suites.exp | 45
-rw-r--r--  crosperf/experiment_files/telemetry-crosperf-with-external-chrome-src.exp | 31
-rw-r--r--  crosperf/experiment_files/telemetry-crosperf-with-profiler.exp | 43
-rw-r--r--  crosperf/experiment_files/telemetry-crosperf.exp | 30
-rw-r--r--  crosperf/experiment_files/telemetry-pure.exp | 28
-rw-r--r--  crosperf/experiment_files/telemetry-without-autotest.exp | 31
-rw-r--r--  crosperf/experiment_files/test_that.exp | 40
-rw-r--r--  crosperf/experiment_files/toolchain | 16
-rw-r--r--  crosperf/experiment_files/trybot-image.exp | 33
22 files changed, 349 insertions, 264 deletions
diff --git a/crosperf/experiment_files/README b/crosperf/experiment_files/README
index 5c0e3d41..60073492 100644
--- a/crosperf/experiment_files/README
+++ b/crosperf/experiment_files/README
@@ -1,26 +1,33 @@
-To use these experiment files, add board, remote and images and run crosperf
-on them.
+To use these experiment files, replace the board, remote and images
+placeholders and run crosperf on them.
Further information about crosperf:
-https://sites.google.com/a/google.com/compiler-chromeos-workflows/crosperf
+https://sites.google.com/a/google.com/chromeos-toolchain-team-home2/home/team-tools-and-scripts/crosperf-cros-image-performance-comparison-tool
-The final experiment file should look something like the following:
+The final experiment file should look something like the following (but with
+different actual values for the fields):
-board: <board>
-remote: <ip address or machine name>
+board: lumpy
+remote: 123.45.67.89
# Add images you want to test:
-label: myimage {
- chromeos_image: <path to image>
+my_image {
+ chromeos_image: /usr/local/chromeos/src/build/images/lumpy/chromiumos_test_image.bin
}
-# Paste experiment benchmarks here. Example, I pasted aes_perf here.
+vanilla_image {
+ chromeos_root: /usr/local/chromeos
+ build: lumpy-release/R35-5672.0.0
+}
+
+# Paste experiment benchmarks here. For example, I pasted page_cycler.morejs here.
# This experiment just runs a short autotest which measures the performance of
-# aes encryption. In addition, it profiles
+# Telemetry's page_cycler.morejs. In addition, it profiles the run with perf.
-profile_type: record
-profile_counters: instructions cycles
+perg_args: record -e cycles
-benchmark: platform_AesThroughput {
+benchmark: page_cycler.morejs {
+ suite: telemetry_Crosperf
+ iterations: 1
}
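
As a quick reference, the pieces the README hunk above describes can be assembled into one minimal experiment file along these lines; every value shown (board, remote, paths, build) is an illustrative placeholder taken from the README text, not a recommendation:

    name: readme_example
    board: lumpy
    remote: 123.45.67.89

    perf_args: record -e cycles

    benchmark: page_cycler.morejs {
      suite: telemetry_Crosperf
      iterations: 1
    }

    my_image {
      chromeos_image: /usr/local/chromeos/src/build/images/lumpy/chromiumos_test_image.bin
    }

    vanilla_image {
      chromeos_root: /usr/local/chromeos
      build: lumpy-release/R35-5672.0.0
    }
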
diff --git a/crosperf/experiment_files/aes_perf b/crosperf/experiment_files/aes_perf
deleted file mode 100644
index 0c54ccbd..00000000
--- a/crosperf/experiment_files/aes_perf
+++ /dev/null
@@ -1,7 +0,0 @@
-# This experiment just runs a short autotest which measures the performance of
-# aes encryption. In addition, it profiles
-
-profile_args: record -e cycles -e instructions
-
-benchmark: platform_AesThroughput {
-}
diff --git a/crosperf/experiment_files/aes_perf.exp b/crosperf/experiment_files/aes_perf.exp
new file mode 100644
index 00000000..063c74be
--- /dev/null
+++ b/crosperf/experiment_files/aes_perf.exp
@@ -0,0 +1,21 @@
+# This experiment just runs a short autotest which measures the performance of
+# aes encryption.
+#
+# You should replace all the placeholders, marked by angle-brackets, with the
+# appropriate actual values.
+
+name: aes_example
+board: <your-board-goes-here>
+
+# Note: You can specify multiple remotes, to run your tests in parallel on
+# multiple machines. e.g. "remote: test-machine-1.com test-machine2.com
+# test-machine3.com"
+remote: <your-remote-goes-here>
+
+benchmark: platform_AesThroughput {
+}
+
+# Replace the chromeos image below with the actual path to your test image.
+test_image {
+ chromeos_image:<path-to-your-chroot>/src/build/images/<board>/test-image/chromiumos_test_image.bin
+}
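
With the angle-bracket placeholders in aes_perf.exp filled in, the same experiment might read as follows; the board, machine names, and chroot path are invented examples, and the multi-machine remote line simply follows the comment in the file:

    name: aes_example
    board: lumpy
    remote: test-machine-1.com test-machine-2.com test-machine-3.com

    benchmark: platform_AesThroughput {
    }

    test_image {
      chromeos_image: /usr/local/chromeos/src/build/images/lumpy/test-image/chromiumos_test_image.bin
    }
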
diff --git a/crosperf/experiment_files/bloat_perf b/crosperf/experiment_files/bloat_perf
deleted file mode 100644
index f8258ee1..00000000
--- a/crosperf/experiment_files/bloat_perf
+++ /dev/null
@@ -1,6 +0,0 @@
-perf_args: record -e cycles
-
-benchmark: bloat {
- autotest_name: desktopui_PyAutoPerfTests
- autotest_args: --args='--iterations=1 perf.PageCyclerTest.testBloatFile'
-}
diff --git a/crosperf/experiment_files/bloat_perf.exp b/crosperf/experiment_files/bloat_perf.exp
new file mode 100644
index 00000000..5fa464a0
--- /dev/null
+++ b/crosperf/experiment_files/bloat_perf.exp
@@ -0,0 +1,25 @@
+# This experiment just runs a short Telemetry autotest which measures
+# the performance of the page_cycler.bloat test.
+#
+# You should replace all the placeholders, marked by angle-brackets, with the
+# appropriate actual values.
+
+name: bloat_perf_example
+board: <your-board-goes-here>
+
+# Note: You can specify multiple remotes, to run your tests in parallel on
+# multiple machines. e.g. "remote: test-machine-1.com test-machine2.com
+# test-machine3.com"
+remote: <your-remote-goes-here>
+
+perf_args: record -e cycles
+
+benchmark: page_cycler.bloat {
+ suite: telemetry_Crosperf
+ iterations: 1
+}
+
+# Replace the chromeos image below with the actual path to your test image.
+test_image {
+ chromeos_image:<path-to-your-chroot>/src/build/images/<board>/test-image/chromiumos_test_image.bin
+}
diff --git a/crosperf/experiment_files/morejs_perf b/crosperf/experiment_files/morejs_perf
deleted file mode 100644
index a02f15f5..00000000
--- a/crosperf/experiment_files/morejs_perf
+++ /dev/null
@@ -1,6 +0,0 @@
-perf_args: record -e cycles
-
-benchmark: morejs {
- autotest_name: desktopui_PyAutoPerfTests
- autotest_args: --args='--iterations=1 perf.PageCyclerTest.testMoreJSFile'
-}
diff --git a/crosperf/experiment_files/morejs_perf.exp b/crosperf/experiment_files/morejs_perf.exp
new file mode 100644
index 00000000..79005d6b
--- /dev/null
+++ b/crosperf/experiment_files/morejs_perf.exp
@@ -0,0 +1,25 @@
+# This experiment just runs a short Telemetry autotest which measures
+# the performance of the page_cycler.morejs test.
+#
+# You should replace all the placeholders, marked by angle-brackets, with the
+# appropriate actual values.
+
+name: morejs_perf_example
+board: <your-board-goes-here>
+
+# Note: You can specify multiple remotes, to run your tests in parallel on
+# multiple machines. e.g. "remote: test-machine-1.com test-machine2.com
+# test-machine3.com"
+remote: <your-remote-goes-here>
+
+perf_args: record -e cycles
+
+benchmark: page_cycler.morejs {
+ suite: telemetry_Crosperf
+ iterations: 1
+}
+
+# Replace the chromeos image below with the actual path to your test image.
+test_image {
+ chromeos_image:<path-to-your-chroot>/src/build/images/<board>/test-image/chromiumos_test_image.bin
+}
diff --git a/crosperf/experiment_files/non-telemetry-tests.exp b/crosperf/experiment_files/non-telemetry-tests.exp
new file mode 100644
index 00000000..0ad1fe5c
--- /dev/null
+++ b/crosperf/experiment_files/non-telemetry-tests.exp
@@ -0,0 +1,31 @@
+# This example experiment file shows how to run some basic non-Telemetry
+# autotest tests.
+#
+# You should replace all the placeholders, marked by angle-brackets,
+# with the appropriate actual values.
+
+name: non_telemetry_tests_example
+board: <your-board-goes-here>
+
+# Note: You can specify multiple remotes, to run your tests in parallel on
+# multiple machines. e.g. "remote: test-machine-1.com test-machine2.com
+# test-machine3.com"
+remote: <your-remote-goes-here>
+
+benchmark: BootPerfServer {
+ test_name: BootPerfServer
+ iterations: 1
+}
+
+benchmark: bvt {
+ test_name: suite:bvt
+}
+
+benchmark: login_LoginSuccess {
+ test_name: login_LoginSuccess
+}
+
+# Replace the chromeos image below with the actual path to your test image.
+test_image {
+ chromeos_image:<path-to-your-chroot>/src/build/images/<board>/test-image/chromiumos_test_image.bin
+}
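
For contrast with the Telemetry examples elsewhere in this commit, the sketch below shows the two benchmark shapes side by side: a plain autotest identified by test_name (as in the file above), and a Telemetry benchmark routed through the telemetry_Crosperf suite; both blocks are copied from examples in this commit:

    # Plain (non-Telemetry) autotest: identified by test_name only.
    benchmark: login_LoginSuccess {
      test_name: login_LoginSuccess
    }

    # Telemetry benchmark run via autotest: identified by the suite field.
    benchmark: octane {
      suite: telemetry_Crosperf
      iterations: 1
    }
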
diff --git a/crosperf/experiment_files/official-image.exp b/crosperf/experiment_files/official-image.exp
index 72be02bc..bce7d6a3 100644
--- a/crosperf/experiment_files/official-image.exp
+++ b/crosperf/experiment_files/official-image.exp
@@ -1,14 +1,18 @@
-# This is an example experiment file for Crosperf, showing how to run
-# a basic test, using a (previously made) trybot image.
+# This example experiment file shows how to run a basic test, using
+# official images.
+#
+# You should replace all the placeholders, marked by angle-brackets,
+# with the appropriate actual values.
+
+name: official_image_example
-name: trybot_example
-# Replace board and remote values below appropriately. e.g. "lumpy" and
-# "123.45.678.901" or "my-machine.blah.com".
board: <your-board-goes-here>
-remote: <your-remote-ip-address-here>
-# You can replace 'canvasmark' below with the name of the Telemetry
-# benchmakr you want to run.
+# Note: You can specify multiple remotes, to run your tests in parallel on
+# multiple machines. e.g. "remote: test-machine-1.com test-machine2.com
+# test-machine3.com"
+remote: <your-remote-goes-here>
+
benchmark: canvasmark {
suite:telemetry_Crosperf
iterations: 1
@@ -17,15 +21,21 @@ benchmark: canvasmark {
# Replace <path-to-your-chroot-goes-here> with the actual directory path
# to the top of your ChromiumOS chroot.
-trybot_image {
+first_official_image {
chromeos_root:<path-to-your-chroot-goes-here>
- # Replace <xbuddy-official-image-designation> with the xbuddy syntax
+ # Replace "latest-official" with the appropriate xbuddy version alias
# for the official image you want to use (see
# http://www.chromium.org/chromium-os/how-tos-and-troubleshooting/using-the-dev-server/xbuddy-for-devserver#TOC-XBuddy-Paths
- # for xbuddy syntax). Omit the "http://xbuddy/remote/<board>/" prefix.
- # For example, if you want to use the "latest-dev" official image,
- # your build field would look like:
- # build:latest-dev
- build:<xbuddy-official-image-designation>
+ # for xbuddy syntax).
+ build: latest-official
+}
+
+second_official_image {
+ # Replace <path-to-your-chroot-goes-here> with actual path.
+ chromeos_root:<path-to-your-chroot-goes-here>
+ # Replace "lumpy-release/R35-5672.0.0" with the official image you want
+ # to use.
+ build:lumpy-release/R35-5672.0.0
}
+
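
The two label blocks in official-image.exp differ only in how the build field is written: an xbuddy version alias versus an explicit official build name. Filled-in sketches of both, with an example chroot path substituted for the placeholder:

    first_official_image {
      # xbuddy version alias resolved by the dev server.
      chromeos_root: /usr/local/chromeos
      build: latest-official
    }

    second_official_image {
      # Explicit official build name.
      chromeos_root: /usr/local/chromeos
      build: lumpy-release/R35-5672.0.0
    }
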
diff --git a/crosperf/experiment_files/page_cycler b/crosperf/experiment_files/page_cycler
deleted file mode 100644
index ada9ed67..00000000
--- a/crosperf/experiment_files/page_cycler
+++ /dev/null
@@ -1,6 +0,0 @@
-# This experiment runs page cycler tests.
-
-benchmark: AllPageCyclers {
- autotest_name: desktopui_PyAutoPerfTests
- autotest_args: --args='perf.PageCyclerTest'
-}
diff --git a/crosperf/experiment_files/page_cycler.exp b/crosperf/experiment_files/page_cycler.exp
new file mode 100644
index 00000000..6cb6166d
--- /dev/null
+++ b/crosperf/experiment_files/page_cycler.exp
@@ -0,0 +1,28 @@
+# This experiment file shows how to run all of the Telemetry
+# page_cycler tests.
+#
+# You should replace all the placeholders, marked by angle-brackets,
+# with the appropriate actual values.
+
+name: all_page_cyclers_example
+board: <your-board-goes-here>
+
+# Note: You can specify multiple remotes, to run your tests in
+# parallel on multiple machines. e.g. "remote: test-machine-1.com
+# test-machine2.com test-machine3.com"
+
+remote: <your-remote-goes-here>
+
+
+# NOTE: all_pagecyclers is a Crosperf alias that will cause all of the
+# Telemetry page_cycler benchmark tests to be run.
+benchmark: all_pagecyclers {
+ suite: telemetry_Crosperf
+ iterations: 2
+}
+
+# Replace the chromeos image below with the actual path to your test
+# image.
+test_image {
+ chromeos_image:<path-to-your-chroot>/src/build/images/<board>/test-image/chromiumos_test_image.bin
+}
diff --git a/crosperf/experiment_files/page_cycler_perf b/crosperf/experiment_files/page_cycler_perf
deleted file mode 100644
index 7f5e7118..00000000
--- a/crosperf/experiment_files/page_cycler_perf
+++ /dev/null
@@ -1,43 +0,0 @@
-# This experiment profiles all page cyclers.
-
-perf_args: record -e cycles
-
-benchmark: morejs {
- autotest_name: desktopui_PyAutoPerfTests
- autotest_args: --args='--iterations=10 perf.PageCyclerTest.testMoreJSFile'
-}
-
-benchmark: alexa {
- autotest_name: desktopui_PyAutoPerfTests
- autotest_args: --args='--iterations=10 perf.PageCyclerTest.testAlexaFile'
-}
-
-benchmark: bloat {
- autotest_name: desktopui_PyAutoPerfTests
- autotest_args: --args='--iterations=10 perf.PageCyclerTest.testBloatFile'
-}
-
-benchmark: dhtml {
- autotest_name: desktopui_PyAutoPerfTests
- autotest_args: --args='--iterations=10 perf.PageCyclerTest.testDHTMLFile'
-}
-
-benchmark: intl1 {
- autotest_name: desktopui_PyAutoPerfTests
- autotest_args: --args='--iterations=10 perf.PageCyclerTest.testIntl1File'
-}
-
-benchmark: intl2 {
- autotest_name: desktopui_PyAutoPerfTests
- autotest_args: --args='--iterations=10 perf.PageCyclerTest.testIntl2File'
-}
-
-benchmark: moz {
- autotest_name: desktopui_PyAutoPerfTests
- autotest_args: --args='--iterations=10 perf.PageCyclerTest.testMozFile'
-}
-
-benchmark: moz2 {
- autotest_name: desktopui_PyAutoPerfTests
- autotest_args: --args='--iterations=10 perf.PageCyclerTest.testMoz2File'
-}
diff --git a/crosperf/experiment_files/page_cycler_perf.exp b/crosperf/experiment_files/page_cycler_perf.exp
new file mode 100644
index 00000000..f3f962b3
--- /dev/null
+++ b/crosperf/experiment_files/page_cycler_perf.exp
@@ -0,0 +1,45 @@
+# This experiment profiles some of the Telemetry page cycler tests,
+# using 'perf' on the remotes to get performance profiles.
+#
+# You should replace all the placeholders, marked by angle-brackets,
+# with the appropriate actual values.
+
+name: page_cycler_perf_example
+board: <your-board-goes-here>
+
+# Note: You can specify multiple remotes, to run your tests in parallel on
+# multiple machines. e.g. "remote: test-machine-1.com test-machine2.com
+# test-machine3.com"
+remote: <your-remote-goes-here>
+
+perf_args: record -e cycles,instructions
+
+benchmark: page_cycler.morejs {
+ suite: telemetry_Crosperf
+ iterations: 10
+}
+
+benchmark: page_cycler.bloat {
+ suite: telemetry_Crosperf
+ iterations: 10
+}
+
+benchmark: page_cycler.dhtml {
+ suite: telemetry_Crosperf
+ iterations: 10
+}
+
+benchmark: page_cycler.intl_ar_fa_he {
+ suite: telemetry_Crosperf
+ iterations: 10
+}
+
+benchmark: page_cycler.moz {
+ suite: telemetry_Crosperf
+ iterations: 10
+}
+
+# Replace the chromeos image below with the actual path to your test image.
+test_image {
+ chromeos_image:<path-to-your-chroot>/src/build/images/<board>/test-image/chromiumos_test_image.bin
+}
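
The perf_args line holds the arguments handed to 'perf' on the remote machines. The profiler example later in this commit notes that 'record' can be swapped for 'stat' if the other arguments are adjusted; a hedged sketch of both variants (whether 'stat' accepts the same event list here is an assumption, not something the files state):

    # Sampled profile; crosperf automatically adds -a for 'record'.
    perf_args: record -e cycles,instructions

    # Aggregate counters instead of samples (assumed to take the same -e list).
    perf_args: stat -e cycles,instructions
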
diff --git a/crosperf/experiment_files/telemetry-crosperf-suites.exp b/crosperf/experiment_files/telemetry-crosperf-suites.exp
index cb1a258a..2caa588d 100644
--- a/crosperf/experiment_files/telemetry-crosperf-suites.exp
+++ b/crosperf/experiment_files/telemetry-crosperf-suites.exp
@@ -1,15 +1,19 @@
-# This is an example experiment file for Crosperf, showing how to run
-# a Telemetry test, using test_that and autotest. This is similar to
-# the basic_telemetry_crosperf_example (in telemetry-crosperf.exp),
-# except that it shows how to invoke suites of tests. There are
-# currently two suites defined for crosperf_Telemetry: all_perfv2 and
-# all_pagecyclers.
+# This example experiment file shows how to invoke sets of tests (a
+# set is a group of tests that can be invoked by a single alias).
+# There are currently three sets defined for telemetry_Crosperf:
+# all_perfv2, all_pagecyclers, and all_toolchain_perf.
+#
+# You should replace all the placeholders, marked by angle-brackets,
+# with the appropriate actual values.
+
name: telemetry_crosperf_suites_example
-# Replace board and remote values below appropriately. e.g. "lumpy" and
-# "123.45.678.901" or "my-machine.blah.com".
board: <your-board-goes-here>
-remote: <your-remote-ip-address-here>
+
+# Note: You can specify multiple remotes, to run your tests in parallel on
+# multiple machines. e.g. "remote: test-machine-1.com test-machine2.com
+# test-machine3.com"
+remote: <your-remote-goes-here>
# The example below will run all the benchmarks in the perf_v2 suite.
# The exact list of benchmarks that will be run can be seen in
@@ -27,18 +31,21 @@ benchmark: all_pagecyclers {
iterations: 1
}
-# Replace <path-to-your-chroot-goes-here> and <board-goes-here> below.
-# You can optionally add "chrome_src:" followed by the path to a Chrome
-# source tree outside your chroot that you wish to use for running
-# Telemetry.
-old_image {
- chromeos_image:<path-to-your-chroot-goes-here>/src/build/images/<board-goes-here>/latest/chromiumos_test_image.bin
+# The example below will run all the benchmarks in the toolchain performance set.
+# The exact list of benchmarks that will be run can be seen in
+# crosperf/experiment_factory.py
+benchmark: all_toolchain_perf {
+ suite:telemetry_Crosperf
+ iterations: 1
+}
+
+# Replace the chromeos image below with the actual path to your test image.
+test_image_1 {
+ chromeos_image:<path-to-your-chroot>/src/build/images/<board>/test-image/chromiumos_test_image.bin
}
-# Replace <path-to-your-other-chroot-goes-here> and <board-goes-here> below.
-# You can optionally add "chrome_src:" followed by the path to a Chrome
-# source tree outside your chroot that you wish to use for running
-# Telemetry.
+# Replace the chromeos image below with the actual path to your second
+# test image (if desired).
new_image {
chromeos_image:<path-to-your-other-chroot-goes-here>/src/build/images/<board-goes-here>/latest/chromiumos_test_image.bin
}
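
For reference, telemetry-crosperf-suites.exp mentions three set aliases: all_perfv2, all_pagecyclers, and all_toolchain_perf. Each is invoked with an identically shaped benchmark block; the iteration counts below are illustrative:

    benchmark: all_perfv2 {
      suite: telemetry_Crosperf
      iterations: 1
    }

    benchmark: all_pagecyclers {
      suite: telemetry_Crosperf
      iterations: 1
    }

    benchmark: all_toolchain_perf {
      suite: telemetry_Crosperf
      iterations: 1
    }
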
diff --git a/crosperf/experiment_files/telemetry-crosperf-with-external-chrome-src.exp b/crosperf/experiment_files/telemetry-crosperf-with-external-chrome-src.exp
index 35f7a4ec..551fac67 100644
--- a/crosperf/experiment_files/telemetry-crosperf-with-external-chrome-src.exp
+++ b/crosperf/experiment_files/telemetry-crosperf-with-external-chrome-src.exp
@@ -1,26 +1,27 @@
-# This is an example experiment file for Crosperf, showing how to run
-# a Telemetry test, using test_that and autotest. This is similar to
-# the basic_telemetry_crosperf_example (in telemetry-crosperf.exp),
-# except that it shows how to specify an external chrome source tree
-# (rather than using the one inside the chroot).
+# This example experiment file shows how to specify an external
+# chrome source tree (rather than using the one inside the chroot).
+# The Telemetry tests will be run from the external Chrome source
+# tree.
+#
+# You should replace all the placeholders, marked by angle-brackets,
+# with the appropriate actual values.
name: telemetry_crosperf_external_src_example
-# Replace board and remote values below appropriately. e.g. "lumpy" and
-# "123.45.678.901" or "my-machine.blah.com".
+
board: <your-board-goes-here>
-remote: <your-remote-ip-address-here>
-# Replace "octane" below with the name of the Telemetry benchmark you
-# want to run.
+# Note: You can specify multiple remotes, to run your tests in parallel on
+# multiple machines. e.g. "remote: test-machine-1.com test-machine2.com
+# test-machine3.com"
+remote: <your-remote-goes-here>
+
benchmark: octane {
suite: telemetry_Crosperf
iterations: 1
}
-# Replace <path-to-your-chroot-goes-here> and <board-goes-here> below.
-# Also replace <path-to-chrome-source-root>.
-vanilla_image {
- chromeos_image:<path-to-chroot-goes-here>/src/build/images/<board-goes-here>/latest/chromiumos_test_image.bin
- chrome_src:<path-to-chrome-source-root>/chrome-src-internal
+# Replace the chromeos image below with the actual path to your test image.
+test_image {
+ chromeos_image:<path-to-your-chroot>/src/build/images/<board>/test-image/chromiumos_test_image.bin
}
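
The stated purpose of this file is pointing Telemetry at a Chrome source tree outside the chroot, which the removed label block did with a chrome_src field. A sketch of an image label that keeps that field, with placeholder paths:

    test_image {
      chromeos_image: /usr/local/chromeos/src/build/images/lumpy/test-image/chromiumos_test_image.bin
      # External Chrome source tree used for running Telemetry.
      chrome_src: /usr/local/chrome-src-internal
    }
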
diff --git a/crosperf/experiment_files/telemetry-crosperf-with-profiler.exp b/crosperf/experiment_files/telemetry-crosperf-with-profiler.exp
index 3b90ae14..3bc39d42 100644
--- a/crosperf/experiment_files/telemetry-crosperf-with-profiler.exp
+++ b/crosperf/experiment_files/telemetry-crosperf-with-profiler.exp
@@ -1,36 +1,35 @@
-# This is an example experiment file for Crosperf, showing how to run
-# a Telemetry test, using test_that and autotest. This is similar to
-# the basic_telemetry_crosperf_example (in telemetry-crosperf.exp),
-# except that it shows how to invoke the profiler (via the perf_args
-# above the benchmark).
+# This example experiment file shows how to invoke the profiler (via
+# the perf_args above the benchmark).
+#
+# You should replace all the placeholders, marked by angle-brackets,
+# with the appropriate actual values.
+
name: telemetry_crosperf_profiler_example
-# Replace board and remote values below appropriately. e.g. "lumpy" and
-# "123.45.678.901" or "my-machine.blah.com".
+
board: <your-board-goes-here>
-remote: <your-remote-ip-address-here>
+
+# Note: You can specify multiple remotes, to run your tests in parallel on
+# multiple machines. e.g. "remote: test-machine-1.com test-machine2.com
+# test-machine3.com"
+remote: <your-remote-goes-here>
# Below is the line that causes the profiler to run. Currently the
-# only profiler option is running 'perf' on the DUT. If you want you
-# can replace 'record' with 'stat', to get 'perf' on the DUT to do
-# something different. You would also need to change the other args
-# accordingly. Crosperf automatically inserts a '-a' if you use
-# 'record' for you perf_args. The results of the perf run (perf.data
-# file) will be put in the normal test_that/autotest output directory.
+# only profiler option is running 'perf' on the remote machine. If
+# you want, you can replace 'record' with 'stat'. You would also need
+# to change the other args accordingly. Crosperf automatically
+# inserts a '-a' if you use 'record' for your perf_args. The results
+# of the perf run (perf.data and perf.report files) will be available
+# with the rest of the Crosperf results.
perf_args: record -e cycles,instructions
-# Replace "page_cycler.dhtml" with whatever benchmark you want to
-# profile.
benchmark: page_cycler.dhtml {
suite: telemetry_Crosperf
iterations: 1
}
-# Replace <path-to-your-chroot-goes-here> and <board-goes-here> below.
-# You can optionally add "chrome_src:" followed by the path to a Chrome
-# source tree outside your chroot that you wish to use for running
-# Telemetry.
-vanilla_image {
- chromeos_image:<path-to-your-chroot-goes-here>/src/build/images/<board-goes-here>/latest/chromiumos_test_image.bin
+# Replace the chromeos image below with the actual path to your test image.
+test_image {
+ chromeos_image:<path-to-your-chroot>/src/build/images/<board>/test-image/chromiumos_test_image.bin
}
diff --git a/crosperf/experiment_files/telemetry-crosperf.exp b/crosperf/experiment_files/telemetry-crosperf.exp
index c52d0842..111001d4 100644
--- a/crosperf/experiment_files/telemetry-crosperf.exp
+++ b/crosperf/experiment_files/telemetry-crosperf.exp
@@ -1,14 +1,17 @@
-# This is an example experiment file for Crosperf, showing how to run
-# a Telemetry test, using test_that and autotest. This runs the
-# "run_benchmark" script directly (via the telemetry_Crosperf autotest).
-# You do not need to supply both the page_set, as that is automatically
-# supplied by run_benchmark.
+# This example experiment file shows how to run a Telemetry test,
+# using autotest (via "suite: telemetry_Crosperf"). This runs
+# Telemetry's "run_benchmark" for the specified test.
+#
+# You should replace all the placeholders, marked by angle-brackets,
+# with the appropriate actual values.
name: basic_telemetry_crosperf_example
-# Replace board and remote values below appropriately. e.g. "lumpy" and
-# "123.45.678.901" or "my-machine.blah.com".
board: <your-board-goes-here>
-remote: <your-remote-ip-address-here>
+
+# Note: You can specify multiple remotes, to run your tests in parallel on
+# multiple machines. e.g. "remote: test-machine-1.com test-machine2.com
+# test-machine3.com"
+remote: <your-remote-goes-here>
# Replace "octane" below with the name of the Telemetry benchmark you
# want to run.
@@ -17,10 +20,13 @@ benchmark: octane {
iterations: 1
}
+# NOTE: You must specify at least one image; you may specify more than one.
# Replace <path-to-your-chroot-goes-here> and <board-goes-here> below.
-# You can optionally add "chrome_src:" followed by the path to a Chrome
-# source tree outside your chroot that you wish to use for running
-# Telemetry.
vanilla_image {
- chromeos_image:<path-to-your-chroot-goes-here>/src/build/images/<board-goes-here>/latest/chromiumos_test_image.bin
+ chromeos_image:<path-to-your-chroot>/src/build/images/<board>/vanilla-image/chromiumos_test_image.bin
+}
+
+# Replace the chromeos image below with the actual path to your test image.
+test_image {
+ chromeos_image:<path-to-your-chroot>/src/build/images/<board>/test-image/chromiumos_test_image.bin
}
diff --git a/crosperf/experiment_files/telemetry-pure.exp b/crosperf/experiment_files/telemetry-pure.exp
deleted file mode 100644
index 92dcae57..00000000
--- a/crosperf/experiment_files/telemetry-pure.exp
+++ /dev/null
@@ -1,28 +0,0 @@
-# This is an example experiment file for Crosperf, showing how to run
-# a "pure" Telemetry test, i.e. bypassing run_remote_tests, test_that
-# and autotest. This runs the "run_measurement" script directly. You
-# need to supply both the name of the Telemetry test and the page_set
-# (via the test_args argument).
-
-name: pure_telemetry_example
-# Replace board and remote values below appropriately. e.g. "lumpy" and
-# "123.45.678.901" or "my-machine.blah.com".
-board: <your-board-goes-here>
-remote: <your-remote-ip-address-here>
-
-# Replace "page_cycler_dhtml" below with the name of the Telemetry test
-# that you wnat run_measurement to run. Also replace the page set below
-# (in the test_args field) with the appropriate page set for your test.
-benchmark: page_cycler_dhtml {
- suite: telemetry
- iterations: 1
- test_args: ./page_sets/page_cycler/dhtml.json
-}
-
-# Replace <path-to-your-chroot-goes-here> and <board-goes-here> below.
-# You can optionally add "chrome_src:" followed by the path to a Chrome
-# source tree outside your chroot that you wish to use for running
-# Telemetry.
-vanilla_image {
- chromeos_image:<path-to-your-chroot-goes-here>/src/build/images/<board-goes-here>/latest/chromiumos_test_image.bin
-}
diff --git a/crosperf/experiment_files/telemetry-without-autotest.exp b/crosperf/experiment_files/telemetry-without-autotest.exp
new file mode 100644
index 00000000..ce3f207e
--- /dev/null
+++ b/crosperf/experiment_files/telemetry-without-autotest.exp
@@ -0,0 +1,31 @@
+# This example experiment file shows how to run a Telemetry test
+# directly, bypassing autotest. This runs the "run_measurement"
+# script. You need to supply both the name of the Telemetry test and
+# the page_set (via the test_args argument).
+#
+# You should replace all the placeholders, marked by angle-brackets,
+# with the appropriate actual values.
+
+name: telemetry_without_autotest_example
+board: <your-board-goes-here>
+
+# Note: You can specify multiple remotes, to run your tests in parallel on
+# multiple machines. e.g. "remote: test-machine-1.com test-machine2.com
+# test-machine3.com"
+remote: <your-remote-goes-here>
+
+# Replace "page_cycler_dhtml" below with the name of the Telemetry test
+# that you want run_measurement to run. Also replace the page set below
+# (in the test_args field) with the appropriate page set for your test.
+# N.B. The key to running telemetry without autotest is the 'suite' field.
+# Make sure your suite is 'telemetry', NOT 'telemetry_Crosperf'.
+benchmark: page_cycler_dhtml {
+ suite: telemetry
+ iterations: 1
+ test_args: ./page_sets/page_cycler/dhtml.json
+}
+
+# Replace the chromeos image below with the actual path to your test image.
+test_image {
+ chromeos_image:<path-to-your-chroot>/src/build/images/<board>/test-image/chromiumos_test_image.bin
+}
diff --git a/crosperf/experiment_files/test_that.exp b/crosperf/experiment_files/test_that.exp
deleted file mode 100644
index 176252e2..00000000
--- a/crosperf/experiment_files/test_that.exp
+++ /dev/null
@@ -1,40 +0,0 @@
-# This is an example experiment file for Crosperf, showing how to run
-# a basic test, using test_that and autotest. You can explicitly invoke
-# test_that (or not) for a particular benchmark, using the use_test_that
-# field, as shown below. Alternatively, you can pass --use_test_that=True
-# or --use_test_that=False to the command line args you pass to crosperf.
-
-name: test_that_example
-# Replace board and remote values below appropriately. e.g. "lumpy" and
-# "123.45.678.901" or "my-machine.blah.com".
-board: <your-board-goes-here>
-remote: <your-remote-ip-address-here>
-
-# Replace "BootPerfServer" below with the name of the Telemetry benchmark you
-# want to run.
-benchmark: BootPerfServer {
- test_name: BootPerfServer
- iterations: 1
- use_test_that: True
-}
-
-# Replace "bvt" below with the name of the test you want to run.
-benchmark: bvt {
- test_name: suite:bvt
- use_test_that: True
-}
-
-# Replace "login_LoginSuccess" below with the name of the test you
-# want to run.
-benchmark: login_LoginSuccess {
- test_name: login_LoginSuccess
- use_test_that: True
-}
-
-# Replace <path-to-your-chroot-goes-here> and <board-goes-here> below.
-# You can optionally add "chrome_src:" followed by the path to a Chrome
-# source tree outside your chroot that you wish to use for running
-# Telemetry.
-vanilla_image {
- chromeos_image:<path-to-your-chroot-goes-here>/src/build/images/<board-goes-here>/latest/chromiumos_test_image.bin
-}
diff --git a/crosperf/experiment_files/toolchain b/crosperf/experiment_files/toolchain
deleted file mode 100644
index 9156998b..00000000
--- a/crosperf/experiment_files/toolchain
+++ /dev/null
@@ -1,16 +0,0 @@
-# Use this experiment whenever the toolchain is upgraded.
-
-benchmark: bvt {
- autotest_name: suite:bvt
-}
-
-benchmark: suite_Smoke {
- autotest_name: suite:smoke
-}
-
-benchmark: PyAutoPerfTests {
-}
-
-benchmark: BootPerfServer {
- autotest_name: ^server/site_tests/platform_BootPerfServer/control$
-}
diff --git a/crosperf/experiment_files/trybot-image.exp b/crosperf/experiment_files/trybot-image.exp
index d80f4187..a261e08c 100644
--- a/crosperf/experiment_files/trybot-image.exp
+++ b/crosperf/experiment_files/trybot-image.exp
@@ -1,14 +1,19 @@
-# This is an example experiment file for Crosperf, showing how to run
-# a basic test, using a (previously made) trybot image.
+# This example experiment shows how to run a basic test, using a
+# (previously made) trybot image.
+
+#
+# You should replace all the placeholders, marked by angle-brackets,
+# with the appropriate actual values.
name: trybot_example
-# Replace board and remote values below appropriately. e.g. "lumpy" and
-# "123.45.678.901" or "my-machine.blah.com".
board: <your-board-goes-here>
-remote: <your-remote-ip-address-here>
-# You can replace 'canvasmark' below with the name of the Telemetry
-# benchmakr you want to run.
+# Note: You can specify multiple remotes, to run your tests in parallel on
+# multiple machines. e.g. "remote: test-machine-1.com test-machine2.com
+# test-machine3.com"
+remote: <your-remote-goes-here>
+
+
benchmark: canvasmark {
suite:telemetry_Crosperf
iterations: 1
@@ -19,14 +24,10 @@ benchmark: canvasmark {
# to the top of your ChromiumOS chroot.
trybot_image {
chromeos_root:<path-to-your-chroot-goes-here>
- # Replace <trybot-image-name> with the actual name of the trybot image
- # that you wish to use. You can find this by going to the trybot build
- # log, going # to the 'Report' stage, and looking for "Build Artifacts'
- # at the bottom. You will see something like:
- # 'lumpy: https://storage.cloud.google.com/chromeos-image-archive/trybot-lumpy-paladin/R34-5393.0.0-b1504/index.html'
- # From that you can extract the trybot image name and put it in the build
- # field:
- # build:trybot-lumpy-paladin/R34-5417.0.0-b1506
- build:<trybot-image-name>
+ # Replace "trybot-lumpy-paladin/R34-5417.0.0-b1506" with the name of the
+ # trybot image that you wish to use. You can find this by going to the
+ # trybot build log, going to the 'Report' stage, and looking for 'Build
+ # Artifacts' at the bottom. You can extract the trybot image name from that.
+ build:trybot-lumpy-paladin/R34-5417.0.0-b1506
}
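
With the placeholders resolved, the trybot label block in trybot-image.exp reduces to the sketch below; the chroot path is an invented example, and the build name is the one already used in the file's comment:

    trybot_image {
      chromeos_root: /usr/local/chromeos
      build: trybot-lumpy-paladin/R34-5417.0.0-b1506
    }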