diff options
Diffstat (limited to 'crosperf/help.py')
-rw-r--r-- | crosperf/help.py | 79 |
1 files changed, 42 insertions, 37 deletions
diff --git a/crosperf/help.py b/crosperf/help.py index cf74d93e..09a6c66c 100644 --- a/crosperf/help.py +++ b/crosperf/help.py @@ -11,7 +11,7 @@ from settings_factory import LabelSettings class Help(object): def GetUsage(self): - return """%s [OPTIONS] [ACTION] EXPERIMENT_FILE""" % (sys.argv[0]) + return """%s [OPTIONS] EXPERIMENT_FILE""" % (sys.argv[0]) def _WrapLine(self, line): return "\n".join(textwrap.wrap(line, 80)) @@ -34,21 +34,23 @@ class Help(object): benchmark_fields = self._GetFieldDescriptions(BenchmarkSettings("").fields) label_fields = self._GetFieldDescriptions(LabelSettings("").fields) - return """%s is a script for running performance experiments on ChromeOS. It -allows one to run ChromeOS Autotest benchmarks over several images and compare -the results to determine whether there is a performance difference. + return """%s is a script for running performance experiments on +ChromeOS. It allows one to run ChromeOS Autotest benchmarks over +several images and compare the results to determine whether there +is a performance difference. Comparing several images using %s is referred to as running an -"experiment". An "experiment file" is a configuration file which holds all the -information that describes the experiment and how it should be run. An example -of a simple experiment file is below: +"experiment". An "experiment file" is a configuration file which holds +all the information that describes the experiment and how it should be +run. 
An example of a simple experiment file is below: --------------------------------- test.exp --------------------------------- name: my_experiment board: x86-alex -remote: chromeos-alex5 172.18.122.132 +remote: chromeos2-row1-rack4-host7.cros 172.18.122.132 -benchmark: PageCycler { +benchmark: page_cycler.morejs { + suite: telemetry_Crosperf iterations: 3 } @@ -61,20 +63,26 @@ my_second_image { } ---------------------------------------------------------------------------- -This experiment file names the experiment "my_experiment". It will be run -on the board x86-alex. Benchmarks will be run using two remote devices, -one is a device specified by a hostname and the other is a device specified -by it's IP address. Benchmarks will be run in parallel across these devices. -There is currently no way to specify which benchmark will run on each device. - -We define one "benchmark" that will be run, PageCycler. This benchmark has one -"field" which specifies how many iterations it will run for. - -We specify 2 "labels" or images which will be compared. The PageCycler benchmark -will be run on each of these images 3 times and a result table will be output -which compares the two. - -The full list of fields that can be specified are as follows: +This experiment file names the experiment "my_experiment". It will be +run on the board x86-alex. Benchmarks will be run using two remote +devices, one is a device specified by a hostname and the other is a +device specified by its IP address. Benchmarks will be run in +parallel across these devices. There is currently no way to specify +which benchmark will run on each device. + +We define one "benchmark" that will be run, page_cycler.morejs. This +benchmark has two "fields", one which specifies that this benchmark is +part of the telemetry_Crosperf suite (this is the common way to run +most Telemetry benchmarks), and the other which specifies how many +iterations it will run for. 
+ +We specify one or more "labels" or images which will be compared. The +page_cycler.morejs benchmark will be run on each of these images 3 +times and a result table will be output which compares them for all +the images specified. + +The full list of fields that can be specified in the experiment file +are as follows: ================= Global Fields ================= %s @@ -88,19 +96,16 @@ Label Fields ================= %s -Note that global fields are overidden by label or benchmark fields, if they can -be specified in both places. Fields that are specified as arguments override -fields specified in experiment files. - -%s is invoked by passing it a path to an experiment file, as well as an action -to execute on that experiment file. The possible actions to use are: - -run\t\tRun the experiment and cache the results. - -table\t\tDisplay cached results of an experiment, without running anything. - -email\t\tEmail a summary of the results to the user. - -do\t\tThe default action. Executes the following actions: run, table, email. +Note that global fields are overridden by label or benchmark fields, if +they can be specified in both places. Fields that are specified as +arguments override fields specified in experiment files. + +%s is invoked by passing it a path to an experiment file, +as well as any options (in addition to those specified in the +experiment file). Crosperf runs the experiment and caches the results +(or reads the previously cached experiment results out of the cache), +generates and displays a report based on the run, and emails the +report to the user. If the results were all read out of the cache, +then by default no email is generated. """ % (sys.argv[0], sys.argv[0], global_fields, benchmark_fields, label_fields, sys.argv[0]) |