author    Ahmad Sharif <asharif@chromium.org>  2012-12-20 12:09:49 -0800
committer Ahmad Sharif <asharif@chromium.org>  2012-12-20 12:09:49 -0800
commit    4467f004e7f0854963bec90daff1879fbd9d2fec (patch)
tree      aac36caa6279aa532e2d6234e50ee812f2db0c8d /crosperf
parent    f395c26437cbdabc2960447fba89b226f4409e82 (diff)
download  toolchain-utils-4467f004e7f0854963bec90daff1879fbd9d2fec.tar.gz

Synced repos to: 64740
Diffstat (limited to 'crosperf')
-rw-r--r--  crosperf/benchmark.py                       1
-rw-r--r--  crosperf/benchmark_run.py                 108
-rwxr-xr-x  crosperf/benchmark_run_unittest.py         50
-rwxr-xr-x  crosperf/crosperf                           2
-rwxr-xr-x  crosperf/crosperf.py                        4
-rw-r--r--  crosperf/default_remotes                    5
-rw-r--r--  crosperf/experiment.py                     33
-rw-r--r--  crosperf/experiment_factory.py             57
-rwxr-xr-x  crosperf/experiment_factory_unittest.py     7
-rwxr-xr-x  crosperf/experiment_file_unittest.py        9
-rw-r--r--  crosperf/experiment_runner.py              14
-rw-r--r--  crosperf/experiment_status.py              31
-rw-r--r--  crosperf/label.py                          26
-rw-r--r--  crosperf/machine_manager.py               194
-rwxr-xr-x  crosperf/machine_manager_unittest.py       65
-rw-r--r--  crosperf/perf_table.py                     58
-rw-r--r--  crosperf/results_cache.py                  76
-rw-r--r--  crosperf/results_organizer.py              59
-rw-r--r--  crosperf/results_report.py                165
-rw-r--r--  crosperf/settings_factory.py               21
-rw-r--r--  crosperf/test_flag.py                      16

21 files changed, 789 insertions, 212 deletions
diff --git a/crosperf/benchmark.py b/crosperf/benchmark.py
index a75bd8e3..bc7f1fa8 100644
--- a/crosperf/benchmark.py
+++ b/crosperf/benchmark.py
@@ -21,3 +21,4 @@ class Benchmark(object):
self.iterations = iterations
self.outlier_range = outlier_range
self.perf_args = perf_args
+ self.iteration_adjusted = False
diff --git a/crosperf/benchmark_run.py b/crosperf/benchmark_run.py
index 7579b6c2..dc837937 100644
--- a/crosperf/benchmark_run.py
+++ b/crosperf/benchmark_run.py
@@ -4,16 +4,16 @@
import datetime
import os
-import re
import threading
import time
import traceback
+from utils import command_executer
+from utils import timeline
+
from autotest_runner import AutotestRunner
from results_cache import Result
from results_cache import ResultsCache
-from utils import command_executer
-from utils import logger
STATUS_FAILED = "FAILED"
STATUS_SUCCEEDED = "SUCCEEDED"
@@ -24,53 +24,50 @@ STATUS_PENDING = "PENDING"
class BenchmarkRun(threading.Thread):
- def __init__(self, name, benchmark_name, autotest_name, autotest_args,
- label_name, chromeos_root, chromeos_image, board, iteration,
- cache_conditions, outlier_range, perf_args,
+ def __init__(self, name, benchmark,
+ label,
+ iteration,
+ cache_conditions,
machine_manager,
logger_to_use):
threading.Thread.__init__(self)
self.name = name
self._logger = logger_to_use
- self.benchmark_name = benchmark_name
- self.autotest_name = autotest_name
- self.label_name = label_name
- self.chromeos_root = chromeos_root
- self.chromeos_image = os.path.expanduser(chromeos_image)
- self.board = board
+ self.benchmark = benchmark
self.iteration = iteration
+ self.label = label
self.result = None
self.terminated = False
self.retval = None
- self.status = STATUS_PENDING
self.run_completed = False
- self.outlier_range = outlier_range
- self.perf_args = perf_args
self.machine_manager = machine_manager
self.cache = ResultsCache()
self.autotest_runner = AutotestRunner(self._logger)
self.machine = None
- self.full_name = self.autotest_name
self.cache_conditions = cache_conditions
self.runs_complete = 0
self.cache_hit = False
self.failure_reason = ""
- self.autotest_args = "%s %s" % (autotest_args, self._GetExtraAutotestArgs())
+ self.autotest_args = "%s %s" % (benchmark.autotest_args,
+ self._GetExtraAutotestArgs())
self._ce = command_executer.GetCommandExecuter(self._logger)
+ self.timeline = timeline.Timeline()
+ self.timeline.Record(STATUS_PENDING)
def run(self):
try:
# Just use the first machine for running the cached version,
# without locking it.
- self.cache.Init(self.chromeos_image,
- self.chromeos_root,
- self.autotest_name,
+ self.cache.Init(self.label.chromeos_image,
+ self.label.chromeos_root,
+ self.benchmark.autotest_name,
self.iteration,
self.autotest_args,
self.machine_manager,
- self.board,
+ self.label.board,
self.cache_conditions,
self._logger,
+ self.label
)
self.result = self.cache.ReadResult()
@@ -78,10 +75,12 @@ class BenchmarkRun(threading.Thread):
if self.result:
self._logger.LogOutput("%s: Cache hit." % self.name)
- self._logger.LogOutput(self.result.out + "\n" + self.result.err)
+ self._logger.LogOutput(self.result.out, print_to_console=False)
+ self._logger.LogError(self.result.err, print_to_console=False)
+
else:
self._logger.LogOutput("%s: No cache hit." % self.name)
- self.status = STATUS_WAITING
+ self.timeline.Record(STATUS_WAITING)
# Try to acquire a machine now.
self.machine = self.AcquireMachine()
self.cache.remote = self.machine.name
@@ -92,17 +91,17 @@ class BenchmarkRun(threading.Thread):
return
if not self.result.retval:
- self.status = STATUS_SUCCEEDED
+ self.timeline.Record(STATUS_SUCCEEDED)
else:
- if self.status != STATUS_FAILED:
- self.status = STATUS_FAILED
+ if self.timeline.GetLastEvent() != STATUS_FAILED:
self.failure_reason = "Return value of autotest was non-zero."
+ self.timeline.Record(STATUS_FAILED)
except Exception, e:
self._logger.LogError("Benchmark run: '%s' failed: %s" % (self.name, e))
traceback.print_exc()
- if self.status != STATUS_FAILED:
- self.status = STATUS_FAILED
+ if self.timeline.GetLastEvent() != STATUS_FAILED:
+ self.timeline.Record(STATUS_FAILED)
self.failure_reason = str(e)
finally:
if self.machine:
@@ -113,15 +112,17 @@ class BenchmarkRun(threading.Thread):
def Terminate(self):
self.terminated = True
self.autotest_runner.Terminate()
- if self.status != STATUS_FAILED:
- self.status = STATUS_FAILED
+ if self.timeline.GetLastEvent() != STATUS_FAILED:
+ self.timeline.Record(STATUS_FAILED)
self.failure_reason = "Thread terminated."
def AcquireMachine(self):
while True:
if self.terminated:
raise Exception("Thread terminated while trying to acquire machine.")
- machine = self.machine_manager.AcquireMachine(self.chromeos_image)
+ machine = self.machine_manager.AcquireMachine(self.label.chromeos_image,
+ self.label)
+
if machine:
self._logger.LogOutput("%s: Machine %s acquired at %s" %
(self.name,
@@ -134,8 +135,8 @@ class BenchmarkRun(threading.Thread):
return machine
def _GetExtraAutotestArgs(self):
- if self.perf_args:
- perf_args_list = self.perf_args.split(" ")
+ if self.benchmark.perf_args:
+ perf_args_list = self.benchmark.perf_args.split(" ")
perf_args_list = [perf_args_list[0]] + ["-a"] + perf_args_list[1:]
perf_args = " ".join(perf_args_list)
if not perf_args_list[0] in ["record", "stat"]:
@@ -148,24 +149,47 @@ class BenchmarkRun(threading.Thread):
return ""
def RunTest(self, machine):
- self.status = STATUS_IMAGING
+ self.timeline.Record(STATUS_IMAGING)
self.machine_manager.ImageMachine(machine,
- self.chromeos_image,
- self.board)
- self.status = "%s %s" % (STATUS_RUNNING, self.autotest_name)
+ self.label)
+ self.timeline.Record(STATUS_RUNNING)
[retval, out, err] = self.autotest_runner.Run(machine.name,
- self.chromeos_root,
- self.board,
- self.autotest_name,
+ self.label.chromeos_root,
+ self.label.board,
+ self.benchmark.autotest_name,
self.autotest_args)
self.run_completed = True
return Result.CreateFromRun(self._logger,
- self.chromeos_root,
- self.board,
+ self.label.chromeos_root,
+ self.label.board,
+ self.label.name,
out,
err,
retval)
def SetCacheConditions(self, cache_conditions):
self.cache_conditions = cache_conditions
+
+
+class MockBenchmarkRun(BenchmarkRun):
+  """Inherited from BenchmarkRun; just override RunTest for testing."""
+
+ def RunTest(self, machine):
+ """Remove Result.CreateFromRun for testing."""
+ self.timeline.Record(STATUS_IMAGING)
+ self.machine_manager.ImageMachine(machine,
+ self.label)
+ self.timeline.Record(STATUS_RUNNING)
+ [retval, out, err] = self.autotest_runner.Run(machine.name,
+ self.label.chromeos_root,
+ self.label.board,
+ self.benchmark.autotest_name,
+ self.autotest_args)
+ self.run_completed = True
+ rr = Result("Results placed in /tmp/test", "", 0)
+ rr.out = out
+ rr.err = err
+ rr.retval = retval
+ return rr
+
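
benchmark_run.py now records state transitions through utils.timeline rather
than a bare status string. The timeline module itself is outside this diff; a
minimal sketch of the interface the new code relies on (Record, GetLastEvent,
GetLastEventTime), assuming only what is called above:

    import time

    class Timeline(object):
      # Sketch of utils.timeline as used by BenchmarkRun (assumed API).

      def __init__(self):
        self._events = []  # (status_name, timestamp) pairs, oldest first

      def Record(self, name):
        self._events.append((name, time.time()))

      def GetLastEvent(self):
        return self._events[-1][0] if self._events else ""

      def GetLastEventTime(self):
        return self._events[-1][1] if self._events else 0

Keeping a timestamp per state is what lets ExperimentStatus (changed below)
print how long each run has been sitting in its current state.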
diff --git a/crosperf/benchmark_run_unittest.py b/crosperf/benchmark_run_unittest.py
index c4c3fdf1..c4670c9c 100755
--- a/crosperf/benchmark_run_unittest.py
+++ b/crosperf/benchmark_run_unittest.py
@@ -1,38 +1,42 @@
#!/usr/bin/python
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
+# Copyright 2011 Google Inc. All Rights Reserved.
+
+"""Testing of benchmark_run."""
import unittest
+
+from utils import logger
+
from autotest_runner import MockAutotestRunner
-from benchmark_run import BenchmarkRun
+from benchmark_run import MockBenchmarkRun
+from label import MockLabel
+from benchmark import Benchmark
from machine_manager import MockMachineManager
from results_cache import MockResultsCache
-from utils import logger
class BenchmarkRunTest(unittest.TestCase):
def testDryRun(self):
- m = MockMachineManager()
+ my_label = MockLabel("test1", "image1", "/tmp/test_benchmark_run",
+ "x86-alex", "chromeos-alex1", "")
+ m = MockMachineManager("/tmp/chromeos_root")
m.AddMachine("chromeos-alex1")
- b = BenchmarkRun("test run",
- "PageCycler",
- "PageCycler",
- "",
- "image1",
- "/tmp/test",
- "/tmp/test/image",
- "x86-alex",
- 1,
- [],
- 0.2,
- "",
- "none",
- m,
- MockResultsCache(),
- MockAutotestRunner(),
- logger.GetLogger())
+    bench = Benchmark("PageCycler",
+ "Pyautoperf",
+ "",
+ 1,
+ 0.2,
+ "")
+ b = MockBenchmarkRun("test run",
+ bench,
+ my_label,
+ 1,
+ [],
+ m,
+ logger.GetLogger())
+ b.cache = MockResultsCache()
+ b.autotest_runner = MockAutotestRunner()
b.start()
diff --git a/crosperf/crosperf b/crosperf/crosperf
index 286bf25a..904a172a 100755
--- a/crosperf/crosperf
+++ b/crosperf/crosperf
@@ -1,2 +1,2 @@
#!/bin/bash
-PYTHONPATH=$PYTHONPATH:$(dirname $0)/.. python $(dirname $0)/crosperf.py "$@"
+PYTHONPATH=$(dirname $0)/..:$PYTHONPATH python $(dirname $0)/crosperf.py "$@"
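
The wrapper now prepends the repository root to PYTHONPATH instead of
appending it, presumably so that the bundled utils package is picked up ahead
of any identically named module already on the caller's path.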
diff --git a/crosperf/crosperf.py b/crosperf/crosperf.py
index 24699338..cfb48d7c 100755
--- a/crosperf/crosperf.py
+++ b/crosperf/crosperf.py
@@ -16,6 +16,7 @@ from help import Help
from settings_factory import GlobalSettings
from utils import logger
+import test_flag
l = logger.GetLogger()
@@ -77,6 +78,9 @@ def Main(argv):
parser.error("Invalid number of arguments.")
working_directory = os.getcwd()
+ if options.dry_run:
+ test_flag.SetTestMode(True)
+
experiment_file = ExperimentFile(open(experiment_filename, "rb"),
option_settings)
if not experiment_file.GetGlobalSettings().GetField("name"):
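
With --dry-run, crosperf flips a process-wide flag that the factories below
consult before constructing real objects. A condensed sketch of the pattern
(MakeMachineManager is a hypothetical name; the real branching sits inline in
Experiment and ExperimentFactory):

    import test_flag
    from machine_manager import MachineManager, MockMachineManager

    def MakeMachineManager(chromeos_root):
      # In test mode every factory hands back its Mock* counterpart.
      if test_flag.GetTestMode():
        return MockMachineManager(chromeos_root)
      return MachineManager(chromeos_root)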
diff --git a/crosperf/default_remotes b/crosperf/default_remotes
new file mode 100644
index 00000000..5efaebcd
--- /dev/null
+++ b/crosperf/default_remotes
@@ -0,0 +1,5 @@
+x86-zgb : chromeos1-rack3-host4.cros chromeos1-rack3-host5.cros chromeos1-rack3-host6.cros
+x86-alex : chromeos2-row1-rack4-host7.cros chromeos2-row1-rack4-host8.cros chromeos2-row1-rack4-host9.cros
+lumpy : chromeos2-row2-rack4-host10.cros chromeos2-row2-rack4-host11.cros chromeos2-row2-rack4-host12.cros
+stumpy : chromeos2-row3-rack7-host1.cros chromeos2-row3-rack7-host2.cros chromeos2-row3-rack7-host3.cros
+
diff --git a/crosperf/experiment.py b/crosperf/experiment.py
index 7b48344c..e9dc3d07 100644
--- a/crosperf/experiment.py
+++ b/crosperf/experiment.py
@@ -2,15 +2,20 @@
# Copyright 2011 Google Inc. All Rights Reserved.
+"""The experiment setting module."""
+
import os
import time
+
+from utils import logger
+
from autotest_runner import AutotestRunner
from benchmark_run import BenchmarkRun
from machine_manager import MachineManager
+from machine_manager import MockMachineManager
from results_cache import ResultsCache
from results_report import HTMLResultsReport
-from utils import logger
-from utils.file_utils import FileUtils
+import test_flag
class Experiment(object):
@@ -33,6 +38,7 @@ class Experiment(object):
self.labels = labels
self.benchmarks = benchmarks
self.num_complete = 0
+ self.num_run_complete = 0
# We need one chromeos_root to run the benchmarks in, but it doesn't
# matter where it is, unless the ABIs are different.
@@ -44,13 +50,17 @@ class Experiment(object):
raise Exception("No chromeos_root given and could not determine one from "
"the image path.")
- self.machine_manager = MachineManager(chromeos_root)
+ if test_flag.GetTestMode():
+ self.machine_manager = MockMachineManager(chromeos_root)
+ else:
+ self.machine_manager = MachineManager(chromeos_root)
self.l = logger.GetLogger()
for machine in remote:
self.machine_manager.AddMachine(machine)
- self.machine_manager.ComputeCommonCheckSum()
- self.machine_manager.ComputeCommonCheckSumString()
+ for label in labels:
+ self.machine_manager.ComputeCommonCheckSum(label)
+ self.machine_manager.ComputeCommonCheckSumString(label)
self.start_time = None
self.benchmark_runs = self._GenerateBenchmarkRuns()
@@ -69,17 +79,10 @@ class Experiment(object):
"run.%s" % (full_name),
True)
benchmark_run = BenchmarkRun(benchmark_run_name,
- benchmark.name,
- benchmark.autotest_name,
- benchmark.autotest_args,
- label.name,
- label.chromeos_root,
- label.chromeos_image,
- label.board,
+ benchmark,
+ label,
iteration,
self.cache_conditions,
- benchmark.outlier_range,
- benchmark.perf_args,
self.machine_manager,
logger_to_use)
@@ -102,6 +105,8 @@ class Experiment(object):
t.join(0)
if not t.isAlive():
self.num_complete += 1
+ if not t.cache_hit:
+ self.num_run_complete += 1
self.active_threads.remove(t)
return False
return True
diff --git a/crosperf/experiment_factory.py b/crosperf/experiment_factory.py
index 5c21179e..bd3076dd 100644
--- a/crosperf/experiment_factory.py
+++ b/crosperf/experiment_factory.py
@@ -1,11 +1,17 @@
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
+"""A module to generate experiments."""
+
+import os
+import socket
from benchmark import Benchmark
from experiment import Experiment
from label import Label
+from label import MockLabel
from results_cache import CacheConditions
+import test_flag
class ExperimentFactory(object):
@@ -32,7 +38,9 @@ class ExperimentFactory(object):
cache_conditions.append(CacheConditions.RUN_SUCCEEDED)
if global_settings.GetField("rerun"):
cache_conditions.append(CacheConditions.FALSE)
- if global_settings.GetField("exact_remote"):
+ if global_settings.GetField("same_machine"):
+ cache_conditions.append(CacheConditions.SAME_MACHINE_MATCH)
+ if global_settings.GetField("same_specs"):
cache_conditions.append(CacheConditions.MACHINES_MATCH)
# Construct benchmarks.
@@ -54,20 +62,61 @@ class ExperimentFactory(object):
# Construct labels.
labels = []
all_label_settings = experiment_file.GetSettings("label")
+ all_remote = list(remote)
for label_settings in all_label_settings:
label_name = label_settings.name
image = label_settings.GetField("chromeos_image")
chromeos_root = label_settings.GetField("chromeos_root")
board = label_settings.GetField("board")
- label = Label(label_name, image, chromeos_root, board)
+ my_remote = label_settings.GetField("remote")
+ # TODO(yunlian): We should consolidate code in machine_manager.py
+      # to determine whether we are running from within Google or not
+ if ("corp.google.com" in socket.gethostname() and
+ (not my_remote
+ or my_remote == remote
+ and global_settings.GetField("board") != board)):
+ my_remote = self.GetDefaultRemotes(board)
+ if global_settings.GetField("same_machine") and len(my_remote) > 1:
+ raise Exception("Only one remote is allowed when same_machine "
+ "is turned on")
+ all_remote += my_remote
+ image_args = label_settings.GetField("image_args")
+ if test_flag.GetTestMode():
+ label = MockLabel(label_name, image, chromeos_root, board, my_remote,
+ image_args)
+ else:
+ label = Label(label_name, image, chromeos_root, board, my_remote,
+ image_args)
labels.append(label)
email = global_settings.GetField("email")
-
- experiment = Experiment(experiment_name, remote, rerun_if_failed,
+ all_remote = list(set(all_remote))
+ experiment = Experiment(experiment_name, all_remote, rerun_if_failed,
working_directory, chromeos_root,
cache_conditions, labels, benchmarks,
experiment_file.Canonicalize(),
email)
return experiment
+
+ def GetDefaultRemotes(self, board):
+ default_remotes_file = os.path.join(os.path.dirname(__file__),
+ "default_remotes")
+ try:
+ with open(default_remotes_file) as f:
+ for line in f:
+ key, v = line.split(":")
+ if key.strip() == board:
+ remotes = v.strip().split(" ")
+ if remotes:
+ return remotes
+ else:
+              raise Exception("There is no remote for {0}".format(board))
+ except IOError:
+ raise Exception("IOError while reading file {0}"
+ .format(default_remotes_file))
+ else:
+      raise Exception("There is no remote for {0}".format(board))
+
+
+
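
GetDefaultRemotes keys into the default_remotes file added above, whose lines
have the form "board : host1 host2 host3". Assuming that file, a lookup
behaves like:

    factory = ExperimentFactory()
    factory.GetDefaultRemotes("lumpy")
    # -> ["chromeos2-row2-rack4-host10.cros",
    #     "chromeos2-row2-rack4-host11.cros",
    #     "chromeos2-row2-rack4-host12.cros"]
    # A board with no entry is meant to fall through to the final raise.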
diff --git a/crosperf/experiment_factory_unittest.py b/crosperf/experiment_factory_unittest.py
index e91295da..fa943519 100755
--- a/crosperf/experiment_factory_unittest.py
+++ b/crosperf/experiment_factory_unittest.py
@@ -4,10 +4,12 @@
import StringIO
import unittest
-from experiment_factory import ExperimentFactory
-from experiment_file import ExperimentFile
+
from utils.file_utils import FileUtils
+from experiment_factory import ExperimentFactory
+from experiment_file import ExperimentFile
+import test_flag
EXPERIMENT_FILE_1 = """
board: x86-alex
@@ -47,4 +49,5 @@ class ExperimentFactoryTest(unittest.TestCase):
if __name__ == "__main__":
FileUtils.Configure(True)
+ test_flag.SetTestMode(True)
unittest.main()
diff --git a/crosperf/experiment_file_unittest.py b/crosperf/experiment_file_unittest.py
index 67da11e5..d08c7eb5 100755
--- a/crosperf/experiment_file_unittest.py
+++ b/crosperf/experiment_file_unittest.py
@@ -11,7 +11,7 @@ from experiment_file import ExperimentFile
EXPERIMENT_FILE_1 = """
board: x86-alex
remote: chromeos-alex3
-
+ perf_args: record -a -e cycles
benchmark: PageCycler {
iterations: 3
}
@@ -21,6 +21,8 @@ EXPERIMENT_FILE_1 = """
}
image2 {
+ board: lumpy
+ remote: chromeos-lumpy1
chromeos_image: /usr/local/google/cros_image2.bin
}
"""
@@ -70,7 +72,8 @@ class ExperimentFileTest(unittest.TestCase):
experiment_file = ExperimentFile(input_file)
global_settings = experiment_file.GetGlobalSettings()
self.assertEqual(global_settings.GetField("remote"), ["chromeos-alex3"])
-
+ self.assertEqual(global_settings.GetField("perf_args"),
+ "record -a -e cycles")
benchmark_settings = experiment_file.GetSettings("benchmark")
self.assertEqual(len(benchmark_settings), 1)
self.assertEqual(benchmark_settings[0].name, "PageCycler")
@@ -82,6 +85,8 @@ class ExperimentFileTest(unittest.TestCase):
self.assertEqual(label_settings[0].GetField("board"), "x86-alex")
self.assertEqual(label_settings[0].GetField("chromeos_image"),
"/usr/local/google/cros_image1.bin")
+ self.assertEqual(label_settings[1].GetField("remote"), ["chromeos-lumpy1"])
+ self.assertEqual(label_settings[0].GetField("remote"), ["chromeos-alex3"])
def testOverrideSetting(self):
input_file = StringIO.StringIO(EXPERIMENT_FILE_2)
diff --git a/crosperf/experiment_runner.py b/crosperf/experiment_runner.py
index 4219c435..b905bbdc 100644
--- a/crosperf/experiment_runner.py
+++ b/crosperf/experiment_runner.py
@@ -2,19 +2,24 @@
# Copyright 2011 Google Inc. All Rights Reserved.
+"""The experiment runner module."""
import getpass
import os
import time
-from experiment_status import ExperimentStatus
-from results_report import HTMLResultsReport
-from results_report import TextResultsReport
+
from utils import command_executer
from utils import logger
from utils.email_sender import EmailSender
from utils.file_utils import FileUtils
+from experiment_status import ExperimentStatus
+from results_report import HTMLResultsReport
+from results_report import TextResultsReport
+
class ExperimentRunner(object):
+ """ExperimentRunner Class."""
+
STATUS_TIME_DELAY = 30
THREAD_MONITOR_DELAY = 2
@@ -95,6 +100,7 @@ class ExperimentRunner(object):
benchmark_run_path = os.path.join(results_directory,
benchmark_run_name)
benchmark_run.result.CopyResultsTo(benchmark_run_path)
+ benchmark_run.result.CleanUp()
def Run(self):
self._Run(self._experiment)
@@ -105,6 +111,8 @@ class ExperimentRunner(object):
class MockExperimentRunner(ExperimentRunner):
+ """Mocked ExperimentRunner for testing."""
+
def __init__(self, experiment):
super(MockExperimentRunner, self).__init__(experiment)
diff --git a/crosperf/experiment_status.py b/crosperf/experiment_status.py
index ddf3f54a..3a270663 100644
--- a/crosperf/experiment_status.py
+++ b/crosperf/experiment_status.py
@@ -2,14 +2,20 @@
# Copyright 2011 Google Inc. All Rights Reserved.
+"""The class to show the banner."""
+
import datetime
import time
class ExperimentStatus(object):
+ """The status class."""
+
def __init__(self, experiment):
self.experiment = experiment
self.num_total = len(self.experiment.benchmark_runs)
+ self.completed = 0
+ self.new_job_start_time = time.time()
def _GetProgressBar(self, num_complete, num_total):
ret = "Done: %s%%" % int(100.0 * num_complete / num_total)
@@ -23,14 +29,23 @@ class ExperimentStatus(object):
return ret
def GetProgressString(self):
+ """Get the elapsed_time, ETA."""
current_time = time.time()
if self.experiment.start_time:
elapsed_time = current_time - self.experiment.start_time
else:
elapsed_time = 0
try:
- eta_seconds = (float(self.num_total - self.experiment.num_complete) *
- elapsed_time / self.experiment.num_complete)
+ if self.completed != self.experiment.num_complete:
+ self.completed = self.experiment.num_complete
+ self.new_job_start_time = current_time
+ time_completed_jobs = (elapsed_time -
+ (current_time - self.new_job_start_time))
+ eta_seconds = (float(self.num_total - self.experiment.num_complete -1) *
+ time_completed_jobs / self.experiment.num_run_complete
+ + (time_completed_jobs / self.experiment.num_run_complete
+ - (current_time - self.new_job_start_time)))
+
eta_seconds = int(eta_seconds)
eta = datetime.timedelta(seconds=eta_seconds)
except ZeroDivisionError:
@@ -45,11 +60,12 @@ class ExperimentStatus(object):
return "\n".join(strings)
def GetStatusString(self):
+ """Get the status string of all the benchmark_runs."""
status_bins = {}
for benchmark_run in self.experiment.benchmark_runs:
- if benchmark_run.status not in status_bins:
- status_bins[benchmark_run.status] = []
- status_bins[benchmark_run.status].append(benchmark_run)
+ if benchmark_run.timeline.GetLastEvent() not in status_bins:
+ status_bins[benchmark_run.timeline.GetLastEvent()] = []
+ status_bins[benchmark_run.timeline.GetLastEvent()].append(benchmark_run)
status_strings = []
for key, val in status_bins.items():
@@ -64,6 +80,9 @@ class ExperimentStatus(object):
def _GetNamesAndIterations(self, benchmark_runs):
strings = []
+ t = time.time()
for benchmark_run in benchmark_runs:
- strings.append("'%s'" % benchmark_run.name)
+ t_last = benchmark_run.timeline.GetLastEventTime()
+ elapsed = str(datetime.timedelta(seconds=int(t-t_last)))
+ strings.append("'{0}' {1}".format(benchmark_run.name, elapsed))
return " %s (%s)" % (len(strings), ", ".join(strings))
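
The new ETA only counts jobs that actually ran (num_run_complete excludes
cache hits) and credits the time already spent on the currently running job.
With invented numbers:

    # 10 jobs total, 4 complete (none cached), 400s elapsed,
    # and the current job has been running for 30s:
    time_completed_jobs = 400.0 - 30.0       # 370.0s spent on finished jobs
    avg = time_completed_jobs / 4            # 92.5s per finished job
    eta = (10 - 4 - 1) * avg + (avg - 30.0)  # 5 untouched jobs + remainder
    # -> 462.5 + 62.5 = 525.0 seconds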
diff --git a/crosperf/label.py b/crosperf/label.py
index 3b6fb804..64ce352f 100644
--- a/crosperf/label.py
+++ b/crosperf/label.py
@@ -2,14 +2,24 @@
# Copyright 2011 Google Inc. All Rights Reserved.
+"""The label of benchmarks."""
+
+import os
from utils.file_utils import FileUtils
class Label(object):
- def __init__(self, name, chromeos_image, chromeos_root, board):
+ def __init__(self, name, chromeos_image, chromeos_root, board, remote,
+ image_args):
+ # Expand ~
+ chromeos_root = os.path.expanduser(chromeos_root)
+ chromeos_image = os.path.expanduser(chromeos_image)
+
self.name = name
self.chromeos_image = chromeos_image
self.board = board
+ self.remote = remote
+ self.image_args = image_args
if not chromeos_root:
chromeos_root = FileUtils().ChromeOSRootFromImage(chromeos_image)
@@ -24,3 +34,17 @@ class Label(object):
% (name, chromeos_root))
self.chromeos_root = chromeos_root
+
+
+class MockLabel(object):
+ def __init__(self, name, chromeos_image, chromeos_root, board, remote,
+ image_args):
+ self.name = name
+ self.chromeos_image = chromeos_image
+ self.board = board
+ self.remote = remote
+ if not chromeos_root:
+ self.chromeos_root = "/tmp/chromeos_root"
+ else:
+ self.chromeos_root = chromeos_root
+ self.image_args = image_args
diff --git a/crosperf/machine_manager.py b/crosperf/machine_manager.py
index 8562e929..9eb9bcdf 100644
--- a/crosperf/machine_manager.py
+++ b/crosperf/machine_manager.py
@@ -1,16 +1,23 @@
+#!/usr/bin/python
+#
+# Copyright 2012 Google Inc. All Rights Reserved.
+
import hashlib
import image_chromeos
import lock_machine
import math
import os.path
+import re
import sys
import threading
import time
-from image_checksummer import ImageChecksummer
+
from utils import command_executer
from utils import logger
from utils.file_utils import FileUtils
+from image_checksummer import ImageChecksummer
+
CHECKSUM_FILE = "/usr/local/osimage_checksum_file"
@@ -26,7 +33,9 @@ class CrosMachine(object):
self._GetMemoryInfo()
self._GetCPUInfo()
self._ComputeMachineChecksumString()
- self._ComputeMachineChecksum()
+ self._GetMachineID()
+ self.machine_checksum = self._GetMD5Checksum(self.checksum_string)
+ self.machine_id_checksum = self._GetMD5Checksum(self.machine_id)
def _ParseMemoryInfo(self):
line = self.meminfo.splitlines()[0]
@@ -60,7 +69,7 @@ class CrosMachine(object):
self.phys_kbytes = phys_kbytes
def _GetMemoryInfo(self):
- #TODO yunlian: when the machine in rebooting, it will not return
+ #TODO yunlian: when the machine in rebooting, it will not return
#meminfo, the assert does not catch it either
ce = command_executer.GetCommandExecuter()
command = "cat /proc/meminfo"
@@ -94,11 +103,22 @@ class CrosMachine(object):
self.checksum_string += line
self.checksum_string += " " + str(self.phys_kbytes)
- def _ComputeMachineChecksum(self):
- if self.checksum_string:
- self.machine_checksum = hashlib.md5(self.checksum_string).hexdigest()
+ def _GetMD5Checksum(self, ss):
+ if ss:
+ return hashlib.md5(ss).hexdigest()
else:
- self.machine_checksum = ""
+ return ""
+
+ def _GetMachineID(self):
+ ce = command_executer.GetCommandExecuter()
+ command = "ifconfig"
+ ret, if_out, _ = ce.CrosRunCommand(
+ command, return_output=True,
+ machine=self.name, chromeos_root=self.chromeos_root)
+ b = if_out.splitlines()
+ a = [l for l in b if "lan" in l]
+ self.machine_id = a[0]
+ assert ret == 0, "Could not get machine_id from machine: %s" % self.name
def __str__(self):
l = []
@@ -118,52 +138,55 @@ class MachineManager(object):
self.image_lock = threading.Lock()
self.num_reimages = 0
self.chromeos_root = None
- if os.path.isdir(lock_machine.FileLock.LOCKS_DIR):
+ self.machine_checksum = {}
+ self.machine_checksum_string = {}
+
+ if os.path.isdir(lock_machine.Machine.LOCKS_DIR):
self.no_lock = False
else:
self.no_lock = True
- self.initialized = False
+ self._initialized_machines = []
self.chromeos_root = chromeos_root
- def ImageMachine(self, machine, chromeos_image, board=None):
- checksum = ImageChecksummer().Checksum(chromeos_image)
+ def ImageMachine(self, machine, label):
+ checksum = ImageChecksummer().Checksum(label.chromeos_image)
if machine.checksum == checksum:
return
- chromeos_root = FileUtils().ChromeOSRootFromImage(chromeos_image)
+ chromeos_root = label.chromeos_root
if not chromeos_root:
chromeos_root = self.chromeos_root
- image_args = [image_chromeos.__file__,
- "--chromeos_root=%s" % chromeos_root,
- "--image=%s" % chromeos_image,
- "--remote=%s" % machine.name]
- if board:
- image_args.append("--board=%s" % board)
+ image_chromeos_args = [image_chromeos.__file__,
+ "--chromeos_root=%s" % chromeos_root,
+ "--image=%s" % label.chromeos_image,
+ "--image_args=%s" % label.image_args,
+ "--remote=%s" % machine.name]
+ if label.board:
+ image_chromeos_args.append("--board=%s" % label.board)
# Currently can't image two machines at once.
# So have to serialized on this lock.
ce = command_executer.GetCommandExecuter()
with self.image_lock:
- retval = ce.RunCommand(" ".join(["python"] + image_args))
- self.num_reimages += 1
+ retval = ce.RunCommand(" ".join(["python"] + image_chromeos_args))
if retval:
raise Exception("Could not image machine: '%s'." % machine.name)
+ else:
+ self.num_reimages += 1
machine.checksum = checksum
- machine.image = chromeos_image
+ machine.image = label.chromeos_image
return retval
- def ComputeCommonCheckSum(self):
- self.machine_checksum = ""
- for machine in self.GetMachines():
+ def ComputeCommonCheckSum(self, label):
+ for machine in self.GetMachines(label):
if machine.machine_checksum:
- self.machine_checksum = machine.machine_checksum
+ self.machine_checksum[label.name] = machine.machine_checksum
break
- def ComputeCommonCheckSumString(self):
- self.machine_checksum_string = ""
- for machine in self.GetMachines():
+ def ComputeCommonCheckSumString(self, label):
+ for machine in self.GetMachines(label):
if machine.checksum_string:
- self.machine_checksum_string = machine.checksum_string
+ self.machine_checksum_string[label.name] = machine.checksum_string
break
def _TryToLockMachine(self, cros_machine):
@@ -198,28 +221,28 @@ class MachineManager(object):
machine_name)
self._all_machines.append(cm)
- def AreAllMachineSame(self):
- checksums = [m.machine_checksum for m in self.GetMachines()]
+ def AreAllMachineSame(self, label):
+ checksums = [m.machine_checksum for m in self.GetMachines(label)]
return len(set(checksums)) == 1
- def AcquireMachine(self, chromeos_image):
+ def AcquireMachine(self, chromeos_image, label):
image_checksum = ImageChecksummer().Checksum(chromeos_image)
+ machines = self.GetMachines(label)
with self._lock:
# Lazily external lock machines
- if not self.initialized:
- for m in self._all_machines:
+
+ for m in machines:
+ if m not in self._initialized_machines:
+ self._initialized_machines.append(m)
self._TryToLockMachine(m)
- self.initialized = True
- for m in self._all_machines:
m.released_time = time.time()
-
- if not self.AreAllMachineSame():
+ if not self.AreAllMachineSame(label):
logger.GetLogger().LogFatal("-- not all the machines are identical")
- if not self._machines:
+ if not self.GetAvailableMachines(label):
machine_names = []
- for machine in self._all_machines:
+ for machine in machines:
machine_names.append(machine.name)
- logger.GetLogger().LogFatal("Could not acquire any of the"
+ logger.GetLogger().LogFatal("Could not acquire any of the "
"following machines: '%s'"
% ", ".join(machine_names))
@@ -227,12 +250,14 @@ class MachineManager(object):
### if (m.locked and time.time() - m.released_time < 10 and
### m.checksum == image_checksum):
### return None
- for m in [machine for machine in self._machines if not machine.locked]:
+ for m in [machine for machine in self.GetAvailableMachines(label)
+ if not machine.locked]:
if m.checksum == image_checksum:
m.locked = True
m.autotest_run = threading.current_thread()
return m
- for m in [machine for machine in self._machines if not machine.locked]:
+ for m in [machine for machine in self.GetAvailableMachines(label)
+ if not machine.locked]:
if not m.checksum:
m.locked = True
m.autotest_run = threading.current_thread()
@@ -243,15 +268,23 @@ class MachineManager(object):
# the number of re-images.
# TODO(asharif): If we centralize the thread-scheduler, we wont need this
# code and can implement minimal reimaging code more cleanly.
- for m in [machine for machine in self._machines if not machine.locked]:
+ for m in [machine for machine in self.GetAvailableMachines(label)
+ if not machine.locked]:
if time.time() - m.released_time > 20:
m.locked = True
m.autotest_run = threading.current_thread()
return m
return None
- def GetMachines(self):
- return self._all_machines
+ def GetAvailableMachines(self, label=None):
+ if not label:
+ return self._machines
+ return [m for m in self._machines if m.name in label.remote]
+
+ def GetMachines(self, label=None):
+ if not label:
+ return self._all_machines
+ return [m for m in self._all_machines if m.name in label.remote]
def ReleaseMachine(self, machine):
with self._lock:
@@ -289,7 +322,7 @@ class MachineManager(object):
for m in self._machines:
if m.autotest_run:
autotest_name = m.autotest_run.name
- autotest_status = m.autotest_run.status
+ autotest_status = m.autotest_run.timeline.GetLastEvent()
else:
autotest_name = ""
autotest_status = ""
@@ -305,26 +338,73 @@ class MachineManager(object):
table.append(machine_string)
return "Machine Status:\n%s" % "\n".join(table)
+ def GetAllCPUInfo(self, labels):
+    """Get cpuinfo for labels; merge labels whose cpuinfo is identical."""
+ dic = {}
+ for label in labels:
+ for machine in self._all_machines:
+ if machine.name in label.remote:
+ if machine.cpuinfo not in dic:
+ dic[machine.cpuinfo] = [label.name]
+ else:
+ dic[machine.cpuinfo].append(label.name)
+ break
+ output = ""
+ for key, v in dic.items():
+ output += " ".join(v)
+ output += "\n-------------------\n"
+ output += key
+ output += "\n\n\n"
+ return output
-class MockMachineManager(object):
- def __init__(self):
- self.machines = []
- def ImageMachine(self, machine_name, chromeos_image, board=None):
- return 0
+class MockCrosMachine(CrosMachine):
+ def __init__(self, name, chromeos_root):
+ self.name = name
+ self.image = None
+ self.checksum = None
+ self.locked = False
+ self.released_time = time.time()
+ self.autotest_run = None
+ self.chromeos_root = chromeos_root
+ self.checksum_string = re.sub("\d", "", name)
+    # In test, we assume "lumpy1" and "lumpy2" are the same machine.
+ self.machine_checksum = self._GetMD5Checksum(self.checksum_string)
+
+
+class MockMachineManager(MachineManager):
+
+ def __init__(self, chromeos_root):
+ super(MockMachineManager, self).__init__(chromeos_root)
+
+ def _TryToLockMachine(self, cros_machine):
+ self._machines.append(cros_machine)
+ cros_machine.checksum = ""
def AddMachine(self, machine_name):
- self.machines.append(CrosMachine(machine_name))
+ with self._lock:
+ for m in self._all_machines:
+ assert m.name != machine_name, "Tried to double-add %s" % machine_name
+ cm = MockCrosMachine(machine_name, self.chromeos_root)
+ assert cm.machine_checksum, ("Could not find checksum for machine %s" %
+ machine_name)
+ self._all_machines.append(cm)
- def AcquireMachine(self, chromeos_image):
- for machine in self.machines:
+ def AcquireMachine(self, chromeos_image, label):
+ for machine in self._all_machines:
if not machine.locked:
machine.locked = True
return machine
return None
+ def ImageMachine(self, machine_name, label):
+ return 0
+
def ReleaseMachine(self, machine):
machine.locked = False
- def GetMachines(self):
- return self.machines
+ def GetMachines(self, label):
+ return self._all_machines
+
+ def GetAvailableMachines(self, label):
+ return self._all_machines
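
_GetMachineID above identifies a device by the first ifconfig line containing
"lan" (the interface line carrying the hardware address), and its MD5 becomes
machine_id_checksum, which the new same_machine cache condition keys on.
Illustrative, with invented output:

    if_out = ("eth0   Link encap:Ethernet  HWaddr 00:00:00:00:00:01\n"
              "wlan0  Link encap:Ethernet  HWaddr 00:11:22:33:44:55\n")
    machine_id = [l for l in if_out.splitlines() if "lan" in l][0]
    # -> the wlan0 line, hashed via _GetMD5Checksum into machine_id_checksum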
diff --git a/crosperf/machine_manager_unittest.py b/crosperf/machine_manager_unittest.py
new file mode 100755
index 00000000..98baf456
--- /dev/null
+++ b/crosperf/machine_manager_unittest.py
@@ -0,0 +1,65 @@
+#!/usr/bin/python
+
+# Copyright 2012 Google Inc. All Rights Reserved.
+
+"""Unittest for machine_manager."""
+import unittest
+
+import label
+import machine_manager
+
+
+class MyMachineManager(machine_manager.MachineManager):
+
+ def __init__(self, chromeos_root):
+ super(MyMachineManager, self).__init__(chromeos_root)
+
+ def _TryToLockMachine(self, cros_machine):
+ self._machines.append(cros_machine)
+ cros_machine.checksum = ""
+
+ def AddMachine(self, machine_name):
+ with self._lock:
+ for m in self._all_machines:
+ assert m.name != machine_name, "Tried to double-add %s" % machine_name
+ cm = machine_manager.MockCrosMachine(machine_name, self.chromeos_root)
+ assert cm.machine_checksum, ("Could not find checksum for machine %s" %
+ machine_name)
+ self._all_machines.append(cm)
+
+CHROMEOS_ROOT = "/tmp/chromeos-root"
+MACHINE_NAMES = ["lumpy1", "lumpy2", "lumpy3", "daisy1", "daisy2"]
+LABEL_LUMPY = label.MockLabel("lumpy", "image", CHROMEOS_ROOT, "lumpy",
+ ["lumpy1", "lumpy2", "lumpy3", "lumpy4"], "")
+LABEL_MIX = label.MockLabel("mix", "image", CHROMEOS_ROOT, "mix",
+ ["daisy1", "daisy2", "lumpy3", "lumpy4"], "")
+
+
+class MachineManagerTest(unittest.TestCase):
+
+ def testAreAllMachineSame(self):
+ manager = MyMachineManager(CHROMEOS_ROOT)
+ for m in MACHINE_NAMES:
+ manager.AddMachine(m)
+ self.assertEqual(manager.AreAllMachineSame(LABEL_LUMPY), True)
+ self.assertEqual(manager.AreAllMachineSame(LABEL_MIX), False)
+
+ def testGetMachines(self):
+ manager = MyMachineManager(CHROMEOS_ROOT)
+ for m in MACHINE_NAMES:
+ manager.AddMachine(m)
+ names = [m.name for m in manager.GetMachines(LABEL_LUMPY)]
+ self.assertEqual(names, ["lumpy1", "lumpy2", "lumpy3"])
+
+ def testGetAvailableMachines(self):
+ manager = MyMachineManager(CHROMEOS_ROOT)
+ for m in MACHINE_NAMES:
+ manager.AddMachine(m)
+ for m in manager._all_machines:
+ if int(m.name[-1]) % 2:
+ manager._TryToLockMachine(m)
+ names = [m.name for m in manager.GetAvailableMachines(LABEL_LUMPY)]
+ self.assertEqual(names, ["lumpy1", "lumpy3"])
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/crosperf/perf_table.py b/crosperf/perf_table.py
new file mode 100644
index 00000000..b3387ea8
--- /dev/null
+++ b/crosperf/perf_table.py
@@ -0,0 +1,58 @@
+#!/usr/bin/python
+#
+# Copyright 2012 Google Inc. All Rights Reserved.
+"""Parse perf report data for tabulator."""
+
+import os
+
+from utils import perf_diff
+
+def ParsePerfReport(perf_file):
+  """Placeholder parser; returns a canned dict in the expected shape."""
+
+ return {"cycles": {"foo": 10, "bar": 20},
+ "cache_miss": {"foo": 20, "bar": 10}}
+
+
+class PerfTable(object):
+ """The class to generate dicts for tabulator."""
+
+ def __init__(self, experiment, label_names):
+ self._experiment = experiment
+ self._label_names = label_names
+ self.perf_data = {}
+ self.GenerateData()
+ # {benchmark:{perf_event1:[[{func1:number, func2:number},
+ # {func1: number, func2: number}]], ...},
+ # benchmark2:...}
+
+ def GenerateData(self):
+ for label in self._label_names:
+ for benchmark in self._experiment.benchmarks:
+ for i in range(1, benchmark.iterations+1):
+ dir_name = label + benchmark.name + str(i)
+ dir_name = filter(str.isalnum, dir_name)
+ perf_file = os.path.join(self._experiment.results_directory,
+ dir_name,
+ "perf.data.report.0")
+ self.ReadPerfReport(perf_file, label, benchmark.name, i - 1)
+
+ def ReadPerfReport(self, perf_file, label, benchmark_name, iteration):
+ """Add the data from one run to the dict."""
+ if not os.path.isfile(perf_file):
+ return
+ perf_of_run = perf_diff.GetPerfDictFromReport(perf_file)
+ if benchmark_name not in self.perf_data:
+ self.perf_data[benchmark_name] = {}
+ for event in perf_of_run:
+ self.perf_data[benchmark_name][event] = []
+ ben_data = self.perf_data[benchmark_name]
+
+ label_index = self._label_names.index(label)
+ for event in ben_data:
+ while len(ben_data[event]) <= label_index:
+ ben_data[event].append([])
+ data_for_label = ben_data[event][label_index]
+ while len(data_for_label) <= iteration:
+ data_for_label.append({})
+ data_for_label[iteration] = perf_of_run[event]
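
The comment in __init__ compresses the shape of perf_data; expanded with
invented numbers (one benchmark, two labels, two iterations):

    perf_data = {
        "PageCycler": {
            "cycles": [                    # one inner list per label, in order
                [{"foo": 10, "bar": 20},   # label 0, iteration 1
                 {"foo": 12, "bar": 18}],  # label 0, iteration 2
                [{"foo": 11, "bar": 21},   # label 1, iteration 1
                 {"foo": 13, "bar": 17}],  # label 1, iteration 2
            ],
        },
    }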
diff --git a/crosperf/results_cache.py b/crosperf/results_cache.py
index 1c33e720..c0600962 100644
--- a/crosperf/results_cache.py
+++ b/crosperf/results_cache.py
@@ -11,7 +11,6 @@ import re
import tempfile
from utils import command_executer
-from utils import logger
from utils import misc
from image_checksummer import ImageChecksummer
@@ -28,11 +27,13 @@ class Result(object):
what the key of the cache is. For runs with perf, it stores perf.data,
perf.report, etc. The key generation is handled by the ResultsCache class.
"""
- def __init__(self, chromeos_root, logger):
+
+ def __init__(self, chromeos_root, logger, label_name):
self._chromeos_root = chromeos_root
self._logger = logger
self._ce = command_executer.GetCommandExecuter(self._logger)
self._temp_dir = None
+ self.label_name = label_name
def _CopyFilesTo(self, dest_dir, files_to_copy):
file_index = 0
@@ -63,7 +64,7 @@ class Result(object):
command = ("python %s --no-color --csv %s" %
(generate_test_report,
self.results_dir))
- [ret, out, err] = self._ce.RunCommand(command, return_output=True)
+ [_, out, _] = self._ce.RunCommand(command, return_output=True)
keyvals_dict = {}
for line in out.splitlines():
tokens = re.split("=|,", line)
@@ -76,7 +77,7 @@ class Result(object):
return keyvals_dict
def _GetResultsDir(self):
- mo = re.search("Results placed in (\S+)", self.out)
+ mo = re.search(r"Results placed in (\S+)", self.out)
if mo:
result = mo.group(1)
return result
@@ -85,7 +86,7 @@ class Result(object):
def _FindFilesInResultsDir(self, find_args):
command = "find %s %s" % (self.results_dir,
find_args)
- ret, out, err = self._ce.RunCommand(command, return_output=True)
+ ret, out, _ = self._ce.RunCommand(command, return_output=True)
if ret:
raise Exception("Could not run find command!")
return out
@@ -108,23 +109,22 @@ class Result(object):
raise Exception("Perf report file already exists: %s" %
perf_report_file)
chroot_perf_report_file = misc.GetInsideChrootPath(self._chromeos_root,
- perf_report_file)
+ perf_report_file)
command = ("/usr/sbin/perf report "
"-n "
"--symfs /build/%s "
"--vmlinux /build/%s/usr/lib/debug/boot/vmlinux "
"--kallsyms /build/%s/boot/System.map-* "
"-i %s --stdio "
- "| head -n1000 "
- "| tee %s" %
+ "> %s" %
(self._board,
self._board,
self._board,
chroot_perf_data_file,
chroot_perf_report_file))
- ret, out, err = self._ce.ChrootRunCommand(self._chromeos_root,
- command,
- return_output=True)
+ self._ce.ChrootRunCommand(self._chromeos_root,
+ command)
+
# Add a keyval to the dictionary for the events captured.
perf_report_files.append(
misc.GetOutsideChrootPath(self._chromeos_root,
@@ -136,7 +136,7 @@ class Result(object):
for perf_report_file in self.perf_report_files:
with open(perf_report_file, "r") as f:
report_contents = f.read()
- for group in re.findall("Events: (\S+) (\S+)", report_contents):
+ for group in re.findall(r"Events: (\S+) (\S+)", report_contents):
num_events = group[0]
event_name = group[1]
key = "perf_%s_%s" % (report_id, event_name)
@@ -188,14 +188,12 @@ class Result(object):
self.perf_data_files = self._GetPerfDataFiles()
self.perf_report_files = self._GetPerfReportFiles()
self._ProcessResults()
- self.CleanUp()
def CleanUp(self):
if self._temp_dir:
command = "rm -rf %s" % self._temp_dir
self._ce.RunCommand(command)
-
def StoreToCacheDir(self, cache_dir, machine_manager):
# Create the dir if it doesn't exist.
command = "mkdir -p %s" % cache_dir
@@ -221,17 +219,18 @@ class Result(object):
# TODO(asharif): Make machine_manager a singleton, and don't pass it into
# this function.
with open(os.path.join(cache_dir, MACHINE_FILE), "w") as f:
- f.write(machine_manager.machine_checksum_string)
+ f.write(machine_manager.machine_checksum_string[self.label_name])
@classmethod
- def CreateFromRun(cls, logger, chromeos_root, board, out, err, retval):
- result = cls(chromeos_root, logger)
+ def CreateFromRun(cls, logger, chromeos_root, board, label_name,
+ out, err, retval):
+ result = cls(chromeos_root, logger, label_name)
result._PopulateFromRun(board, out, err, retval)
return result
@classmethod
- def CreateFromCacheHit(cls, chromeos_root, logger, cache_dir):
- result = cls(chromeos_root, logger)
+ def CreateFromCacheHit(cls, chromeos_root, logger, cache_dir, label_name):
+ result = cls(chromeos_root, logger, label_name)
try:
result._PopulateFromCacheDir(cache_dir)
except Exception as e:
@@ -260,17 +259,21 @@ class CacheConditions(object):
# Cache hit if the image path matches the cached image path.
IMAGE_PATH_MATCH = 5
+  # Cache hit if the uuid of the hard disk matches the cached one
+
+ SAME_MACHINE_MATCH = 6
+
class ResultsCache(object):
""" This class manages the key of the cached runs without worrying about what
is exactly stored (value). The value generation is handled by the Results
class.
"""
- CACHE_VERSION = 5
+ CACHE_VERSION = 6
def Init(self, chromeos_image, chromeos_root, autotest_name, iteration,
autotest_args, machine_manager, board, cache_conditions,
- logger_to_use):
+ logger_to_use, label):
self.chromeos_image = chromeos_image
self.chromeos_root = chromeos_root
self.autotest_name = autotest_name
@@ -281,6 +284,7 @@ class ResultsCache(object):
self.machine_manager = machine_manager
self._logger = logger_to_use
self._ce = command_executer.GetCommandExecuter(self._logger)
+ self.label = label
def _GetCacheDirForRead(self):
glob_path = self._FormCacheDir(self._GetCacheKeyList(True))
@@ -288,9 +292,6 @@ class ResultsCache(object):
if matching_dirs:
# Cache file found.
- if len(matching_dirs) > 1:
- self._logger.LogError("Multiple compatible cache files: %s." %
- " ".join(matching_dirs))
return matching_dirs[0]
else:
return None
@@ -308,7 +309,7 @@ class ResultsCache(object):
if read and CacheConditions.MACHINES_MATCH not in self.cache_conditions:
machine_checksum = "*"
else:
- machine_checksum = self.machine_manager.machine_checksum
+ machine_checksum = self.machine_manager.machine_checksum[self.label.name]
if read and CacheConditions.CHECKSUMS_MATCH not in self.cache_conditions:
checksum = "*"
else:
@@ -319,13 +320,22 @@ class ResultsCache(object):
else:
image_path_checksum = hashlib.md5(self.chromeos_image).hexdigest()
+ if read and CacheConditions.SAME_MACHINE_MATCH not in self.cache_conditions:
+ machine_id_checksum = "*"
+ else:
+ for machine in self.machine_manager.GetMachines(self.label):
+ if machine.name == self.label.remote[0]:
+ machine_id_checksum = machine.machine_id_checksum
+ break
+
autotest_args_checksum = hashlib.md5(
- "".join(self.autotest_args)).hexdigest()
+ "".join(self.autotest_args)).hexdigest()
return (image_path_checksum,
self.autotest_name, str(self.iteration),
autotest_args_checksum,
checksum,
machine_checksum,
+ machine_id_checksum,
str(self.CACHE_VERSION))
def ReadResult(self):
@@ -342,7 +352,7 @@ class ResultsCache(object):
self._logger.LogOutput("Trying to read from cache dir: %s" % cache_dir)
result = Result.CreateFromCacheHit(self.chromeos_root,
- self._logger, cache_dir)
+ self._logger, cache_dir, self.label.name)
if not result:
return None
@@ -358,12 +368,20 @@ class ResultsCache(object):
result.StoreToCacheDir(cache_dir, self.machine_manager)
-class MockResultsCache(object):
+class MockResultsCache(ResultsCache):
def Init(self, *args):
pass
def ReadResult(self):
- return Result("Results placed in /tmp/test", "", 0)
+ return None
def StoreResult(self, result):
pass
+
+
+class MockResult(Result):
+ def _PopulateFromRun(self, out, err, retval):
+ self.out = out
+ self.err = err
+ self.retval = retval
+
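
With CACHE_VERSION bumped to 6, _GetCacheKeyList gains one component. An
illustrative key tuple (hash values invented; "*" is the wildcard substituted
on reads whenever the matching cache condition is waived):

    key = ("3b5cc7...",    # md5 of the image path
           "PageCycler",   # autotest name
           "1",            # iteration
           "d41d8c...",    # md5 of the autotest args
           "*",            # image contents checksum, waived here
           "9ac14f...",    # per-label machine checksum
           "77bd0c...",    # new: machine_id checksum for same_machine
           "6")            # CACHE_VERSION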
diff --git a/crosperf/results_organizer.py b/crosperf/results_organizer.py
index 0071387b..810186b2 100644
--- a/crosperf/results_organizer.py
+++ b/crosperf/results_organizer.py
@@ -1,6 +1,8 @@
#!/usr/bin/python
# Copyright 2012 Google Inc. All Rights Reserved.
+"""Parse data from benchmark_runs for tabulator."""
+import re
class ResultOrganizer(object):
@@ -18,18 +20,22 @@ class ResultOrganizer(object):
]}.
"""
- def __init__(self, benchmark_runs, labels):
+ def __init__(self, benchmark_runs, labels, benchmarks=None):
self.result = {}
self.labels = []
+ self.prog = re.compile(r"(\w+)\{(\d+)\}")
+ self.benchmarks = benchmarks
+ if not self.benchmarks:
+ self.benchmarks = []
for label in labels:
self.labels.append(label.name)
for benchmark_run in benchmark_runs:
- benchmark_name = benchmark_run.benchmark_name
+ benchmark_name = benchmark_run.benchmark.name
if benchmark_name not in self.result:
self.result[benchmark_name] = []
while len(self.result[benchmark_name]) < len(labels):
self.result[benchmark_name].append([])
- label_index = self.labels.index(benchmark_run.label_name)
+ label_index = self.labels.index(benchmark_run.label.name)
cur_table = self.result[benchmark_name][label_index]
index = benchmark_run.iteration - 1
while index >= len(cur_table):
@@ -40,3 +46,50 @@ class ResultOrganizer(object):
for autotest_key in benchmark_run.result.keyvals:
result_value = benchmark_run.result.keyvals[autotest_key]
cur_dict[autotest_key] = result_value
+ self._DuplicatePass()
+
+ def _DuplicatePass(self):
+ for bench, data in self.result.items():
+ max_dup = self._GetMaxDup(data)
+ if not max_dup:
+ continue
+ for label in data:
+ index = data.index(label)
+ data[index] = self._GetNonDupLabel(max_dup, label)
+ self._AdjustIteration(max_dup, bench)
+
+ def _GetMaxDup(self, data):
+ """Find the maximum i inside ABCD{i}."""
+ max_dup = 0
+ for label in data:
+ for run in label:
+ for key in run:
+ if re.match(self.prog, key):
+ max_dup = max(max_dup,
+ int(re.search(self.prog, key).group(2)))
+ return max_dup
+
+ def _GetNonDupLabel(self, max_dup, label):
+ """Create new list for the runs of the same label."""
+ new_label = []
+ for run in label:
+ start_index = len(new_label)
+ new_label.append(dict(run))
+ for i in range(max_dup):
+ new_label.append({})
+ new_run = new_label[start_index]
+ for key, value in new_run.items():
+ if re.match(self.prog, key):
+ new_key = re.search(self.prog, key).group(1)
+ index = int(re.search(self.prog, key).group(2))
+ new_label[start_index+index][new_key] = str(value)
+ del new_run[key]
+ return new_label
+
+ def _AdjustIteration(self, max_dup, bench):
+    """Adjust the iteration numbers if they have keys like ABCD{i}."""
+ for benchmark in self.benchmarks:
+ if benchmark.name == bench:
+ if not benchmark.iteration_adjusted:
+ benchmark.iteration_adjusted = True
+          benchmark.iterations *= (max_dup + 1)
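
Worked example of the duplicate pass: keys matching r"(\w+)\{(\d+)\}" fan one
stored run out into extra iterations:

    run = {"time": "1.2", "time{1}": "1.3", "time{2}": "1.1"}
    # max_dup is 2, so _GetNonDupLabel(2, [run]) yields three iterations:
    #   [{"time": "1.2"}, {"time": "1.3"}, {"time": "1.1"}]
    # and _AdjustIteration multiplies benchmark.iterations by max_dup + 1,
    # guarded by the iteration_adjusted flag added to Benchmark above.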
diff --git a/crosperf/results_report.py b/crosperf/results_report.py
index b591370a..f7434132 100644
--- a/crosperf/results_report.py
+++ b/crosperf/results_report.py
@@ -2,11 +2,12 @@
# Copyright 2011 Google Inc. All Rights Reserved.
-import math
+from utils.tabulator import *
+
from column_chart import ColumnChart
-from results_sorter import ResultSorter
from results_organizer import ResultOrganizer
-from utils.tabulator import *
+from perf_table import PerfTable
+
class ResultsReport(object):
MAX_COLOR_CODE = 255
@@ -26,7 +27,7 @@ class ResultsReport(object):
labels[benchmark_run.label_name].append(benchmark_run)
return labels
- def GetFullTables(self):
+ def GetFullTables(self, perf=False):
columns = [Column(NonEmptyCountResult(),
Format(),
"Completed"),
@@ -41,23 +42,30 @@ class ResultsReport(object):
Column(StdResult(),
Format())
]
- return self._GetTables(self.labels, self.benchmark_runs, columns)
+ if not perf:
+ return self._GetTables(self.labels, self.benchmark_runs, columns)
+ return self. _GetPerfTables(self.labels, columns)
- def GetSummaryTables(self):
- columns = [Column(AmeanResult(),
+ def GetSummaryTables(self, perf=False):
+ columns = [Column(NonEmptyCountResult(),
+ Format(),
+ "Completed"),
+ Column(AmeanResult(),
Format()),
Column(StdResult(),
Format(), "StdDev"),
Column(CoeffVarResult(),
- CoeffVarFormat(), "Mean/StdDev"),
+ CoeffVarFormat(), "StdDev/Mean"),
Column(GmeanRatioResult(),
RatioFormat(), "GmeanSpeedup"),
Column(GmeanRatioResult(),
ColorBoxFormat(), " "),
- Column(StatsSignificant(),
- Format(), "p-value")
+ Column(PValueResult(),
+ PValueFormat(), "p-value")
]
- return self._GetTables(self.labels, self.benchmark_runs, columns)
+ if not perf:
+ return self._GetTables(self.labels, self.benchmark_runs, columns)
+ return self. _GetPerfTables(self.labels, columns)
def _ParseColumn(self, columns, iteration):
new_column = []
@@ -78,9 +86,17 @@ class ResultsReport(object):
return False
return True
+ def _GetTableHeader(self, benchmark):
+ benchmark_info = ("Benchmark: {0}; Iterations: {1}"
+ .format(benchmark.name, benchmark.iterations))
+ cell = Cell()
+ cell.string_value = benchmark_info
+ cell.header = True
+ return [[cell]]
+
def _GetTables(self, labels, benchmark_runs, columns):
tables = []
- ro = ResultOrganizer(benchmark_runs, labels)
+ ro = ResultOrganizer(benchmark_runs, labels, self.benchmarks)
result = ro.result
label_name = ro.labels
for item in result:
@@ -88,11 +104,7 @@ class ResultsReport(object):
for benchmark in self.benchmarks:
if benchmark.name == item:
break
- benchmark_info = ("Benchmark: {0}; Iterations: {1}"
- .format(benchmark.name, benchmark.iterations))
- cell = Cell()
- cell.string_value = benchmark_info
- ben_table = [[cell]]
+ ben_table = self._GetTableHeader(benchmark)
if self._AreAllRunsEmpty(runs):
cell = Cell()
@@ -109,8 +121,41 @@ class ResultsReport(object):
tables.append(cell_table)
return tables
+ def _GetPerfTables(self, labels, columns):
+ tables = []
+ label_names = [label.name for label in labels]
+ p_table = PerfTable(self.experiment, label_names)
+
+ if not p_table.perf_data:
+ return tables
+
+ for benchmark in p_table.perf_data:
+ ben = None
+ for ben in self.benchmarks:
+ if ben.name == benchmark:
+ break
+
+ ben_table = self._GetTableHeader(ben)
+ tables.append(ben_table)
+ benchmark_data = p_table.perf_data[benchmark]
+ table = []
+ for event in benchmark_data:
+ tg = TableGenerator(benchmark_data[event], label_names)
+ table = tg.GetTable()
+ parsed_columns = self._ParseColumn(columns, ben.iterations)
+ tf = TableFormatter(table, parsed_columns)
+ tf.GenerateCellTable()
+ tf.AddColumnName()
+ tf.AddLabelName()
+ tf.AddHeader(str(event))
+ table = tf.GetCellTable(headers=False)
+ tables.append(table)
+ return tables
+
def PrintTables(self, tables, out_to):
output = ""
+ if not tables:
+ return output
for table in tables:
if out_to == "HTML":
tp = TablePrinter(table, TablePrinter.HTML)
@@ -126,6 +171,8 @@ class ResultsReport(object):
pass
output += tp.Print()
return output
+
+
class TextResultsReport(ResultsReport):
TEXT = """
===========================================
@@ -137,10 +184,30 @@ Summary
-------------------------------------------
%s
+
+Number of re-images: %s
+
+-------------------------------------------
+Benchmark Run Status
-------------------------------------------
+%s
+
+
+-------------------------------------------
+Perf Data
+-------------------------------------------
+%s
+
+
+
Experiment File
-------------------------------------------
%s
+
+
+CPUInfo
+-------------------------------------------
+%s
===========================================
"""
@@ -148,17 +215,46 @@ Experiment File
super(TextResultsReport, self).__init__(experiment)
self.email = email
+ def GetStatusTable(self):
+ """Generate the status table by the tabulator."""
+ table = [["", ""]]
+ columns = [Column(LiteralResult(iteration=0), Format(), "Status"),
+ Column(LiteralResult(iteration=1), Format(), "Failing Reason")]
+
+ for benchmark_run in self.benchmark_runs:
+ status = [benchmark_run.name, [benchmark_run.timeline.GetLastEvent(),
+ benchmark_run.failure_reason]]
+ table.append(status)
+ tf = TableFormatter(table, columns)
+ cell_table = tf.GetCellTable()
+ return [cell_table]
+
def GetReport(self):
+ """Generate the report for email and console."""
+ status_table = self.GetStatusTable()
summary_table = self.GetSummaryTables()
full_table = self.GetFullTables()
+ perf_table = self.GetSummaryTables(perf=True)
+ if not perf_table:
+ perf_table = None
if not self.email:
return self.TEXT % (self.experiment.name,
self.PrintTables(summary_table, "CONSOLE"),
- self.experiment.experiment_file)
+ self.experiment.machine_manager.num_reimages,
+ self.PrintTables(status_table, "CONSOLE"),
+ self.PrintTables(perf_table, "CONSOLE"),
+ self.experiment.experiment_file,
+ self.experiment.machine_manager.GetAllCPUInfo(
+ self.experiment.labels))
return self.TEXT % (self.experiment.name,
self.PrintTables(summary_table, "EMAIL"),
- self.experiment.experiment_file)
+ self.experiment.machine_manager.num_reimages,
+ self.PrintTables(status_table, "EMAIL"),
+ self.PrintTables(perf_table, "EMAIL"),
+ self.experiment.experiment_file,
+ self.experiment.machine_manager.GetAllCPUInfo(
+ self.experiment.labels))
class HTMLResultsReport(ResultsReport):
@@ -243,6 +339,7 @@ pre {
google.setOnLoadCallback(init);
function init() {
switchTab('summary', 'html');
+ %s
switchTab('full', 'html');
drawTable();
}
@@ -268,6 +365,7 @@ pre {
</div>
%s
</div>
+ %s
<div class='results-section'>
<div class='results-section-title'>Charts</div>
<div class='results-section-content'>%s</div>
@@ -291,6 +389,18 @@ pre {
</html>
"""
+ PERF_HTML = """
+ <div class='results-section'>
+ <div class='results-section-title'>Perf Table</div>
+ <div class='results-section-content'>
+ <div id='perf-html'>%s</div>
+ <div id='perf-text'><pre>%s</pre></div>
+ <div id='perf-tsv'><pre>%s</pre></div>
+ </div>
+ %s
+ </div>
+"""
+
def __init__(self, experiment):
super(HTMLResultsReport, self).__init__(experiment)
@@ -313,11 +423,26 @@ pre {
summary_table = self.GetSummaryTables()
full_table = self.GetFullTables()
- return self.HTML % (chart_javascript,
+ perf_table = self.GetSummaryTables(perf=True)
+ if perf_table:
+ perf_html = self.PERF_HTML % (
+ self.PrintTables(perf_table, "HTML"),
+ self.PrintTables(perf_table, "PLAIN"),
+ self.PrintTables(perf_table, "TSV"),
+ self._GetTabMenuHTML("perf")
+ )
+ perf_init = "switchTab('perf', 'html');"
+ else:
+ perf_html = ""
+ perf_init = ""
+
+ return self.HTML % (perf_init,
+ chart_javascript,
self.PrintTables(summary_table, "HTML"),
self.PrintTables(summary_table, "PLAIN"),
self.PrintTables(summary_table, "TSV"),
self._GetTabMenuHTML("summary"),
+ perf_html,
chart_divs,
self.PrintTables(full_table, "HTML"),
self.PrintTables(full_table, "PLAIN"),
diff --git a/crosperf/settings_factory.py b/crosperf/settings_factory.py
index 782f0dd3..11fa4b4b 100644
--- a/crosperf/settings_factory.py
+++ b/crosperf/settings_factory.py
@@ -1,9 +1,9 @@
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
+"""Setting files for global, benchmark and labels."""
from field import BooleanField
-from field import EnumField
from field import FloatField
from field import IntegerField
from field import ListField
@@ -46,6 +46,13 @@ class LabelSettings(Settings):
"chromeos_image."))
self.AddField(TextField("board", required=True, description="The target "
"board for running experiments on, e.g. x86-alex."))
+    self.AddField(ListField("remote", description=
+                            "A comma-separated list of ip's of chromeos "
+                            "devices to run experiments on."))
+ self.AddField(TextField("image_args", required=False,
+ default="",
+ description="Extra arguments to pass to "
+ "image_chromeos.py."))
class GlobalSettings(Settings):
@@ -56,20 +63,24 @@ class GlobalSettings(Settings):
"identifier."))
self.AddField(TextField("board", description="The target "
"board for running experiments on, e.g. x86-alex."))
- self.AddField(ListField("remote", required=True,
+ self.AddField(ListField("remote",
description="A comma-separated list of ip's of "
"chromeos devices to run experiments on."))
self.AddField(BooleanField("rerun_if_failed", description="Whether to "
"re-run failed autotest runs or not.",
default=False))
self.AddField(ListField("email", description="Space-separated "
- "list of email addresses to send email to."))
+ "list of email addresses to send email to."))
self.AddField(BooleanField("rerun", description="Whether to ignore the "
"cache and for autotests to be re-run.",
default=False))
- self.AddField(BooleanField("exact_remote", default=True,
+ self.AddField(BooleanField("same_specs", default=True,
description="Ensure cached runs are run on the "
- "same device that is specified as a remote."))
+                               "same kind of device as the specified "
+                               "remotes."))
+ self.AddField(BooleanField("same_machine", default=False,
+ description="Ensure cached runs are run on the "
+                               "exact same remote."))
self.AddField(IntegerField("iterations", default=1,
description="Number of iterations to run all "
"autotests."))
diff --git a/crosperf/test_flag.py b/crosperf/test_flag.py
new file mode 100644
index 00000000..613138b2
--- /dev/null
+++ b/crosperf/test_flag.py
@@ -0,0 +1,16 @@
+#!/usr/bin/python
+#
+# Copyright 2011 Google Inc. All Rights Reserved.
+
+"""A global variable for testing."""
+
+
+_is_test = [False]
+
+
+def SetTestMode(flag):
+ _is_test[0] = flag
+
+
+def GetTestMode():
+ return _is_test[0]
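
Storing the flag in a one-element list lets SetTestMode mutate shared state
without a global declaration, so every module that imports test_flag observes
the same value:

    import test_flag

    test_flag.SetTestMode(True)
    assert test_flag.GetTestMode()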