aboutsummaryrefslogtreecommitdiff
path: root/crb
diff options
context:
space:
mode:
authorLuis Lozano <llozano@chromium.org>2013-03-15 14:44:13 -0700
committerChromeBot <chrome-bot@google.com>2013-03-15 15:51:37 -0700
commitf81680c018729fd4499e1e200d04b48c4b90127c (patch)
tree940608da8374604b82edfdb2d7df55d065f05d4c /crb
parent2296ee0b914aba5bba07becab4ff68884ce9b8a5 (diff)
downloadtoolchain-utils-f81680c018729fd4499e1e200d04b48c4b90127c.tar.gz
Cleaned up directory after copy of tools from perforce directory
Got rid of stale copies of some tools like "crosperf" and moved all files under v14 directory (that came from perforce) into the top directory. BUG=None TEST=None Change-Id: I408d17a36ceb00e74db71403d2351fd466a14f8e Reviewed-on: https://gerrit-int.chromium.org/33887 Tested-by: Luis Lozano <llozano@chromium.org> Reviewed-by: Yunlian Jiang <yunlian@google.com> Commit-Queue: Luis Lozano <llozano@chromium.org>
Diffstat (limited to 'crb')
-rw-r--r--crb/autotest_gatherer.py63
-rw-r--r--crb/autotest_run.py312
-rwxr-xr-xcrb/crb_driver.py353
-rw-r--r--crb/machine_manager_singleton.py153
-rw-r--r--crb/table_formatter.py253
5 files changed, 1134 insertions, 0 deletions
diff --git a/crb/autotest_gatherer.py b/crb/autotest_gatherer.py
new file mode 100644
index 00000000..da39040d
--- /dev/null
+++ b/crb/autotest_gatherer.py
@@ -0,0 +1,63 @@
+from table_formatter import TableFormatter as TableFormatter
+
class AutotestGatherer(TableFormatter):
  """Aggregates results from multiple AutotestRun objects into tables."""

  def __init__(self):
    # Runs are appended by the caller (see ProduceTables in crb_driver).
    self.runs = []
    TableFormatter.__init__(self)

  def GetFormattedMainTable(self, percents_only, fit_string):
    """Returns the column labels plus the full results table as a string.

    Args:
      percents_only: If True, show only percent deltas vs. the baseline.
      fit_string: If True, pad/truncate cells to fixed column widths.
    """
    table = self.GetTableValues()
    ret = self.GetTableLabels(table)
    ret += self.GetFormattedTable(table, percents_only=percents_only,
                                  fit_string=fit_string)
    return ret

  def GetFormattedSummaryTable(self, percents_only, fit_string):
    """Returns the column labels plus the aggregated summary table."""
    table = self.GetTableValues()
    summary_table = self.GetSummaryTableValues(table)
    ret = self.GetTableLabels(summary_table)
    ret += self.GetFormattedTable(summary_table, percents_only=percents_only,
                                  fit_string=fit_string)
    return ret

  def GetBenchmarksString(self):
    """Returns a header line followed by all benchmark keys, one per line."""
    # Bug fix: the original *assigned* the joined benchmark list to ret,
    # silently discarding the "Benchmarks (in order):" header.
    ret = "Benchmarks (in order):\n"
    ret += "\n".join(self.GetAllBenchmarks())
    return ret

  def GetAllBenchmarks(self):
    """Returns the sorted union of result keys across all runs."""
    all_benchmarks = []
    for run in self.runs:
      for key in run.results.keys():
        if key not in all_benchmarks:
          all_benchmarks.append(key)
    all_benchmarks.sort()
    return all_benchmarks

  def GetTableValues(self):
    """Builds the raw table: a header row of labels, then one row per
    benchmark.

    Cells are empty strings where a run has no result for a benchmark.
    """
    table = []

    row = ["Benchmark"]
    for run in self.runs:
      label = run.GetLabel()
      row.append(self.GetLabelWithIteration(label, run.iteration))
    table.append(row)

    for benchmark in self.GetAllBenchmarks():
      row = [benchmark]
      for run in self.runs:
        # dict.get with "" default mirrors the original in/else lookup.
        row.append(run.results.get(benchmark, ""))
      table.append(row)

    return table
diff --git a/crb/autotest_run.py b/crb/autotest_run.py
new file mode 100644
index 00000000..d6dc70f5
--- /dev/null
+++ b/crb/autotest_run.py
@@ -0,0 +1,312 @@
+import datetime
+import getpass
+import glob
+import os
+import pickle
+import re
+import threading
+import time
+import image_chromeos
+import machine_manager_singleton
+import table_formatter
+from utils import command_executer
+from utils import logger
+
+
+SCRATCH_DIR = "/home/%s/cros_scratch" % getpass.getuser()
+PICKLE_FILE = "pickle.txt"
+VERSION = "1"
+
+
def ConvertToFilename(text):
  """Makes a string safe to use as a file name.

  Replaces path separators with "__" and spaces with "_", and drops
  '=' and '"' characters, in that order.
  """
  replacements = (("/", "__"), (" ", "_"), ("=", ""), ("\"", ""))
  result = text
  for old, new in replacements:
    result = result.replace(old, new)
  return result
+
+
class AutotestRun(threading.Thread):
  """One autotest execution (a single iteration) against one remote machine.

  Each instance is a thread: run() acquires a machine from the
  MachineManagerSingleton, re-images it if its checksum differs, runs the
  autotest via run_remote_tests.sh, and caches retval/stdout/stderr under
  SCRATCH_DIR keyed by (image checksum, test name, iteration, args).
  """

  def __init__(self, autotest, chromeos_root="", chromeos_image="",
               board="", remote="", iteration=0, image_checksum="",
               exact_remote=False, rerun=False, rerun_if_failed=False):
    """Records run parameters; no work happens until the thread starts.

    Args:
      autotest: Object with .name and .args describing the test to run.
      chromeos_root: Path to the ChromeOS checkout.
      chromeos_image: Path to the image to test against.
      board: Board name forwarded to imaging/test scripts.
      remote: Remote machine address (replaced by the acquired machine).
      iteration: Which iteration of this autotest this run is.
      image_checksum: md5 of chromeos_image; empty is fatal (used to match
          machines that already have this image).
      exact_remote: Only accept cached results from this exact remote.
      rerun: Ignore cached results entirely.
      rerun_if_failed: Ignore cached results when the cached run failed.
    """
    self.autotest = autotest
    self.chromeos_root = chromeos_root
    self.chromeos_image = chromeos_image
    self.board = board
    self.remote = remote
    self.iteration = iteration
    l = logger.GetLogger()
    # Fail fast: the checksum is the cache key and machine-matching key.
    l.LogFatalIf(not image_checksum, "Checksum shouldn't be None")
    self.image_checksum = image_checksum
    # Parsed benchmark results, filled in by ParseOutput().
    self.results = {}
    threading.Thread.__init__(self)
    # Cooperative cancellation flag, set by the driver on Ctrl-C.
    self.terminate = False
    self.retval = None
    self.status = "PENDING"
    self.run_completed = False
    self.exact_remote = exact_remote
    self.rerun = rerun
    self.rerun_if_failed = rerun_if_failed
    self.results_dir = None
    self.full_name = None

  @staticmethod
  def MeanExcludingSlowest(array):
    """Returns the mean of values within 20% of the overall mean.

    Falls back to the plain mean when no value qualifies.
    NOTE: under Python 2, sum/len on ints is integer division.
    """
    mean = sum(array) / len(array)
    array2 = []

    for v in array:
      if mean != 0 and abs(v - mean)/mean < 0.2:
        array2.append(v)

    if array2:
      return sum(array2) / len(array2)
    else:
      return mean

  @staticmethod
  def AddComposite(results_dict):
    """Adds "<key>[c]" (mean) and "<key>[ce]" (mean excluding outliers)
    entries for result keys that appear as repeated "<key>{N}" samples.

    Returns the same dict, mutated in place.
    """
    composite_keys = []
    composite_dict = {}
    # First pass: find base names of keys shaped like "name{0}", "name{1}".
    for key in results_dict:
      mo = re.match("(.*){\d+}", key)
      if mo:
        composite_keys.append(mo.group(1))
    # Second pass: bucket every float-valued sample under its base name.
    for key in results_dict:
      for composite_key in composite_keys:
        if (key.count(composite_key) != 0 and
            table_formatter.IsFloat(results_dict[key])):
          if composite_key not in composite_dict:
            composite_dict[composite_key] = []
          composite_dict[composite_key].append(float(results_dict[key]))
          break

    for composite_key in composite_dict:
      v = composite_dict[composite_key]
      results_dict["%s[c]" % composite_key] = sum(v) / len(v)
      mean_excluding_slowest = AutotestRun.MeanExcludingSlowest(v)
      results_dict["%s[ce]" % composite_key] = mean_excluding_slowest

    return results_dict

  def ParseOutput(self):
    """Extracts the first dash-delimited results table from self.out into
    self.results, and records the remote results directory if reported.
    """
    # A results table is a block bounded by lines of dashes.
    p = re.compile("^-+.*?^-+", re.DOTALL|re.MULTILINE)
    matches = p.findall(self.out)
    for i in range(len(matches)):
      results = matches[i]
      results_dict = {}
      for line in results.splitlines()[1:-1]:
        # "name   [ PASSED ]" / "[ FAILED ]" rows.
        mo = re.match("(.*\S)\s+\[\s+(PASSED|FAILED)\s+\]", line)
        if mo:
          results_dict[mo.group(1)] = mo.group(2)
          continue
        # Generic "key   value" rows.
        mo = re.match("(.*\S)\s+(.*)", line)
        if mo:
          results_dict[mo.group(1)] = mo.group(2)

      # Add a composite keyval for tests like startup.
      results_dict = AutotestRun.AddComposite(results_dict)

      self.results = results_dict

      # This causes it to not parse the table again
      # Autotest recently added a secondary table
      # That reports errors and screws up the final pretty output.
      break
    mo = re.search("Results placed in (\S+)", self.out)
    if mo:
      self.results_dir = mo.group(1)
      self.full_name = os.path.basename(self.results_dir)

  def GetCacheHashBase(self):
    """Returns the human-readable string hashed into the cache dir name."""
    ret = ("%s %s %s" %
           (self.image_checksum, self.autotest.name, self.iteration))
    if self.autotest.args:
      ret += " %s" % self.autotest.args
    # VERSION busts the cache when the cache format changes.
    ret += "-%s" % VERSION
    return ret

  def GetLabel(self):
    """Returns a display label: image, test name and remote address."""
    ret = "%s %s remote:%s" % (self.chromeos_image, self.autotest.name,
                               self.remote)
    return ret

  def TryToLoadFromCache(self):
    """Loads cached retval/stdout/stderr if a matching cache dir exists.

    With exact_remote, only a cache entry from self.remote matches;
    otherwise any remote's entry is accepted. Returns True on a hit.
    """
    base = self.GetCacheHashBase()
    if self.exact_remote:
      if not self.remote:
        return False
      cache_dir_glob = "%s_%s" % (ConvertToFilename(base), self.remote)
    else:
      cache_dir_glob = "%s*" % ConvertToFilename(base)
    cache_path_glob = os.path.join(SCRATCH_DIR, cache_dir_glob)
    matching_dirs = glob.glob(cache_path_glob)
    if matching_dirs:
      # Arbitrarily take the first matching remote's results.
      matching_dir = matching_dirs[0]
      cache_file = os.path.join(matching_dir, PICKLE_FILE)
      assert os.path.isfile(cache_file)
      self._logger.LogOutput("Trying to read from cache file: %s" % cache_file)
      return self.ReadFromCache(cache_file)
    self._logger.LogOutput("Cache miss. AM going to run: %s for: %s" %
                           (self.autotest.name, self.chromeos_image))
    return False

  def ReadFromCache(self, cache_file):
    """Unpickles retval, stdout and stderr (in that order) from cache_file."""
    with open(cache_file, "rb") as f:
      # Order must match StoreToCache: retval, out, err.
      self.retval = pickle.load(f)
      self.out = pickle.load(f)
      self.err = pickle.load(f)
      self._logger.LogOutput(self.out)
      return True
    # NOTE(review): unreachable -- the with-block above always returns True.
    return False

  def StoreToCache(self):
    """Pickles retval, stdout and stderr into this run's cache dir."""
    base = self.GetCacheHashBase()
    # Cache dir name encodes the hash base plus the machine actually used.
    self.cache_dir = os.path.join(SCRATCH_DIR, "%s_%s" % (
        ConvertToFilename(base),
        self.remote))
    cache_file = os.path.join(self.cache_dir, PICKLE_FILE)
    command = "mkdir -p %s" % os.path.dirname(cache_file)
    ret = self._ce.RunCommand(command)
    assert ret == 0, "Couldn't create cache dir"
    with open(cache_file, "wb") as f:
      pickle.dump(self.retval, f)
      pickle.dump(self.out, f)
      pickle.dump(self.err, f)

  def run(self):
    """Thread entry point: sets up a per-thread logger/executer and runs."""
    self._logger = logger.Logger(
        os.path.dirname(__file__),
        "%s.%s" % (os.path.basename(__file__),
                   self.name), True)
    self._ce = command_executer.GetCommandExecuter(self._logger)
    self.RunCached()

  def RunCached(self):
    """Runs the autotest, consulting/populating the result cache.

    Acquires a machine (waiting if none is free), re-images it when its
    checksum differs, runs the test, always releases the machine, stores
    results to cache, then parses output and optionally archives the
    profile data plus a perf report.
    """
    self.status = "WAITING"
    cache_hit = False
    if not self.rerun:
      cache_hit = self.TryToLoadFromCache()
    else:
      self._logger.LogOutput("--rerun passed. Not using cached results.")
    if self.rerun_if_failed and self.retval:
      self._logger.LogOutput("--rerun_if_failed passed and existing test "
                             "failed. Rerunning...")
      cache_hit = False
    if not cache_hit:
      # Get machine
      while True:
        if self.terminate:
          return 1
        self.machine = (
            machine_manager_singleton.MachineManagerSingleton().AcquireMachine(self.image_checksum))
        if self.machine:
          self._logger.LogOutput("%s: Machine %s acquired at %s" %
                                 (self.name,
                                  self.machine.name,
                                  datetime.datetime.now()))
          break
        else:
          # No machine free yet; poll again shortly.
          sleep_duration = 10
          time.sleep(sleep_duration)
      try:
        self.remote = self.machine.name

        # Only re-image when the machine's current image differs.
        if self.machine.checksum != self.image_checksum:
          self.retval = self.ImageTo(self.machine.name)
          if self.retval: return self.retval
          self.machine.checksum = self.image_checksum
          self.machine.image = self.chromeos_image
        self.status = "RUNNING: %s" % self.autotest.name
        [self.retval, self.out, self.err] = self.RunTestOn(self.machine.name)
        self.run_completed = True

      finally:
        # Release the machine even if imaging/testing raised.
        self._logger.LogOutput("Releasing machine: %s" % self.machine.name)
        machine_manager_singleton.MachineManagerSingleton().ReleaseMachine(self.machine)
        self._logger.LogOutput("Released machine: %s" % self.machine.name)

      self.StoreToCache()

    if not self.retval:
      self.status = "SUCCEEDED"
    else:
      self.status = "FAILED"

    self.ParseOutput()
    # Copy results directory to the scratch dir
    if (not cache_hit and not self.retval and self.autotest.args and
        "--profile" in self.autotest.args):
      results_dir = os.path.join(self.chromeos_root, "chroot",
                                 self.results_dir.lstrip("/"))
      tarball = os.path.join(
          self.cache_dir,
          os.path.basename(os.path.dirname(self.results_dir)))
      command = ("cd %s && tar cjf %s.tbz2 ." % (results_dir, tarball))
      self._ce.RunCommand(command)
      perf_data_file = os.path.join(self.results_dir, self.full_name,
                                    "profiling/iteration.1/perf.data")

      # Attempt to build a perf report and keep it with the results.
      command = ("cd %s/src/scripts &&"
                 " cros_sdk -- /usr/sbin/perf report --symfs=/build/%s"
                 " -i %s --stdio" % (self.chromeos_root, self.board,
                                     perf_data_file))
      ret, out, err = self._ce.RunCommand(command, return_output=True)
      with open(os.path.join(self.cache_dir, "perf.report"), "wb") as f:
        f.write(out)
    return self.retval

  def ImageTo(self, machine_name):
    """Flashes self.chromeos_image onto machine_name via image_chromeos.

    Returns the imaging command's return value (non-zero on failure).
    """
    image_args = [image_chromeos.__file__,
                  "--chromeos_root=%s" % self.chromeos_root,
                  "--image=%s" % self.chromeos_image,
                  "--remote=%s" % machine_name]
    if self.board:
      image_args.append("--board=%s" % self.board)

### devserver_port = 8080
### mo = re.search("\d+", self.name)
### if mo:
###   to_add = int(mo.group(0))
###   assert to_add < 100, "Too many threads launched!"
###   devserver_port += to_add

### # I tried --noupdate_stateful, but that still fails when run in parallel.
### image_args.append("--image_to_live_args=\"--devserver_port=%s"
###                   " --noupdate_stateful\"" % devserver_port)
### image_args.append("--image_to_live_args=--devserver_port=%s" %
###                   devserver_port)

    # Currently can't image two machines at once.
    # So have to serialized on this lock.
    self.status = "WAITING ON IMAGE_LOCK"
    with machine_manager_singleton.MachineManagerSingleton().image_lock:
      self.status = "IMAGING"
      retval = self._ce.RunCommand(" ".join(["python"] + image_args))
      machine_manager_singleton.MachineManagerSingleton().num_reimages += 1
      if retval:
        self.status = "ABORTED DUE TO IMAGE FAILURE"
    return retval

  def DoPowerdHack(self):
    """Stops powerd on the remote machine (workaround for tegra2 boards)."""
    command = "sudo initctl stop powerd"
    self._ce.CrosRunCommand(command, machine=self.machine.name,
                            chromeos_root=self.chromeos_root)

  def RunTestOn(self, machine_name):
    """Runs the autotest on machine_name through run_remote_tests.sh.

    Returns [retval, stdout, stderr] from the command executer.
    """
    command = "cd %s/src/scripts" % self.chromeos_root
    options = ""
    if self.board:
      options += " --board=%s" % self.board
    if self.autotest.args:
      options += " %s" % self.autotest.args
    if "tegra2" in self.board:
      self.DoPowerdHack()
    command += ("&& cros_sdk -- ./run_remote_tests.sh --remote=%s %s %s" %
                (machine_name,
                 options,
                 self.autotest.name))
    return self._ce.RunCommand(command, True)
diff --git a/crb/crb_driver.py b/crb/crb_driver.py
new file mode 100755
index 00000000..6cf7af35
--- /dev/null
+++ b/crb/crb_driver.py
@@ -0,0 +1,353 @@
+#!/usr/bin/python2.6
+#
+# Copyright 2010 Google Inc. All Rights Reserved.
+
+import datetime
+import optparse
+import os
+import smtplib
+import sys
+import time
+from email.mime.text import MIMEText
+
+from autotest_gatherer import AutotestGatherer as AutotestGatherer
+from autotest_run import AutotestRun as AutotestRun
+from machine_manager_singleton import MachineManagerSingleton as MachineManagerSingleton
+from utils import logger
+from utils.file_utils import FileUtils
+
+
def CanonicalizeChromeOSRoot(chromeos_root):
  """Expands '~' in the given path and validates it is a ChromeOS checkout.

  Returns the expanded path when src/scripts/enter_chroot.sh exists under
  it, otherwise None.
  """
  expanded = os.path.expanduser(chromeos_root)
  marker = os.path.join(expanded, "src/scripts/enter_chroot.sh")
  if not os.path.isfile(marker):
    return None
  return expanded
+
+
class Autotest(object):
  """Parsed form of one "<name>[,<iterations>[,<args>]]" test spec."""

  def __init__(self, autotest_string):
    """Parses an autotest spec string.

    Args:
      autotest_string: "<name>", "<name>,<iterations>" or
          "<name>,<iterations>,<args>". Missing fields stay None.
    """
    self.name = None
    self.iterations = None
    self.args = None
    fields = autotest_string.split(",", 1)
    self.name = fields[0]
    if len(fields) > 1:
      autotest_string = fields[1]
      fields = autotest_string.split(",", 1)
    else:
      return
    self.iterations = int(fields[0])
    if len(fields) > 1:
      self.args = fields[1]

  def __str__(self):
    # Bug fix: iterations is an int and iterations/args may be None;
    # "\n".join() raises TypeError on non-string elements, so each field
    # must be stringified first.
    return "\n".join([str(self.name), str(self.iterations), str(self.args)])
+
+
def CreateAutotestListFromString(autotest_strings, default_iterations=None):
  """Parses a colon-separated list of autotest specs into Autotest objects.

  Args:
    autotest_strings: Colon-separated specs, e.g. "bvt,3:page_cycler".
    default_iterations: Iteration count applied to specs that omit one.

  Returns:
    A list of Autotest objects, one per spec.
  """
  autotests = []
  for spec in autotest_strings.split(":"):
    parsed = Autotest(spec)
    if default_iterations and not parsed.iterations:
      parsed.iterations = default_iterations
    autotests.append(parsed)
  return autotests
+
+
def CreateAutotestRuns(images, autotests, remote, board, exact_remote,
                       rerun, rerun_if_failed, main_chromeos_root=None):
  """Creates one AutotestRun per (image, autotest, iteration) triple.

  Computes each image's md5 checksum, resolves a ChromeOS root for it
  (from main_chromeos_root or from the image's location in the tree),
  and seeds the MachineManagerSingleton's chromeos_root once.

  Returns:
    A list of (unstarted) AutotestRun threads.
  """
  runs = []
  for image in images:
    log = logger.GetLogger()
    log.LogOutput("Computing md5sum of: %s" % image)
    image_checksum = FileUtils().Md5File(image)
    log.LogOutput("md5sum %s: %s" % (image, image_checksum))

    if main_chromeos_root:
      chromeos_root = CanonicalizeChromeOSRoot(main_chromeos_root)
      assert chromeos_root, "chromeos_root: %s invalid" % main_chromeos_root
    else:
      # Derive the checkout root from the image's position in the tree.
      image_chromeos_root = os.path.join(os.path.dirname(image),
                                         "../../../../..")
      chromeos_root = CanonicalizeChromeOSRoot(image_chromeos_root)
      assert chromeos_root, "chromeos_root: %s invalid" % image_chromeos_root

    # We just need a single ChromeOS root in the MachineManagerSingleton. It is
    # needed because we can save re-image time by checking the image checksum at
    # the beginning and assigning autotests to machines appropriately.
    if not MachineManagerSingleton().chromeos_root:
      MachineManagerSingleton().chromeos_root = chromeos_root

    for autotest in autotests:
      for iteration in range(autotest.iterations):
        runs.append(AutotestRun(autotest,
                                chromeos_root=chromeos_root,
                                chromeos_image=image,
                                board=board,
                                remote=remote,
                                iteration=iteration,
                                image_checksum=image_checksum,
                                exact_remote=exact_remote,
                                rerun=rerun,
                                rerun_if_failed=rerun_if_failed))
  return runs
+
+
def GetNamesAndIterations(autotest_runs):
  """Returns " <count> (<name>:<iteration> ...)" for the given runs."""
  descriptions = ["%s:%s" % (run.autotest.name, run.iteration)
                  for run in autotest_runs]
  return " %s (%s)" % (len(descriptions), " ".join(descriptions))
+
+
def GetStatusString(autotest_runs):
  """Groups runs by status and formats one line per status bucket."""
  status_bins = {}
  for run in autotest_runs:
    status_bins.setdefault(run.status, []).append(run)

  lines = ["%s: %s" % (status, GetNamesAndIterations(runs_with_status))
           for status, runs_with_status in status_bins.items()]
  return "Thread Status:\n%s" % "\n".join(lines)
+
+
def GetProgressBar(num_done, num_total):
  """Renders e.g. "Done: 40% [>>>>>     ...]" for num_done of num_total.

  Args:
    num_done: Number of completed items.
    num_total: Total number of items (must be non-zero).

  Uses explicit floor division so the bar arithmetic also works under
  Python 3 (plain "/" would produce a float there and break the string
  repetition below); behavior under Python 2 is unchanged.
  """
  ret = "Done: %s%%" % int(100.0 * num_done / num_total)
  bar_length = 50
  done_char = ">"
  undone_char = " "
  num_done_chars = bar_length * num_done // num_total
  num_undone_chars = bar_length - num_done_chars
  ret += " [%s%s]" % (num_done_chars * done_char,
                      num_undone_chars * undone_char)
  return ret
+
+
def GetProgressString(start_time, num_remain, num_total):
  """Returns current time, elapsed time, ETA and a progress bar.

  Args:
    start_time: Epoch seconds when the whole batch started.
    num_remain: Number of autotest runs not yet finished.
    num_total: Total number of autotest runs.
  """
  current_time = time.time()
  elapsed_time = current_time - start_time
  try:
    # ETA extrapolates: remaining work times the observed rate so far.
    eta_seconds = float(num_remain) * elapsed_time / (num_total - num_remain)
    eta_seconds = int(eta_seconds)
    eta = datetime.timedelta(seconds=eta_seconds)
  except ZeroDivisionError:
    # Nothing has completed yet, so there is no rate to extrapolate from.
    eta = "Unknown"
  strings = []
  strings.append("Current time: %s Elapsed: %s ETA: %s" %
                 (datetime.datetime.now(),
                  datetime.timedelta(seconds=int(elapsed_time)),
                  eta))
  strings.append(GetProgressBar(num_total - num_remain, num_total))
  return "\n".join(strings)
+
+
def RunAutotestRunsInParallel(autotest_runs):
  """Starts all runs as daemon threads and polls until they all finish.

  Logs a bordered status/progress report every 30 seconds. Returns 0 on
  normal completion, or 1 on Ctrl-C (after asking each surviving thread
  to stop via its .terminate flag).
  """
  start_time = time.time()
  active_threads = []
  for autotest_run in autotest_runs:
    # Set threads to daemon so program exits when ctrl-c is pressed.
    autotest_run.daemon = True
    autotest_run.start()
    active_threads.append(autotest_run)

  print_interval = 30
  last_printed_time = time.time()
  while active_threads:
    try:
      # Prune finished threads; join with a 1s timeout keeps the loop
      # responsive to KeyboardInterrupt.
      active_threads = [t for t in active_threads if t is not None
                        and t.isAlive()]
      for t in active_threads:
        t.join(1)
      if time.time() - last_printed_time > print_interval:
        border = "=============================="
        logger.GetLogger().LogOutput(border)
        logger.GetLogger().LogOutput(GetProgressString(
            start_time,
            len([t for t in autotest_runs if t.status not in ["SUCCEEDED",
                                                              "FAILED"]]),
            len(autotest_runs)))
        logger.GetLogger().LogOutput(GetStatusString(autotest_runs))
        logger.GetLogger().LogOutput("%s\n" %
                                     MachineManagerSingleton().AsString())
        logger.GetLogger().LogOutput(border)
        last_printed_time = time.time()
    except KeyboardInterrupt:
      print "C-c received... cleaning up threads."
      for t in active_threads:
        t.terminate = True
      return 1
  return 0
+
+
def RunAutotestRunsSerially(autotest_runs):
  """Runs each autotest run one at a time, stopping at the first failure.

  Returns the first non-zero return value, or None when all runs succeed.
  NOTE(review): this calls autotest_run.Run(); AutotestRun in this change
  only defines run() (lowercase) -- confirm against callers.
  """
  for autotest_run in autotest_runs:
    retval = autotest_run.Run()
    if retval:
      return retval
+
+
def ProduceTables(autotest_runs, full_table, fit_string):
  """Logs per-benchmark result and summary tables and emails the summary.

  Groups runs by their full autotest name into AutotestGatherers, logs the
  formatted main and summary tables plus the re-image count, and emails
  the summary when at least one run actually completed.

  Args:
    autotest_runs: Finished AutotestRun objects.
    full_table: If False, print only percent deltas.
    fit_string: If True, pad/truncate table cells to fixed widths.
  """
  l = logger.GetLogger()
  ags_dict = {}
  for autotest_run in autotest_runs:
    name = autotest_run.full_name
    if name not in ags_dict:
      ags_dict[name] = AutotestGatherer()
    ags_dict[name].runs.append(autotest_run)
  output = ""
  for b, ag in ags_dict.items():
    output += "Benchmark: %s\n" % b
    output += ag.GetFormattedMainTable(percents_only=not full_table,
                                       fit_string=fit_string)
    output += "\n"

  summary = ""
  for b, ag in ags_dict.items():
    summary += "Benchmark Summary Table: %s\n" % b
    summary += ag.GetFormattedSummaryTable(percents_only=not full_table,
                                           fit_string=fit_string)
    summary += "\n"

  output += summary
  output += ("Number of re-images performed: %s" %
             MachineManagerSingleton().num_reimages)
  l.LogOutput(output)

  if autotest_runs:
    board = autotest_runs[0].board
  else:
    board = ""

  subject = "%s: %s" % (board, ", ".join(ags_dict.keys()))

  # Only email when something actually ran to completion.
  if any(autotest_run.run_completed for autotest_run in autotest_runs):
    SendEmailToUser(subject, summary)
+
+
def SendEmailToUser(subject, text_to_send):
  """Emails text_to_send to the user running this script via local SMTP.

  Args:
    subject: Subject line; it is prefixed with this script's file name.
    text_to_send: Plain-text message body.
  """
  message = MIMEText(text_to_send)

  # Sender is the script itself; recipient is the logged-in user.
  sender = os.path.basename(__file__)
  recipient = os.getlogin()
  message["Subject"] = "[%s] %s" % (os.path.basename(__file__), subject)
  message["From"] = sender
  message["To"] = recipient

  # Deliver through the local SMTP server without an envelope header.
  server = smtplib.SMTP("localhost")
  server.sendmail(sender, [recipient], message.as_string())
  server.quit()
+
+
def Main(argv):
  """Entry point: parses options, runs all autotests, prints tables.

  Args:
    argv: Full process argv; positional args after argv[0] are image paths.

  Returns:
    0 on success, non-zero on bad arguments or failed runs.
  """
  # Common initializations
### command_executer.InitCommandExecuter(True)
  l = logger.GetLogger()

  parser = optparse.OptionParser()
  parser.add_option("-t", "--tests", dest="tests",
                    help=("Tests to compare."
                          "Optionally specify per-test iterations by:"
                          "<test>,<iter>:<args>"))
  parser.add_option("-c", "--chromeos_root", dest="chromeos_root",
                    help="A *single* chromeos_root where scripts can be found.")
  parser.add_option("-n", "--iterations", dest="iterations",
                    help="Iterations to run per benchmark.",
                    default=1)
  parser.add_option("-r", "--remote", dest="remote",
                    help="The remote chromeos machine.")
  parser.add_option("-b", "--board", dest="board",
                    help="The remote board.")
  # NOTE(review): default=True with action="store_true" makes this flag a
  # no-op (it can never be turned off) -- confirm intended behavior.
  parser.add_option("--full_table", dest="full_table",
                    help="Print full tables.",
                    action="store_true",
                    default=True)
  parser.add_option("--exact_remote",
                    dest="exact_remote",
                    help="Run tests on the exact remote.",
                    action="store_true",
                    default=False)
  parser.add_option("--fit_string", dest="fit_string",
                    help="Fit strings to fixed sizes.",
                    action="store_true",
                    default=False)
  parser.add_option("--rerun",
                    dest="rerun",
                    help="Re-run regardless of cache hit.",
                    action="store_true",
                    default=False)
  parser.add_option("--rerun_if_failed",
                    dest="rerun_if_failed",
                    help="Re-run if previous run was a failure.",
                    action="store_true",
                    default=False)
  parser.add_option("--no_lock",
                    dest="no_lock",
                    help="Do not lock the machine before running the tests.",
                    action="store_true",
                    default=False)
  # Echo the invocation so logs are self-describing.
  l.LogOutput(" ".join(argv))
  [options, args] = parser.parse_args(argv)

  if options.remote is None:
    l.LogError("No remote machine specified.")
    parser.print_help()
    return 1

  if not options.board:
    l.LogError("No board specified.")
    parser.print_help()
    return 1

  remote = options.remote
  tests = options.tests
  board = options.board
  exact_remote = options.exact_remote
  iterations = int(options.iterations)

  autotests = CreateAutotestListFromString(tests, iterations)

  main_chromeos_root = options.chromeos_root
  # args[0] is the script name; the rest are image paths.
  images = args[1:]
  fit_string = options.fit_string
  full_table = options.full_table
  rerun = options.rerun
  rerun_if_failed = options.rerun_if_failed

  MachineManagerSingleton().no_lock = options.no_lock

  # Now try creating all the Autotests
  autotest_runs = CreateAutotestRuns(images, autotests, remote, board,
                                     exact_remote, rerun, rerun_if_failed,
                                     main_chromeos_root)

  try:
    # At this point we have all the autotest runs.
    for machine in remote.split(","):
      MachineManagerSingleton().AddMachine(machine)

    retval = RunAutotestRunsInParallel(autotest_runs)
    if retval: return retval

    # Now print tables
    ProduceTables(autotest_runs, full_table, fit_string)
  finally:
    # not sure why this isn't called at the end normally...
    MachineManagerSingleton().__del__()

  return 0
+
+if __name__ == "__main__":
+ sys.exit(Main(sys.argv))
diff --git a/crb/machine_manager_singleton.py b/crb/machine_manager_singleton.py
new file mode 100644
index 00000000..93629c4c
--- /dev/null
+++ b/crb/machine_manager_singleton.py
@@ -0,0 +1,153 @@
+import image_chromeos
+import lock_machine
+import sys
+import threading
+import time
+from utils import command_executer
+from utils import logger
+
+
class CrosMachine(object):
  """State for one remote ChromeOS device tracked by the machine manager."""

  def __init__(self, name):
    self.name = name
    # Image path and checksum of whatever is currently flashed on the device.
    self.image = None
    self.checksum = None
    self.locked = False
    self.released_time = time.time()
    # The AutotestRun thread currently holding this machine, if any.
    self.autotest_run = None

  def __str__(self):
    fields = [self.name,
              str(self.image),
              str(self.checksum),
              str(self.locked),
              str(self.released_time)]
    return ", ".join(fields)
+
+
class MachineManagerSingleton(object):
  """Process-wide pool of remote machines shared by all AutotestRun threads.

  All mutating operations take the reentrant class lock. Machines are
  added single-threaded via AddMachine, externally locked on first use
  (lock_machine), and handed out/returned via AcquireMachine /
  ReleaseMachine. image_lock serializes re-imaging across threads.
  """
  _instance = None
  _lock = threading.RLock()
  # All known machines vs. the subset successfully locked for our use.
  _all_machines = []
  _machines = []
  image_lock = threading.Lock()
  num_reimages = 0
  chromeos_root = None
  no_lock = False

  def __new__(cls, *args, **kwargs):
    # Double-checked-under-lock singleton construction.
    with cls._lock:
      if not cls._instance:
        cls._instance = super(MachineManagerSingleton, cls).__new__(
            cls, *args, **kwargs)
      return cls._instance

  def TryToLockMachine(self, cros_machine):
    """Externally locks cros_machine and, on success, records its current
    image checksum and adds it to the usable pool.

    With no_lock set, the external lock step is skipped.
    """
    with self._lock:
      assert cros_machine, "Machine can't be None"
      for m in self._machines:
        assert m.name != cros_machine.name, (
            "Tried to double-lock %s" % cros_machine.name)
      if self.no_lock:
        locked = True
      else:
        locked = lock_machine.Machine(cros_machine.name).Lock(True, sys.argv[0])
      if locked:
        # Read the checksum of the image currently on the device so runs
        # can be matched to machines without re-imaging.
        ce = command_executer.GetCommandExecuter()
        command = "cat %s" % image_chromeos.checksum_file
        ret, out, err = ce.CrosRunCommand(
            command, return_output=True, chromeos_root=self.chromeos_root,
            machine=cros_machine.name)
        if ret == 0:
          cros_machine.checksum = out.strip()
        self._machines.append(cros_machine)
      else:
        logger.GetLogger().LogOutput("Warning: Couldn't lock: %s" %
                                     cros_machine.name)

  # This is called from single threaded mode.
  def AddMachine(self, machine_name):
    """Registers a machine by name; duplicates are a fatal error."""
    with self._lock:
      for m in self._all_machines:
        assert m.name != machine_name, "Tried to double-add %s" % machine_name
      self._all_machines.append(CrosMachine(machine_name))

  def AcquireMachine(self, image_checksum):
    """Returns a free machine, preferring ones already imaged with
    image_checksum, then never-imaged ones, then any machine idle >20s.

    Returns None when nothing suitable is free (caller should retry).
    """
    with self._lock:
      # Lazily external lock machines
      if not self._machines:
        for m in self._all_machines:
          self.TryToLockMachine(m)
        assert self._machines, (
            "Could not lock any machine in %s" % self._all_machines)

### for m in self._machines:
### if (m.locked and time.time() - m.released_time < 10 and
### m.checksum == image_checksum):
### return None
      for m in [machine for machine in self._machines if not machine.locked]:
        if m.checksum == image_checksum:
          m.locked = True
          m.autotest_run = threading.current_thread()
          return m
      for m in [machine for machine in self._machines if not machine.locked]:
        if not m.checksum:
          m.locked = True
          m.autotest_run = threading.current_thread()
          return m
      for m in [machine for machine in self._machines if not machine.locked]:
        if time.time() - m.released_time > 20:
          m.locked = True
          m.autotest_run = threading.current_thread()
          return m
      return None

  def ReleaseMachine(self, machine):
    """Marks the pool entry matching machine.name as free again."""
    with self._lock:
      for m in self._machines:
        if machine.name == m.name:
          assert m.locked == True, "Tried to double-release %s" % m.name
          m.released_time = time.time()
          m.locked = False
          m.status = "Available"
          break

  def __del__(self):
    # Unlock all machines.
    with self._lock:
      for m in self._machines:
        if not self.no_lock:
          assert lock_machine.Machine(m.name).Unlock(True) == True, (
              "Couldn't unlock machine: %s" % m.name)

  def __str__(self):
    with self._lock:
      l = ["MachineManager Status:"]
      for m in self._machines:
        l.append(str(m))
      return "\n".join(l)

  def AsString(self):
    """Returns a fixed-width status table of all locked machines."""
    with self._lock:
      stringify_fmt = "%-30s %-10s %-4s %-25s %-32s"
      header = stringify_fmt % ("Machine", "Thread", "Lock", "Status", "Checksum")
      table = [header]
      for m in self._machines:
        if m.autotest_run:
          autotest_name = m.autotest_run.name
          autotest_status = m.autotest_run.status
        else:
          autotest_name = ""
          autotest_status = ""

        try:
          machine_string = stringify_fmt % (m.name,
                                            autotest_name,
                                            m.locked,
                                            autotest_status,
                                            m.checksum)
        # NOTE(review): bare except silently turns formatting errors into
        # an empty row -- consider narrowing.
        except:
          machine_string = ""
        table.append(machine_string)
      return "Machine Status:\n%s" % "\n".join(table)
diff --git a/crb/table_formatter.py b/crb/table_formatter.py
new file mode 100644
index 00000000..b3b82f09
--- /dev/null
+++ b/crb/table_formatter.py
@@ -0,0 +1,253 @@
+import numpy
+import re
+
def IsFloat(text):
  """Returns True when text (a string or number) parses as a float."""
  if text is None:
    return False
  try:
    float(text)
  except ValueError:
    return False
  return True
+
+
def RemoveTrailingZeros(x):
  """Strips redundant trailing zeros from a numeric string.

  "3.00" -> "3", "2.50" -> "2.5"; strings with no trailing zeros after a
  decimal point are returned unchanged.
  """
  without_zero_fraction = re.sub(r"\.0*$", "", x)
  return re.sub(r"(\.[1-9]*)0+$", r"\1", without_zero_fraction)
+
+
def HumanizeFloat(x, n=2):
  """Truncates x (as a string) after roughly n significant figures.

  Non-float inputs are returned unchanged. NOTE(review): the loop breaks
  *before* appending the digit that reaches the n-th significant figure
  once a decimal point has been seen -- preserved from the original,
  confirm this truncation is intended.
  """
  if not IsFloat(x):
    return x
  ret = ""
  sig_figs = 0
  decimal_found = False
  for ch in re.findall("[0-9.]", str(x)):
    if ch == ".":
      decimal_found = True
    elif sig_figs != 0 or ch != "0":
      sig_figs += 1
    if decimal_found and sig_figs >= n:
      break
    ret += ch
  return ret
+
+
def GetNSigFigs(x, n=2):
  """Rounds x to n significant figures via scientific-notation formatting.

  Non-float inputs are returned unchanged.
  """
  if not IsFloat(x):
    return x
  my_fmt = "%." + str(n - 1) + "e"
  return float(my_fmt % x)
+
+
def GetFormattedPercent(baseline, other, bad_result="--"):
  """Like GetPercent, but right-aligned in an 8-character field."""
  return "%8s" % GetPercent(baseline, other, bad_result)
+
+
def GetPercent(baseline, other, bad_result="--"):
  """Returns other's percent change relative to baseline, e.g. "+10.0".

  Returns bad_result when either value is non-numeric or baseline is zero.
  """
  if not (IsFloat(baseline) and IsFloat(other)):
    return bad_result
  try:
    delta = (float(other) / float(baseline) - 1) * 100
  except ZeroDivisionError:
    return bad_result
  return "%+1.1f" % delta
+
+
def FitString(text, length):
  """Forces text to exactly `length` characters.

  Longer strings keep their tail (the end usually carries the most
  distinguishing part); shorter strings are right-aligned with spaces.
  """
  if len(text) > length:
    return text[-length:]
  return "%*s" % (length, text)
+
+
class TableFormatter(object):
  """Formats benchmark-result tables (values, percent deltas, summaries).

  A table is a list of rows: row 0 is the header, column 0 holds labels,
  and column 1 of each data row is the baseline used for percent deltas.
  """

  def __init__(self):
    self.d = "\t"           # Cell delimiter in formatted output.
    self.bad_result = "x"   # Placeholder for non-computable percents.

  def GetTablePercents(self, table):
    """Returns a parallel table with each data cell expressed as percent
    change versus column 1 of its row.

    Assumes table is not transposed (row 0 header, column 0 labels).
    """
    pct_table = []
    pct_table.append(table[0])
    for i in range(1, len(table)):
      row = [table[i][0]]
      baseline = table[i][1]
      for j in range(1, len(table[0])):
        row.append(GetPercent(baseline, table[i][j], self.bad_result))
      pct_table.append(row)
    return pct_table

  def FormatFloat(self, c, max_length=8):
    """Humanizes float c to ~4 significant figures within max_length chars."""
    if not IsFloat(c):
      return c
    f = float(c)
    ret = HumanizeFloat(f, 4)
    ret = RemoveTrailingZeros(ret)
    if len(ret) > max_length:
      # NOTE(review): the trailing "f" appends a literal 'f' after the
      # exponent (e.g. "1.2e+03f"); looks like a typo for "%1.1e" but is
      # kept as-is since downstream consumers may expect it -- confirm.
      ret = "%1.1ef" % f
    return ret

  def TransposeTable(self, table):
    """Returns the transpose of a rectangular table."""
    transposed_table = []
    for i in range(len(table[0])):
      row = []
      for j in range(len(table)):
        row.append(table[j][i])
      transposed_table.append(row)
    return transposed_table

  def GetTableLabels(self, table):
    """Returns "<index>: <label>" lines mapping column numbers to labels."""
    ret = ""
    header = table[0]
    for i in range(1, len(header)):
      ret += "%d: %s\n" % (i, header[i])
    return ret

  def GetFormattedTable(self, table, transposed=False,
                        first_column_width=30, column_width=14,
                        percents_only=True,
                        fit_string=True):
    """Renders the table (values and/or percents) as delimited text.

    Args:
      table: Raw values table (header row, label column).
      transposed: If True, flip rows and columns before rendering.
      first_column_width: Width of the label column when fitting.
      column_width: Width of the data columns when fitting.
      percents_only: If True, print percent deltas instead of "value (pct)".
      fit_string: If True, pad/truncate each cell to its column width.
    """
    o = ""
    pct_table = self.GetTablePercents(table)
    if transposed == True:
      table = self.TransposeTable(table)
      # Bug fix: the original re-transposed `table` here (which had just
      # been transposed on the line above), so pct_table ended up holding
      # raw values in the original orientation instead of percents.
      pct_table = self.TransposeTable(pct_table)

    for i in range(0, len(table)):
      for j in range(len(table[0])):
        if j == 0:
          width = first_column_width
        else:
          width = column_width

        c = table[i][j]
        p = pct_table[i][j]

        # Replace labels with numbers: 0... n
        if IsFloat(c):
          c = self.FormatFloat(c)

        if IsFloat(p) and not percents_only:
          p = "%s%%" % p

        # Print percent values side by side.
        if j != 0:
          if percents_only:
            c = "%s" % p
          else:
            c = "%s (%s)" % (c, p)

        # Header cells beyond the first become their column index (the
        # real labels are emitted separately by GetTableLabels).
        if i == 0 and j != 0:
          c = str(j)

        if fit_string:
          o += FitString(c, width) + self.d
        else:
          o += c + self.d
      o += "\n"
    return o

  def GetGroups(self, table):
    """Groups column indices whose labels match after stripping
    "key:value" tokens (e.g. the same image run several times).

    Returns a list of lists of column indices.
    """
    labels = table[0]
    groups = []
    group_dict = {}
    for i in range(1, len(labels)):
      stripped_label = self.GetStrippedLabel(labels[i])
      if stripped_label not in group_dict:
        group_dict[stripped_label] = len(groups)
        groups.append([])
      groups[group_dict[stripped_label]].append(i)
    return groups

  def GetSummaryTableValues(self, table):
    """Collapses grouped columns into one aggregated column per group."""
    # First get the groups
    groups = self.GetGroups(table)

    summary_table = []
    labels = table[0]

    summary_labels = ["Summary Table"]
    for group in groups:
      label = labels[group[0]]
      stripped_label = self.GetStrippedLabel(label)
      group_label = "%s (%d runs)" % (stripped_label, len(group))
      summary_labels.append(group_label)
    summary_table.append(summary_labels)

    for i in range(1, len(table)):
      row = table[i]
      summary_row = [row[0]]
      for group in groups:
        group_runs = []
        for index in group:
          group_runs.append(row[index])
        summary_row.append(self.AggregateResults(group_runs))
      summary_table.append(summary_row)

    return summary_table

  # Drop N% slowest and M% fastest numbers, and return arithmean of
  # the remaining.
  @staticmethod
  def AverageWithDrops(numbers, slow_percent=20, fast_percent=20):
    """Returns the mean of numbers after dropping the extreme tails."""
    sorted_numbers = list(numbers)
    sorted_numbers.sort()
    num_slow = int(slow_percent/100.0 * len(sorted_numbers))
    num_fast = int(fast_percent/100.0 * len(sorted_numbers))
    sorted_numbers = sorted_numbers[num_slow:]
    if num_fast:
      sorted_numbers = sorted_numbers[:-num_fast]
    return numpy.average(sorted_numbers)

  @staticmethod
  def AggregateResults(group_results):
    """Aggregates a group of cells: trimmed mean for all-float groups,
    "ALL_PASS"/"ALL_FAILS" for uniform pass/fail groups, "" otherwise.

    NOTE(review): "ALL_PASS" vs "ALL_FAILS" naming is inconsistent in the
    original; both strings are preserved since consumers may match them.
    """
    ret = ""
    if not group_results:
      return ret
    all_floats = True
    all_passes = True
    all_fails = True
    for group_result in group_results:
      if not IsFloat(group_result):
        all_floats = False
      if group_result != "PASSED":
        all_passes = False
      if group_result != "FAILED":
        all_fails = False
    if all_floats == True:
      float_results = [float(v) for v in group_results]
      ret = "%f" % TableFormatter.AverageWithDrops(float_results)
      # Add this line for standard deviation.
### ret += " %f" % numpy.std(float_results)
    elif all_passes == True:
      ret = "ALL_PASS"
    elif all_fails == True:
      ret = "ALL_FAILS"
    return ret

  @staticmethod
  def GetStrippedLabel(label):
    """Removes every "key:value" token (e.g. "remote:...", "i:0")."""
    return re.sub("\s*\S+:\S+\s*", "", label)
### return re.sub("\s*remote:\S*\s*i:\d+$", "", label)

  @staticmethod
  def GetLabelWithIteration(label, iteration):
    """Appends the iteration marker " i:<n>" to a label."""
    return "%s i:%d" % (label, iteration)