aboutsummaryrefslogtreecommitdiff
path: root/deprecated/automation
diff options
context:
space:
mode:
Diffstat (limited to 'deprecated/automation')
-rw-r--r--deprecated/automation/PRESUBMIT.py14
-rw-r--r--deprecated/automation/__init__.py1
-rw-r--r--deprecated/automation/all_tests.py16
-rw-r--r--deprecated/automation/clients/__init__.py1
-rwxr-xr-xdeprecated/automation/clients/android.py87
-rwxr-xr-xdeprecated/automation/clients/chromeos.py104
-rwxr-xr-xdeprecated/automation/clients/crosstool.py102
-rwxr-xr-xdeprecated/automation/clients/dejagnu_compiler.py98
-rw-r--r--deprecated/automation/clients/helper/__init__.py1
-rw-r--r--deprecated/automation/clients/helper/android.py319
-rw-r--r--deprecated/automation/clients/helper/chromeos.py180
-rw-r--r--deprecated/automation/clients/helper/crosstool.py168
-rw-r--r--deprecated/automation/clients/helper/jobs.py11
-rw-r--r--deprecated/automation/clients/helper/perforce.py215
-rwxr-xr-xdeprecated/automation/clients/nightly.py51
-rwxr-xr-xdeprecated/automation/clients/output_test.py29
-rwxr-xr-xdeprecated/automation/clients/pwd_test.py27
-rwxr-xr-xdeprecated/automation/clients/report/dejagnu.sh9
-rw-r--r--deprecated/automation/clients/report/dejagnu/__init__.py1
-rw-r--r--deprecated/automation/clients/report/dejagnu/main.py137
-rw-r--r--deprecated/automation/clients/report/dejagnu/manifest.py103
-rw-r--r--deprecated/automation/clients/report/dejagnu/report.html94
-rw-r--r--deprecated/automation/clients/report/dejagnu/report.py115
-rw-r--r--deprecated/automation/clients/report/dejagnu/summary.py262
-rwxr-xr-xdeprecated/automation/clients/report/validate_failures.py239
-rw-r--r--deprecated/automation/common/__init__.py1
-rw-r--r--deprecated/automation/common/command.py241
-rw-r--r--deprecated/automation/common/command_executer.py230
-rwxr-xr-xdeprecated/automation/common/command_executer_test.py210
-rw-r--r--deprecated/automation/common/events.py149
-rw-r--r--deprecated/automation/common/job.py178
-rw-r--r--deprecated/automation/common/job_group.py73
-rw-r--r--deprecated/automation/common/logger.py144
-rw-r--r--deprecated/automation/common/machine.py70
-rwxr-xr-xdeprecated/automation/common/machine_test.py26
-rw-r--r--deprecated/automation/common/state_machine.py54
-rw-r--r--deprecated/automation/server/__init__.py1
-rw-r--r--deprecated/automation/server/job_executer.py138
-rw-r--r--deprecated/automation/server/job_group_manager.py118
-rw-r--r--deprecated/automation/server/job_manager.py194
-rw-r--r--deprecated/automation/server/machine_manager.py77
-rwxr-xr-xdeprecated/automation/server/machine_manager_test.py32
-rw-r--r--deprecated/automation/server/monitor/__init__.py1
-rw-r--r--deprecated/automation/server/monitor/dashboard.py259
-rwxr-xr-xdeprecated/automation/server/monitor/manage.py20
-rw-r--r--deprecated/automation/server/monitor/settings.py49
-rwxr-xr-xdeprecated/automation/server/monitor/start.sh7
-rw-r--r--deprecated/automation/server/monitor/static/style.css101
-rw-r--r--deprecated/automation/server/monitor/templates/base.html30
-rw-r--r--deprecated/automation/server/monitor/templates/job.html29
-rw-r--r--deprecated/automation/server/monitor/templates/job_group.html46
-rw-r--r--deprecated/automation/server/monitor/templates/job_group_list.html35
-rw-r--r--deprecated/automation/server/monitor/templates/job_log.html20
-rw-r--r--deprecated/automation/server/monitor/templates/machine_list.html39
-rw-r--r--deprecated/automation/server/monitor/templates/snippet_attribute_table.html36
-rw-r--r--deprecated/automation/server/monitor/templates/snippet_code.html10
-rw-r--r--deprecated/automation/server/monitor/templates/snippet_links.html7
-rw-r--r--deprecated/automation/server/monitor/urls.py21
-rwxr-xr-xdeprecated/automation/server/server.py125
-rwxr-xr-xdeprecated/automation/server/server_test.py26
-rw-r--r--deprecated/automation/server/test_pool.csv4
61 files changed, 5185 insertions, 0 deletions
diff --git a/deprecated/automation/PRESUBMIT.py b/deprecated/automation/PRESUBMIT.py
new file mode 100644
index 00000000..56a43e6c
--- /dev/null
+++ b/deprecated/automation/PRESUBMIT.py
@@ -0,0 +1,14 @@
+# Copyright 2011 Google Inc. All Rights Reserved.
+#
+# Presubmit script for cc'ing c-compiler-chrome on automation-related CLs.
+#
+# PRESUBMIT METADATA:
+# [
+# MailTo(
+# p4_filespecs = [
+# "//depot2/gcctools/chromeos/v14/automation/...",
+# ],
+# addresses = ["c-compiler-chrome"],
+# owners = ["asharif", "llozano", "bjanakiraman", "yunlian"]
+# ),
+# ]
diff --git a/deprecated/automation/__init__.py b/deprecated/automation/__init__.py
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/deprecated/automation/__init__.py
@@ -0,0 +1 @@
+
diff --git a/deprecated/automation/all_tests.py b/deprecated/automation/all_tests.py
new file mode 100644
index 00000000..e7b70884
--- /dev/null
+++ b/deprecated/automation/all_tests.py
@@ -0,0 +1,16 @@
+import glob
+import sys
+import unittest
+
+sys.path.insert(0, 'server')
+sys.path.insert(0, 'clients')
+sys.path.insert(0, 'common')
+
+test_file_strings = glob.glob('*/*_test.py')
+module_strings = [str[0:len(str) - 3] for str in test_file_strings]
+for i in range(len(module_strings)):
+ module_strings[i] = module_strings[i].split('/')[-1]
+suites = [unittest.defaultTestLoader.loadTestsFromName(str)
+ for str in module_strings]
+testSuite = unittest.TestSuite(suites)
+text_runner = unittest.TextTestRunner().run(testSuite)
diff --git a/deprecated/automation/clients/__init__.py b/deprecated/automation/clients/__init__.py
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/deprecated/automation/clients/__init__.py
@@ -0,0 +1 @@
+
diff --git a/deprecated/automation/clients/android.py b/deprecated/automation/clients/android.py
new file mode 100755
index 00000000..06e76d29
--- /dev/null
+++ b/deprecated/automation/clients/android.py
@@ -0,0 +1,87 @@
+#!/usr/bin/python2
+#
+# Copyright 2011 Google Inc. All Rights Reserved.
+"""Client for Android nightly jobs.
+
+Does the following jobs:
+ 1. Checkout android toolchain sources
+ 2. Build Android toolchain
+ 3. Build Android tree
+ 4. Checkout/build/run Android benchmarks (TODO)
+ 5. Generate size/performance dashboard ? (TODO)
+"""
+
+__author__ = 'jingyu@google.com (Jing Yu)'
+
+import optparse
+import pickle
+import sys
+import xmlrpclib
+
+from automation.clients.helper import android
+from automation.common import job_group
+from automation.common import logger
+
+
+class AndroidToolchainNightlyClient(object):
+ VALID_GCC_VERSIONS = ['4.4.3', '4.6', 'google_main', 'fsf_trunk']
+
+ def __init__(self, gcc_version, is_release):
+ assert gcc_version in self.VALID_GCC_VERSIONS
+ self.gcc_version = gcc_version
+ if is_release:
+ self.build_type = 'RELEASE'
+ else:
+ self.build_type = 'DEVELOPMENT'
+
+ def Run(self):
+ server = xmlrpclib.Server('http://localhost:8000')
+ server.ExecuteJobGroup(pickle.dumps(self.CreateJobGroup()))
+
+ def CreateJobGroup(self):
+ factory = android.JobsFactory(self.gcc_version, self.build_type)
+
+ p4_androidtc_job, checkout_dir_dep = factory.CheckoutAndroidToolchain()
+
+ tc_build_job, tc_prefix_dep = factory.BuildAndroidToolchain(
+ checkout_dir_dep)
+
+ tree_build_job = factory.BuildAndroidImage(tc_prefix_dep)
+
+ benchmark_job = factory.Benchmark(tc_prefix_dep)
+
+ all_jobs = [p4_androidtc_job, tc_build_job, tree_build_job, benchmark_job]
+
+ return job_group.JobGroup('androidtoolchain_nightly', all_jobs, True, False)
+
+
+@logger.HandleUncaughtExceptions
+def Main(argv):
+ valid_gcc_versions_string = ', '.join(
+ AndroidToolchainNightlyClient.VALID_GCC_VERSIONS)
+
+ parser = optparse.OptionParser()
+ parser.add_option('--with-gcc-version',
+ dest='gcc_version',
+ default='4.6',
+ action='store',
+ choices=AndroidToolchainNightlyClient.VALID_GCC_VERSIONS,
+ help='gcc version: %s.' % valid_gcc_versions_string)
+ parser.add_option('-r',
+ '--release',
+ dest='is_release',
+ default=False,
+ action='store_true',
+ help='Build a release toolchain?')
+ options, _ = parser.parse_args(argv)
+
+ option_list = [opt.dest for opt in parser.option_list if opt.dest]
+
+ kwargs = dict((option, getattr(options, option)) for option in option_list)
+
+ client = AndroidToolchainNightlyClient(**kwargs)
+ client.Run()
+
+
+if __name__ == '__main__':
+ Main(sys.argv)
diff --git a/deprecated/automation/clients/chromeos.py b/deprecated/automation/clients/chromeos.py
new file mode 100755
index 00000000..572320fd
--- /dev/null
+++ b/deprecated/automation/clients/chromeos.py
@@ -0,0 +1,104 @@
+#!/usr/bin/python2
+#
+# Copyright 2011 Google Inc. All Rights Reserved.
+"""chromeos.py: Build & Test ChromeOS using custom compilers."""
+
+__author__ = 'asharif@google.com (Ahmad Sharif)'
+
+import logging
+import optparse
+import os
+import pickle
+import sys
+import xmlrpclib
+
+from automation.clients.helper import jobs
+from automation.clients.helper import perforce
+from automation.common import command as cmd
+from automation.common import job_group
+from automation.common import logger
+
+
+class ChromeOSNightlyClient(object):
+ DEPOT2_DIR = '//depot2/'
+ P4_CHECKOUT_DIR = 'perforce2/'
+ P4_VERSION_DIR = os.path.join(P4_CHECKOUT_DIR, 'gcctools/chromeos/v14')
+
+ def __init__(self, board, remote, gcc_githash, p4_snapshot=''):
+ self._board = board
+ self._remote = remote
+ self._gcc_githash = gcc_githash
+ self._p4_snapshot = p4_snapshot
+
+ def Run(self):
+ server = xmlrpclib.Server('http://localhost:8000')
+ server.ExecuteJobGroup(pickle.dumps(self.CreateJobGroup()))
+
+ def CheckoutV14Dir(self):
+ p4view = perforce.View(self.DEPOT2_DIR, [
+ perforce.PathMapping('gcctools/chromeos/v14/...')
+ ])
+ return self.GetP4Snapshot(p4view)
+
+ def GetP4Snapshot(self, p4view):
+ p4client = perforce.CommandsFactory(self.P4_CHECKOUT_DIR, p4view)
+
+ if self._p4_snapshot:
+ return p4client.CheckoutFromSnapshot(self._p4_snapshot)
+ else:
+ return p4client.SetupAndDo(p4client.Sync(), p4client.Remove())
+
+ def CreateJobGroup(self):
+ chain = cmd.Chain(
+ self.CheckoutV14Dir(),
+ cmd.Shell('python',
+ os.path.join(self.P4_VERSION_DIR, 'test_toolchains.py'),
+ '--force-mismatch',
+ '--clean',
+ '--public', # crbug.com/145822
+ '--board=%s' % self._board,
+ '--remote=%s' % self._remote,
+ '--githashes=%s' % self._gcc_githash))
+ label = 'testlabel'
+ job = jobs.CreateLinuxJob(label, chain, timeout=24 * 60 * 60)
+
+ return job_group.JobGroup(label, [job], True, False)
+
+
+@logger.HandleUncaughtExceptions
+def Main(argv):
+ parser = optparse.OptionParser()
+ parser.add_option('-b',
+ '--board',
+ dest='board',
+ help='Run performance tests on these boards')
+ parser.add_option('-r',
+ '--remote',
+ dest='remote',
+ help='Run performance tests on these remotes')
+ parser.add_option('-g',
+ '--gcc_githash',
+ dest='gcc_githash',
+ help='Use this gcc_githash.')
+ parser.add_option('-p',
+ '--p4_snapshot',
+ dest='p4_snapshot',
+ default='',
+ help='Use this p4_snapshot.')
+ options, _ = parser.parse_args(argv)
+
+ if not all([options.board, options.remote, options.gcc_githash]):
+ logging.error('Specify a board, remote and gcc_githash')
+ return 1
+
+ client = ChromeOSNightlyClient(options.board,
+ options.remote,
+ options.gcc_githash,
+ p4_snapshot=options.p4_snapshot)
+ client.Run()
+ return 0
+
+
+if __name__ == '__main__':
+ logger.SetUpRootLogger(level=logging.DEBUG, display_flags={'name': False})
+ sys.exit(Main(sys.argv))
diff --git a/deprecated/automation/clients/crosstool.py b/deprecated/automation/clients/crosstool.py
new file mode 100755
index 00000000..9ba83807
--- /dev/null
+++ b/deprecated/automation/clients/crosstool.py
@@ -0,0 +1,102 @@
+#!/usr/bin/python2
+#
+# Copyright 2011 Google Inc. All Rights Reserved.
+
+__author__ = 'kbaclawski@google.com (Krystian Baclawski)'
+
+import logging
+import optparse
+import pickle
+import sys
+import xmlrpclib
+
+from automation.clients.helper import crosstool
+from automation.common import job_group
+from automation.common import logger
+
+
+class CrosstoolNightlyClient(object):
+ VALID_TARGETS = ['gcc-4.6.x-ubuntu_lucid-arm',
+ 'gcc-4.6.x-ubuntu_lucid-x86_64',
+ 'gcc-4.6.x-grtev2-armv7a-vfpv3.d16-hard',
+ 'gcc-4.6.x-glibc-2.11.1-grte',
+ 'gcc-4.6.x-glibc-2.11.1-powerpc']
+ VALID_BOARDS = ['qemu', 'pandaboard', 'unix']
+
+ def __init__(self, target, boards):
+ assert target in self.VALID_TARGETS
+ assert all(board in self.VALID_BOARDS for board in boards)
+
+ self._target = target
+ self._boards = boards
+
+ def Run(self):
+ server = xmlrpclib.Server('http://localhost:8000')
+ server.ExecuteJobGroup(pickle.dumps(self.CreateJobGroup()))
+
+ def CreateJobGroup(self):
+ factory = crosstool.JobsFactory()
+
+ checkout_crosstool_job, checkout_dir, manifests_dir = \
+ factory.CheckoutCrosstool(self._target)
+
+ all_jobs = [checkout_crosstool_job]
+
+ # Build crosstool target
+ build_release_job, build_tree_dir = factory.BuildRelease(checkout_dir,
+ self._target)
+ all_jobs.append(build_release_job)
+
+ testruns = []
+
+ # Perform crosstool tests
+ for board in self._boards:
+ for component in ('gcc', 'binutils'):
+ test_job, testrun_dir = factory.RunTests(checkout_dir, build_tree_dir,
+ self._target, board, component)
+ all_jobs.append(test_job)
+ testruns.append(testrun_dir)
+
+ if testruns:
+ all_jobs.append(factory.GenerateReport(testruns, manifests_dir,
+ self._target, self._boards))
+
+ return job_group.JobGroup('Crosstool Nightly Build (%s)' % self._target,
+ all_jobs, True, False)
+
+
+@logger.HandleUncaughtExceptions
+def Main(argv):
+ valid_boards_string = ', '.join(CrosstoolNightlyClient.VALID_BOARDS)
+
+ parser = optparse.OptionParser()
+ parser.add_option(
+ '-b',
+ '--board',
+ dest='boards',
+ action='append',
+ choices=CrosstoolNightlyClient.VALID_BOARDS,
+ default=[],
+ help=('Run DejaGNU tests on selected boards: %s.' % valid_boards_string))
+ options, args = parser.parse_args(argv)
+
+ if len(args) == 2:
+ target = args[1]
+ else:
+ logging.error('Exactly one target required as a command line argument!')
+ logging.info('List of valid targets:')
+ for pair in enumerate(CrosstoolNightlyClient.VALID_TARGETS, start=1):
+ logging.info('%d) %s', pair)
+ sys.exit(1)
+
+ option_list = [opt.dest for opt in parser.option_list if opt.dest]
+
+ kwargs = dict((option, getattr(options, option)) for option in option_list)
+
+ client = CrosstoolNightlyClient(target, **kwargs)
+ client.Run()
+
+
+if __name__ == '__main__':
+ logger.SetUpRootLogger(level=logging.DEBUG, display_flags={'name': False})
+ Main(sys.argv)
diff --git a/deprecated/automation/clients/dejagnu_compiler.py b/deprecated/automation/clients/dejagnu_compiler.py
new file mode 100755
index 00000000..7448b87e
--- /dev/null
+++ b/deprecated/automation/clients/dejagnu_compiler.py
@@ -0,0 +1,98 @@
+#!/usr/bin/python2
+#
+# Copyright 2012 Google Inc. All Rights Reserved.
+"""dejagnu_compiler.py: Run dejagnu test."""
+
+__author__ = 'shenhan@google.com (Han Shen)'
+
+import logging
+import optparse
+import os
+import pickle
+import sys
+import xmlrpclib
+
+from automation.clients.helper import jobs
+from automation.clients.helper import perforce
+from automation.common import command as cmd
+from automation.common import job_group
+from automation.common import logger
+
+
+class DejagnuCompilerNightlyClient:
+ DEPOT2_DIR = '//depot2/'
+ P4_CHECKOUT_DIR = 'perforce2/'
+ P4_VERSION_DIR = os.path.join(P4_CHECKOUT_DIR, 'gcctools/chromeos/v14')
+
+ def __init__(self, board, remote, p4_snapshot, cleanup):
+ self._board = board
+ self._remote = remote
+ self._p4_snapshot = p4_snapshot
+ self._cleanup = cleanup
+
+ def Run(self):
+ server = xmlrpclib.Server('http://localhost:8000')
+ server.ExecuteJobGroup(pickle.dumps(self.CreateJobGroup()))
+
+ def CheckoutV14Dir(self):
+ p4view = perforce.View(self.DEPOT2_DIR, [
+ perforce.PathMapping('gcctools/chromeos/v14/...')
+ ])
+ return self.GetP4Snapshot(p4view)
+
+ def GetP4Snapshot(self, p4view):
+ p4client = perforce.CommandsFactory(self.P4_CHECKOUT_DIR, p4view)
+
+ if self._p4_snapshot:
+ return p4client.CheckoutFromSnapshot(self._p4_snapshot)
+ else:
+ return p4client.SetupAndDo(p4client.Sync(), p4client.Remove())
+
+ def CreateJobGroup(self):
+ chain = cmd.Chain(self.CheckoutV14Dir(), cmd.Shell(
+ 'python', os.path.join(self.P4_VERSION_DIR, 'test_gcc_dejagnu.py'),
+ '--board=%s' % self._board, '--remote=%s' % self._remote,
+ '--cleanup=%s' % self._cleanup))
+ label = 'dejagnu'
+ job = jobs.CreateLinuxJob(label, chain, timeout=8 * 60 * 60)
+ return job_group.JobGroup(label,
+ [job],
+ cleanup_on_failure=True,
+ cleanup_on_completion=True)
+
+
+@logger.HandleUncaughtExceptions
+def Main(argv):
+ parser = optparse.OptionParser()
+ parser.add_option('-b',
+ '--board',
+ dest='board',
+ help='Run performance tests on these boards')
+ parser.add_option('-r',
+ '--remote',
+ dest='remote',
+ help='Run performance tests on these remotes')
+ parser.add_option('-p',
+ '--p4_snapshot',
+ dest='p4_snapshot',
+ help=('For development only. '
+ 'Use snapshot instead of checking out.'))
+ parser.add_option('--cleanup',
+ dest='cleanup',
+ default='mount',
+ help=('Cleanup test directory, values could be one of '
+ '"mount", "chroot" or "chromeos"'))
+ options, _ = parser.parse_args(argv)
+
+ if not all([options.board, options.remote]):
+ logging.error('Specify a board and remote.')
+ return 1
+
+ client = DejagnuCompilerNightlyClient(options.board, options.remote,
+ options.p4_snapshot, options.cleanup)
+ client.Run()
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(Main(sys.argv))
diff --git a/deprecated/automation/clients/helper/__init__.py b/deprecated/automation/clients/helper/__init__.py
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/deprecated/automation/clients/helper/__init__.py
@@ -0,0 +1 @@
+
diff --git a/deprecated/automation/clients/helper/android.py b/deprecated/automation/clients/helper/android.py
new file mode 100644
index 00000000..7ff2ac1c
--- /dev/null
+++ b/deprecated/automation/clients/helper/android.py
@@ -0,0 +1,319 @@
+# Copyright 2011 Google Inc. All Rights Reserved.
+"""Helper modules for Android toolchain test infrastructure.
+
+Provides following Android toolchain test jobs and commands.
+. Checkout Android toolchain source code
+. Build Android toolchain
+. Checkout and build Android tree
+. Checkout/build/run Android benchmarks, generate size dashboard
+. Transform size dashboard to report, send perflab jobid to
+  perflab dashboard server. (TODO)
+"""
+
+__author__ = 'jingyu@google.com (Jing Yu)'
+
+import os.path
+
+from automation.clients.helper import jobs
+from automation.clients.helper import perforce
+from automation.common import command as cmd
+from automation.common import job
+
+
+class JobsFactory(object):
+
+ def __init__(self, gcc_version='4.4.3', build_type='DEVELOPMENT'):
+ assert gcc_version in ['4.4.3', '4.6', 'google_main', 'fsf_trunk']
+ assert build_type in ['DEVELOPMENT', 'RELEASE']
+
+ self.gcc_version = gcc_version
+ self.commands = CommandsFactory(gcc_version, build_type)
+ self.tc_tag = 'gcc-%s-%s' % (gcc_version, build_type)
+
+ def CheckoutAndroidToolchain(self):
+ """Check out Android toolchain sources by release and gcc version."""
+ command = self.commands.CheckoutAndroidToolchain()
+ new_job = jobs.CreateLinuxJob('AndroidCheckoutToolchain(%s)' % self.tc_tag,
+ command)
+ checkout_dir_dep = job.FolderDependency(new_job, self.commands.CHECKOUT_DIR)
+ return new_job, checkout_dir_dep
+
+ def BuildAndroidToolchain(self, checkout_dir_dep):
+ """Build Android Toolchain."""
+ command = self.commands.BuildAndroidToolchain()
+ new_job = jobs.CreateLinuxJob('AndroidBuildToolchain(%s)' % self.tc_tag,
+ command)
+ new_job.DependsOnFolder(checkout_dir_dep)
+ tc_prefix_dep = job.FolderDependency(new_job,
+ self.commands.toolchain_prefix_dir)
+ return new_job, tc_prefix_dep
+
+ def BuildAndroidImage(self,
+ tc_prefix_dep,
+ product='stingray',
+ branch='ics-release'):
+ assert product in ['stingray', 'passion', 'trygon', 'soju']
+ assert branch in ['honeycomb-release', 'ics-release']
+ command = self.commands.BuildAndroidImage(product, branch)
+ new_job = jobs.CreateLinuxJob('AndroidGetBuildTree(%s)' % self.tc_tag,
+ command)
+ new_job.DependsOnFolder(tc_prefix_dep)
+ return new_job
+
+ def Benchmark(self, tc_prefix_dep, arch='soju'):
+ assert arch in ['soju', 'stingray']
+ script_cmd = self.commands.CheckoutScripts()
+ experiment_tag = 'android/nightly/%s/%s/$JOB_ID' % (self.tc_tag, arch)
+ build_run_benchmark_cmd = self.commands.BuildRunBenchmark(arch,
+ experiment_tag)
+ command = cmd.Chain(script_cmd, build_run_benchmark_cmd)
+ new_job = jobs.CreateLinuxJob('AndroidBenchmarking(%s)' % self.tc_tag,
+ command)
+ new_job.DependsOnFolder(tc_prefix_dep)
+ return new_job
+
+
+class CommandsFactory(object):
+ CHECKOUT_DIR = 'androidtc-checkout-dir'
+ TOOLCHAIN_SRC_DIR = os.path.join(CHECKOUT_DIR, 'src')
+ TOOLCHAIN_BUILD_DIR = 'obj'
+ ANDROID_TREES_DIR = 'android_trees'
+ TOOLS_DIR = 'android-tools'
+ BENCHMARK_OUT_DIR = 'results'
+
+ def __init__(self, gcc_version, build_type):
+ assert gcc_version in ['4.4.3', '4.6', 'google_main', 'fsf_trunk']
+ assert build_type in ['DEVELOPMENT', 'RELEASE']
+
+ self.build_type = build_type
+ self.gcc_version = gcc_version
+ self.binutils_version = '2.21'
+ self.gold_version = '2.21'
+ self.toolchain_prefix_dir = 'install-gcc-%s-%s' % (gcc_version, build_type)
+ self.p4client = self._CreatePerforceClient()
+ self.scripts = ScriptsFactory(self.gcc_version, self.binutils_version,
+ self.gold_version)
+
+ def _CreatePerforceClient(self):
+ p4_dev_path = 'gcctools/google_vendor_src_branch'
+ mobile_rel_branch = ('branches/'
+ 'mobile_toolchain_v15_release_branch/gcctools/'
+ 'google_vendor_src_branch')
+ gcc_443_rel_branch = ('branches/'
+ 'android_compiler_v14_release_branch/gcctools/'
+ 'google_vendor_src_branch')
+
+ # Common views for tools
+ p4view = perforce.View('depot2', perforce.PathMapping.ListFromPathTuples([(
+ 'gcctools/android/build/...', 'src/build/...'), (
+ 'gcctools/android/Tarballs/...', 'src/tarballs/...')]))
+ for mapping in perforce.PathMapping.ListFromPathDict(
+ {'gcctools/android': ['tools/scripts/...', 'master/...']}):
+ p4view.add(mapping)
+
+ # Add views for gdb
+ p4view.add(perforce.PathMapping(p4_dev_path, 'src',
+ 'gdb/gdb-7.1.x-android/...'))
+
+ # Add view for binutils for ld and gold
+ if self.build_type is 'RELEASE':
+ binutils_branch = mobile_rel_branch
+ else:
+ binutils_branch = p4_dev_path
+ p4view.add(perforce.PathMapping(binutils_branch, 'src', (
+ 'binutils/binutils-%s/...' % self.binutils_version)))
+ if self.binutils_version != self.gold_version:
+ p4view.add(perforce.PathMapping(binutils_branch, 'src', (
+ 'binutils/binutils-%s/...' % self.gold_version)))
+
+ # Add view for gcc if gcc_version is '4.4.3'.
+ if self.gcc_version == '4.4.3':
+ gcc443_path = 'gcc/gcc-4.4.3/...'
+ if self.build_type is 'RELEASE':
+ p4view.add(perforce.PathMapping(gcc_443_rel_branch, 'src', gcc443_path))
+ else:
+ p4view.add(perforce.PathMapping(p4_dev_path, 'src', gcc443_path))
+
+ return perforce.CommandsFactory(self.CHECKOUT_DIR, p4view)
+
+ def _CheckoutGCCFromSVN(self):
+ """Check out gcc from fsf svn.
+
+    Return the command that checks out gcc from svn
+ to gcc_required_dir (=TOOLCHAIN_SRC_DIR/src/gcc/gcc-xxx).
+
+ TODO:
+ Create a svn class that does these jobs.
+ Parallelize p4 checkout and svn checkout.
+ """
+ if self.gcc_version == '4.4.3':
+ return ''
+ assert self.gcc_version in ['4.6', 'google_main', 'fsf_trunk']
+
+ gcc_branches_dir = {'4.6': 'branches/google/gcc-4_6',
+ 'google_main': 'branches/google/main',
+ 'fsf_trunk': 'trunk'}
+
+ # Find GCC revision number, output it to TOOLCHAIN_SRC_DIR/CLNUM_GCC
+ svn_get_revision = cmd.Pipe(
+ cmd.Shell('svn', 'info'),
+ cmd.Shell('grep', '"Revision:"'),
+ cmd.Shell('sed', '-E', '"s,Revision: ([0-9]+).*,\\1,"'),
+ output='../../../CLNUM_GCC')
+
+ svn_co_command = 'svn co svn://gcc.gnu.org/svn/gcc/%s .' % (
+ gcc_branches_dir[self.gcc_version])
+
+ gcc_required_dir = os.path.join(self.TOOLCHAIN_SRC_DIR, 'gcc',
+ 'gcc-%s' % self.gcc_version)
+
+ return cmd.Chain(
+ cmd.MakeDir(gcc_required_dir),
+ cmd.Wrapper(
+ cmd.Chain(svn_co_command, svn_get_revision),
+ cwd=gcc_required_dir))
+
+ def CheckoutAndroidToolchain(self):
+ p4client = self.p4client
+ command = p4client.SetupAndDo(p4client.Sync(),
+ p4client.SaveCurrentCLNumber('CLNUM'),
+ p4client.Remove())
+ if self.gcc_version != '4.4.3':
+ command.append(self._CheckoutGCCFromSVN())
+
+ return command
+
+ def BuildAndroidToolchain(self):
+ script_cmd = self.scripts.BuildAndroidToolchain(
+ self.toolchain_prefix_dir, self.CHECKOUT_DIR, self.TOOLCHAIN_BUILD_DIR,
+ self.TOOLCHAIN_SRC_DIR)
+
+ # Record toolchain and gcc CL number
+ record_cl_cmd = cmd.Copy(
+ os.path.join(self.CHECKOUT_DIR, 'CLNUM*'),
+ to_dir=self.toolchain_prefix_dir)
+ save_cmd = cmd.Tar(
+ os.path.join('$JOB_TMP', 'results', '%s.tar.bz2' %
+ self.toolchain_prefix_dir), self.toolchain_prefix_dir)
+ return cmd.Chain(script_cmd, record_cl_cmd, save_cmd)
+
+ def _BuildAndroidTree(self, local_android_branch_dir, product):
+ target_tools_prefix = os.path.join('$JOB_TMP', self.toolchain_prefix_dir,
+ 'bin', 'arm-linux-androideabi-')
+ java_path = '/usr/lib/jvm/java-6-sun/bin'
+ build_cmd = cmd.Shell('make', '-j8', 'PRODUCT-%s-userdebug' % product,
+ 'TARGET_TOOLS_PREFIX=%s' % target_tools_prefix,
+ 'PATH=%s:$PATH' % java_path)
+ return cmd.Wrapper(build_cmd, cwd=local_android_branch_dir)
+
+ def BuildAndroidImage(self, product, branch):
+ assert product in ['stingray', 'passion', 'trygon', 'soju']
+
+ # Copy the tree from atree.mtv.corp to ANDROID_TREES_DIR/branch
+ androidtrees_host = 'atree.mtv.corp.google.com'
+ androidtrees_path = ('/usr/local/google2/home/mobiletc-prebuild/'
+ 'android_trees')
+ remote_android_branch_path = os.path.join(androidtrees_path, branch)
+ local_android_branch_dir = os.path.join(self.ANDROID_TREES_DIR, branch)
+ gettree_cmd = cmd.RemoteCopyFrom(
+ androidtrees_host, remote_android_branch_path, local_android_branch_dir)
+
+ # Configure and build the tree
+ buildtree_cmd = self._BuildAndroidTree(local_android_branch_dir, product)
+
+ # Compress and copy system.img to result
+ result_system_img = os.path.join(local_android_branch_dir, 'out', 'target',
+ 'product', product, 'system.img')
+ copy_img = cmd.Copy(result_system_img, to_dir='results')
+ compress_img = cmd.Shell('bzip2', os.path.join('results', 'system.img'))
+
+ return cmd.Chain(gettree_cmd, buildtree_cmd, copy_img, compress_img)
+
+ def CheckoutScripts(self):
+ p4view = perforce.View('depot2',
+ [perforce.PathMapping('gcctools/android/tools/...',
+ 'tools/...')])
+ p4client = perforce.CommandsFactory(self.TOOLS_DIR, p4view)
+ return p4client.SetupAndDo(p4client.Sync(), p4client.Remove())
+
+ def BuildRunBenchmark(self, arch, run_experiment):
+ # Copy base benchmark binaries from atree.mtv.corp
+ base_benchbin_host = 'atree.mtv.corp.google.com'
+ base_benchbin_path = ('/usr/local/google2/home/mobiletc-prebuild/'
+ 'archive/v3binaries/2011-10-18')
+ local_basebenchbin_dir = 'base_benchmark_bin'
+ getbase_cmd = cmd.RemoteCopyFrom(base_benchbin_host, base_benchbin_path,
+ local_basebenchbin_dir)
+
+ # Build and run benchmark.
+ android_arch = 'android_%s' % arch
+ run_label = 'normal'
+ benchmark_cmd = self.scripts.RunBenchmark(
+ self.toolchain_prefix_dir, self.TOOLS_DIR, self.BENCHMARK_OUT_DIR,
+ run_label, run_experiment, android_arch, local_basebenchbin_dir)
+
+ # Extract jobid from BENCHMARK_OUT_DIR/log/jobid_normal.log file.
+ # Copy jobid to www server to generate performance dashboard.
+ # TODO(jingyu)
+
+ return cmd.Chain(getbase_cmd, benchmark_cmd)
+
+
+class ScriptsFactory(object):
+
+ def __init__(self, gcc_version, binutils_version, gold_version):
+ self._gcc_version = gcc_version
+ self._binutils_version = binutils_version
+ self._gold_version = gold_version
+
+ def BuildAndroidToolchain(self, toolchain_prefix_dir, checkout_dir,
+ toolchain_build_dir, androidtc_src_dir):
+ if self._gcc_version == '4.4.3':
+ gold_option = 'both/gold'
+ else:
+ gold_option = 'default'
+
+ return cmd.Shell(
+ 'build_androidtoolchain.sh',
+ '--toolchain-src=%s' % os.path.join('$JOB_TMP', androidtc_src_dir),
+ '--build-path=%s' % os.path.join('$JOB_TMP', toolchain_build_dir),
+ '--install-prefix=%s' % os.path.join('$JOB_TMP', toolchain_prefix_dir),
+ '--target=arm-linux-androideabi',
+ '--enable-gold=%s' % gold_option,
+ '--with-gcc-version=%s' % self._gcc_version,
+ '--with-binutils-version=%s' % self._binutils_version,
+ '--with-gold-version=%s' % self._gold_version,
+ '--with-gdb-version=7.1.x-android',
+ '--log-path=%s/logs' % '$JOB_HOME',
+ '--android-sysroot=%s' % os.path.join('$JOB_TMP', checkout_dir,
+ 'gcctools', 'android', 'master',
+ 'honeycomb_generic_sysroot'),
+ path=os.path.join(checkout_dir, 'gcctools', 'android', 'tools',
+ 'scripts'))
+
+ def RunBenchmark(self,
+ toolchain_prefix_dir,
+ checkout_dir,
+ output_dir,
+ run_label,
+ run_experiment,
+ arch,
+ base_bench_bin=None):
+ if base_bench_bin:
+ base_bench_opt = '--base_benchmark_bin=%s' % base_bench_bin
+ else:
+ base_bench_opt = ''
+
+ return cmd.Shell(
+ 'benchmark.sh',
+ '--android_toolchain=%s' % os.path.join('$JOB_TMP',
+ toolchain_prefix_dir),
+ '--bench_space=%s' % os.path.join('$JOB_TMP', 'bench'),
+ '--benchmark_bin=%s' % os.path.join('$JOB_TMP', output_dir,
+ 'bench_bin'),
+ base_bench_opt,
+ '--log_path=%s' % os.path.join('$JOB_TMP', output_dir, 'log'),
+ '--arch=%s' % arch,
+ '--run_label=%s' % run_label,
+ '--run_experiment=%s' % run_experiment,
+ path=os.path.join(checkout_dir, 'tools', 'scripts'))
diff --git a/deprecated/automation/clients/helper/chromeos.py b/deprecated/automation/clients/helper/chromeos.py
new file mode 100644
index 00000000..e7157451
--- /dev/null
+++ b/deprecated/automation/clients/helper/chromeos.py
@@ -0,0 +1,180 @@
+# Copyright 2011 Google Inc. All Rights Reserved.
+
+__author__ = 'asharif@google.com (Ahmad Sharif)'
+
+import os.path
+import re
+
+from automation.clients.helper import jobs
+from automation.clients.helper import perforce
+from automation.common import command as cmd
+from automation.common import machine
+
+
class ScriptsFactory(object):
  """Creates commands that invoke helper scripts from the toolchain tree."""

  def __init__(self, chromeos_root, scripts_path):
    self._chromeos_root = chromeos_root
    self._scripts_path = scripts_path

  def SummarizeResults(self, logs_path):
    """Command that summarizes benchmark results found under logs_path."""
    return cmd.Shell('summarize_results.py', logs_path, path=self._scripts_path)

  def Buildbot(self, config_name):
    """Command that resumes a local cbuildbot run for the given config."""
    buildbot_script = os.path.join(self._chromeos_root,
                                   'chromite/cbuildbot/cbuildbot.py')

    return cmd.Shell(buildbot_script, '--buildroot=%s' % self._chromeos_root,
                     '--resume', '--noarchive', '--noprebuilts', '--nosync',
                     '--nouprev', '--notests', '--noclean', config_name)

  def RunBenchmarks(self, board, tests):
    """Command that benchmarks the latest image built for the board."""
    latest_image = os.path.join(self._chromeos_root, 'src/build/images', board,
                                'latest/chromiumos_image.bin')

    return cmd.Shell('cros_run_benchmarks.py',
                     '--remote=$SECONDARY_MACHINES[0]',
                     '--board=%s' % board,
                     '--tests=%s' % tests,
                     '--full_table',
                     latest_image,
                     path='/home/mobiletc-prebuild')

  def SetupChromeOS(self, version='latest', use_minilayout=False):
    """Command that checks out a public ChromeOS tree of the given version."""
    checkout = cmd.Shell('setup_chromeos.py',
                         '--public',
                         '--dir=%s' % self._chromeos_root,
                         '--version=%s' % version,
                         path=self._scripts_path)

    if use_minilayout:
      checkout.AddOption('--minilayout')
    return checkout
+
+
class CommandsFactory(object):
  """Builds commands that check out, build and benchmark ChromeOS."""

  DEPOT2_DIR = '//depot2/'
  P4_CHECKOUT_DIR = 'perforce2/'
  P4_VERSION_DIR = os.path.join(P4_CHECKOUT_DIR, 'gcctools/chromeos/v14')

  CHROMEOS_ROOT = 'chromeos'
  CHROMEOS_SCRIPTS_DIR = os.path.join(CHROMEOS_ROOT, 'src/scripts')
  CHROMEOS_BUILDS_DIR = '/home/mobiletc-prebuild/www/chromeos_builds'

  def __init__(self, chromeos_version, board, toolchain, p4_snapshot):
    self.chromeos_version = chromeos_version
    self.board = board
    self.toolchain = toolchain
    # Optional local snapshot directory; when set, sources are rsynced from
    # it instead of synced from Perforce (see GetP4Snapshot).
    self.p4_snapshot = p4_snapshot

    self.scripts = ScriptsFactory(self.CHROMEOS_ROOT, self.P4_VERSION_DIR)

  def AddBuildbotConfig(self, config_name, config_list):
    """Returns a command appending an add_config() line to cbuildbot_config.py.

    Single quotes in the generated line are rewritten to escaped double
    quotes so the line survives being echoed through the shell.
    """
    config_header = 'add_config(%r, [%s])' % (config_name,
                                              ', '.join(config_list))
    config_file = os.path.join(self.CHROMEOS_ROOT,
                               'chromite/cbuildbot/cbuildbot_config.py')
    quoted_config_header = '%r' % config_header
    quoted_config_header = re.sub("'", "\\\"", quoted_config_header)

    return cmd.Pipe(
        cmd.Shell('echo', quoted_config_header),
        cmd.Shell('tee', '--append', config_file))

  def RunBuildbot(self):
    """Returns a command that registers a toolchain config and runs buildbot."""
    config_dict = {'board': self.board,
                   'build_tests': True,
                   'chrome_tests': True,
                   'unittests': False,
                   'vm_tests': False,
                   'prebuilts': False,
                   'latest_toolchain': True,
                   'useflags': ['chrome_internal'],
                   'usepkg_chroot': True,
                   self.toolchain: True}
    config_name = '%s-toolchain-test' % self.board
    if 'arm' in self.board:
      config_list = ['arm']
    else:
      config_list = []
    config_list.extend(['internal', 'full', 'official', str(config_dict)])

    add_config_shell = self.AddBuildbotConfig(config_name, config_list)
    return cmd.Chain(add_config_shell, self.scripts.Buildbot(config_name))

  def BuildAndBenchmark(self):
    """Returns the full checkout -> build -> benchmark command chain."""
    return cmd.Chain(
        self.CheckoutV14Dir(),
        self.SetupChromeOSCheckout(self.chromeos_version, True),
        self.RunBuildbot(),
        self.scripts.RunBenchmarks(self.board, 'BootPerfServer,10:Page,3'))

  def GetP4Snapshot(self, p4view):
    """Returns a checkout command: from snapshot if set, else a p4 sync."""
    p4client = perforce.CommandsFactory(self.P4_CHECKOUT_DIR, p4view)

    if self.p4_snapshot:
      return p4client.CheckoutFromSnapshot(self.p4_snapshot)
    else:
      return p4client.SetupAndDo(p4client.Sync(), p4client.Remove())

  def CheckoutV14Dir(self):
    """Returns a command that checks out gcctools/chromeos/v14."""
    p4view = perforce.View(self.DEPOT2_DIR, [
        perforce.PathMapping('gcctools/chromeos/v14/...')
    ])
    return self.GetP4Snapshot(p4view)

  def SetupChromeOSCheckout(self, version, use_minilayout=False):
    """Returns a command that provides a ChromeOS tree of the given version.

    Accepts 'weekly'/'quarterly' symlinks, 'top'/'latest' or dotted version
    numbers (delegated to setup_chromeos.py), prebuilt tarballs, or a plain
    directory that is copied verbatim.
    """
    # Raw string: '\d' in a non-raw literal is an invalid escape sequence
    # (SyntaxWarning on modern Python, error in future versions).
    version_re = r'^\d+\.\d+\.\d+\.[a-zA-Z0-9]+$'

    location = os.path.join(self.CHROMEOS_BUILDS_DIR, version)

    if version in ['weekly', 'quarterly']:
      assert os.path.islink(location), 'Symlink %s does not exist.' % location

      location_expanded = os.path.abspath(os.path.realpath(location))
      version = os.path.basename(location_expanded)

    if version in ['top', 'latest'] or re.match(version_re, version):
      return self.scripts.SetupChromeOS(version, use_minilayout)

    elif version.endswith('bz2') or version.endswith('gz'):
      return cmd.UnTar(location_expanded, self.CHROMEOS_ROOT)

    else:
      # Sanity-check that the directory really is a ChromeOS tree before
      # copying it wholesale.
      signature_file_location = os.path.join(location,
                                             'src/scripts/enter_chroot.sh')
      assert os.path.exists(signature_file_location), (
          'Signature file %s does not exist.' % signature_file_location)

      return cmd.Copy(location, to_dir=self.CHROMEOS_ROOT, recursive=True)
+
+
class JobsFactory(object):
  """Creates jobs that build a ChromeOS toolchain and benchmark it."""

  def __init__(self,
               chromeos_version='top',
               board='x86-mario',
               toolchain='trunk',
               p4_snapshot=''):
    self.chromeos_version = chromeos_version
    self.board = board
    self.toolchain = toolchain

    self.commands = CommandsFactory(chromeos_version, board, toolchain,
                                    p4_snapshot)

  def BuildAndBenchmark(self):
    """Creates a job that builds the toolchain and ChromeOS, then benchmarks.

    The job is pinned to a locked machine labeled 'chromeos-<board>'.
    """
    label = 'BuildAndBenchmark(%s,%s,%s)' % (self.toolchain, self.board,
                                             self.chromeos_version)

    benchmark_job = jobs.CreateLinuxJob(label,
                                        self.commands.BuildAndBenchmark())

    benchmark_job.DependsOnMachine(
        machine.MachineSpecification(label='chromeos-%s' % self.board,
                                     lock_required=True),
        False)

    return benchmark_job
diff --git a/deprecated/automation/clients/helper/crosstool.py b/deprecated/automation/clients/helper/crosstool.py
new file mode 100644
index 00000000..80154b25
--- /dev/null
+++ b/deprecated/automation/clients/helper/crosstool.py
@@ -0,0 +1,168 @@
+# Copyright 2011 Google Inc. All Rights Reserved.
+
+__author__ = 'kbaclawski@google.com (Krystian Baclawski)'
+
+import os.path
+import time
+
+from automation.clients.helper import jobs
+from automation.clients.helper import perforce
+from automation.common import command as cmd
+from automation.common import job
+
+
class JobsFactory(object):
  """Creates jobs that check out, build and test the crosstool toolchain."""

  def __init__(self):
    self.commands = CommandsFactory()

  def CheckoutCrosstool(self, target):
    """Job that syncs crosstool sources; exports checkout and manifests."""
    checkout_job = jobs.CreateLinuxJob('CheckoutCrosstool(%s)' % target,
                                       self.commands.CheckoutCrosstool())
    checkout_dir_dep = job.FolderDependency(checkout_job,
                                            CommandsFactory.CHECKOUT_DIR)
    manifests_dir_dep = job.FolderDependency(
        checkout_job, os.path.join(self.commands.buildit_path, target),
        'manifests')
    return checkout_job, checkout_dir_dep, manifests_dir_dep

  def BuildRelease(self, checkout_dir, target):
    """Job that builds a release toolchain; exports the build tree."""
    build_job = jobs.CreateLinuxJob('BuildRelease(%s)' % target,
                                    self.commands.BuildRelease(target))
    build_job.DependsOnFolder(checkout_dir)
    build_tree_dep = job.FolderDependency(build_job,
                                          self.commands.buildit_work_dir_path)
    return build_job, build_tree_dep

  def RunTests(self, checkout_dir, build_tree_dir, target, board, component):
    """Job that runs DejaGNU tests; exports the test output directory."""
    test_job = jobs.CreateLinuxJob(
        'RunTests(%s, %s, %s)' % (target, component, board),
        self.commands.RunTests(target, board, component))
    test_job.DependsOnFolder(checkout_dir)
    test_job.DependsOnFolder(build_tree_dir)
    testrun_dir_dep = job.FolderDependency(
        test_job, self.commands.dejagnu_output_path, board)
    return test_job, testrun_dir_dep

  def GenerateReport(self, testrun_dirs, manifests_dir, target, boards):
    """Job that renders an HTML report covering all boards' test runs."""
    report_job = jobs.CreateLinuxJob('GenerateReport(%s)' % target,
                                     self.commands.GenerateReport(boards))
    report_job.DependsOnFolder(manifests_dir)
    for testrun_dir in testrun_dirs:
      report_job.DependsOnFolder(testrun_dir)
    return report_job
+
+
class CommandsFactory(object):
  """Builds the shell commands used by crosstool jobs (checkout/build/test)."""

  # Directory (relative to the job workspace) holding the Perforce checkout.
  CHECKOUT_DIR = 'crosstool-checkout-dir'

  def __init__(self):
    # Path to the buildit tool inside the checkout.
    self.buildit_path = os.path.join(self.CHECKOUT_DIR, 'gcctools', 'crosstool',
                                     'v15')

    self.buildit_work_dir = 'buildit-tmp'
    self.buildit_work_dir_path = os.path.join('$JOB_TMP', self.buildit_work_dir)
    self.dejagnu_output_path = os.path.join(self.buildit_work_dir_path,
                                            'dejagnu-output')

    # Perforce view: crosstool sources plus pinned binutils/gdb/zlib/gcc.
    paths = {
        'gcctools': [
            'crosstool/v15/...', 'scripts/...'
        ],
        'gcctools/google_vendor_src_branch': [
            'binutils/binutils-2.21/...', 'gdb/gdb-7.2.x/...',
            'zlib/zlib-1.2.3/...'
        ],
        'gcctools/vendor_src': [
            'gcc/google/gcc-4_6/...'
        ]
    }

    p4view = perforce.View('depot2',
                           perforce.PathMapping.ListFromPathDict(paths))

    self.p4client = perforce.CommandsFactory(self.CHECKOUT_DIR, p4view)

  def CheckoutCrosstool(self):
    """Command that syncs the view, records the CL number, drops the client."""
    p4client = self.p4client

    return p4client.SetupAndDo(p4client.Sync(),
                               p4client.SaveCurrentCLNumber('CLNUM'),
                               p4client.Remove())

  def BuildRelease(self, target):
    """Command that builds a release toolchain and prunes old releases.

    The release is installed under .../toolchains/<target>/unstable/<CLNUM>,
    where CLNUM is read at runtime from the file written by
    SaveCurrentCLNumber() during checkout.
    """
    clnum_path = os.path.join('$JOB_TMP', self.CHECKOUT_DIR, 'CLNUM')

    toolchain_root = os.path.join('/google/data/rw/projects/toolchains', target,
                                  'unstable')
    # ${CLNUM} is expanded by the shell from the env wrapper below.
    toolchain_path = os.path.join(toolchain_root, '${CLNUM}')

    build_toolchain = cmd.Wrapper(
        cmd.Chain(
            cmd.MakeDir(toolchain_path),
            cmd.Shell('buildit',
                      '--keep-work-dir',
                      '--build-type=release',
                      '--work-dir=%s' % self.buildit_work_dir_path,
                      '--results-dir=%s' % toolchain_path,
                      '--force-release=%s' % '${CLNUM}',
                      target,
                      path='.')),
        cwd=self.buildit_path,
        umask='0022',
        env={'CLNUM': '$(< %s)' % clnum_path})

    # remove all but 10 most recent directories
    remove_old_toolchains_from_x20 = cmd.Wrapper(
        cmd.Pipe(
            cmd.Shell('ls', '-1', '-r'), cmd.Shell('sed', '-e', '1,10d'),
            cmd.Shell('xargs', 'rm', '-r', '-f')),
        cwd=toolchain_root)

    return cmd.Chain(build_toolchain, remove_old_toolchains_from_x20)

  def RunTests(self, target, board, component='gcc'):
    """Command that runs 'make check' with DejaGNU and saves the results."""
    dejagnu_flags = ['--outdir=%s' % self.dejagnu_output_path,
                     '--target_board=%s' % board]

    # Look for {pandaboard,qemu}.exp files in
    # //depot/google3/experimental/users/kbaclawski/dejagnu/boards

    site_exp_file = os.path.join('/google/src/head/depot/google3',
                                 'experimental/users/kbaclawski',
                                 'dejagnu/site.exp')

    build_dir_path = os.path.join(target, 'rpmbuild/BUILD/crosstool*-0.0',
                                  'build-%s' % component)

    run_dejagnu = cmd.Wrapper(
        cmd.Chain(
            cmd.MakeDir(self.dejagnu_output_path),
            cmd.Shell('make',
                      'check',
                      '-k',
                      '-j $(grep -c processor /proc/cpuinfo)',
                      'RUNTESTFLAGS="%s"' % ' '.join(dejagnu_flags),
                      'DEJAGNU="%s"' % site_exp_file,
                      # Test failures must not abort the chain: results are
                      # still copied afterwards.
                      ignore_error=True)),
        cwd=os.path.join(self.buildit_work_dir_path, build_dir_path),
        env={'REMOTE_TMPDIR': 'job-$JOB_ID'})

    save_results = cmd.Copy(self.dejagnu_output_path,
                            to_dir='$JOB_TMP/results',
                            recursive=True)

    return cmd.Chain(run_dejagnu, save_results)

  def GenerateReport(self, boards):
    """Command that runs dejagnu.sh to render an HTML report from .sum files."""
    sumfiles = [os.path.join('$JOB_TMP', board, '*.sum') for board in boards]

    return cmd.Wrapper(
        cmd.Shell('dejagnu.sh',
                  'report',
                  '-m',
                  '$JOB_TMP/manifests/*.xfail',
                  '-o',
                  '$JOB_TMP/results/report.html',
                  *sumfiles,
                  path='.'),
        cwd='$HOME/automation/clients/report')
diff --git a/deprecated/automation/clients/helper/jobs.py b/deprecated/automation/clients/helper/jobs.py
new file mode 100644
index 00000000..96a1c408
--- /dev/null
+++ b/deprecated/automation/clients/helper/jobs.py
@@ -0,0 +1,11 @@
+# Copyright 2010 Google Inc. All Rights Reserved.
+
+from automation.common import job
+from automation.common import machine
+
+
def CreateLinuxJob(label, command, lock=False, timeout=4 * 60 * 60):
  """Creates a Job constrained to run on a Linux machine.

  Args:
    label: human-readable job label.
    command: command object or string the job will execute.
    lock: whether the machine must be exclusively locked for this job.
    timeout: job timeout in seconds (default: 4 hours).

  Returns:
    The configured job.Job instance.
  """
  linux_job = job.Job(label, command, timeout)
  linux_job.DependsOnMachine(
      machine.MachineSpecification(os='linux', lock_required=lock))
  return linux_job
diff --git a/deprecated/automation/clients/helper/perforce.py b/deprecated/automation/clients/helper/perforce.py
new file mode 100644
index 00000000..1f2dfe79
--- /dev/null
+++ b/deprecated/automation/clients/helper/perforce.py
@@ -0,0 +1,215 @@
+# Copyright 2011 Google Inc. All Rights Reserved.
+
+__author__ = 'kbaclawski@google.com (Krystian Baclawski)'
+
+import collections
+import os.path
+
+from automation.common import command as cmd
+
+
class PathMapping(object):
  """Stores information about relative path mapping (remote to local)."""

  @classmethod
  def ListFromPathDict(cls, prefix_path_dict):
    """Takes {'prefix1': ['path1',...], ...} and returns a list of mappings."""
    return [cls(os.path.join(prefix, path))
            for prefix, paths in sorted(prefix_path_dict.items())
            for path in sorted(paths)]

  @classmethod
  def ListFromPathTuples(cls, tuple_list):
    """Takes a list of tuples and returns a list of mappings.

    Args:
      tuple_list: [('remote_path1', 'local_path1'), ...]

    Returns:
      a list of mapping objects
    """
    return [cls(remote_path, local_path)
            for remote_path, local_path in tuple_list]

  def __init__(self, remote, local=None, common_suffix=None):
    suffix = self._FixPath(common_suffix or '')

    # Local path defaults to the remote path when not given.
    self.remote = os.path.join(remote, suffix)
    self.local = os.path.join(local or remote, suffix)

  @staticmethod
  def _FixPath(path_s):
    """Normalizes a path: strips surrounding and duplicated slashes."""
    segments = [seg for seg in path_s.strip('/').split('/') if seg]
    return os.path.join(*segments) if segments else ''

  @property
  def remote(self):
    return self._remote

  @remote.setter
  def remote(self, path_s):
    self._remote = self._FixPath(path_s)

  @property
  def local(self):
    return self._local

  @local.setter
  def local(self, path_s):
    self._local = self._FixPath(path_s)

  def GetAbsolute(self, depot, client):
    """Returns the (remote, local) pair rooted at //depot and //client."""
    return (os.path.join('//', depot, self.remote),
            os.path.join('//', client, self.local))

  def __str__(self):
    return '%s(%s => %s)' % (self.__class__.__name__, self.remote, self.local)
+
+
class View(collections.MutableSet):
  """Keeps all information about local client required to work with perforce."""

  def __init__(self, depot, mappings=None, client=None):
    # Depot name is validated by the property setter below.
    self.depot = depot

    # NOTE(review): when no client name is given, self._client stays unset
    # until the 'client' property is assigned by the caller (see
    # CommandsFactory.__init__ below) — accessing it earlier raises
    # AttributeError.
    if client:
      self.client = client

    self._mappings = set(mappings or [])

  @staticmethod
  def _FixRoot(root_s):
    # A valid root is a single path component: reject anything that still
    # contains a '/' after stripping the surrounding slashes.
    parts = root_s.strip('/').split('/', 1)

    if len(parts) != 1:
      return None

    return parts[0]

  def _GetDepot(self):
    return self._depot

  def _SetDepot(self, depot_s):
    depot = self._FixRoot(depot_s)
    assert depot, 'Not a valid depot name: "%s".' % depot_s
    self._depot = depot

  # Depot name, e.g. 'depot2' (stored without slashes).
  depot = property(_GetDepot, _SetDepot)

  def _GetClient(self):
    return self._client

  def _SetClient(self, client_s):
    client = self._FixRoot(client_s)
    assert client, 'Not a valid client name: "%s".' % client_s
    self._client = client

  # Perforce client name (stored without slashes).
  client = property(_GetClient, _SetClient)

  def add(self, mapping):
    assert type(mapping) is PathMapping
    self._mappings.add(mapping)

  def discard(self, mapping):
    assert type(mapping) is PathMapping
    self._mappings.discard(mapping)

  def __contains__(self, value):
    return value in self._mappings

  def __len__(self):
    return len(self._mappings)

  def __iter__(self):
    return iter(mapping for mapping in self._mappings)

  def AbsoluteMappings(self):
    """Yields (remote, local) pairs rooted at //<depot> and //<client>."""
    return iter(mapping.GetAbsolute(self.depot, self.client)
                for mapping in self._mappings)
+
+
class CommandsFactory(object):
  """Creates shell commands used for interaction with Perforce."""

  def __init__(self, checkout_dir, p4view, name=None, port=None):
    self.port = port or 'perforce2:2666'
    self.view = p4view
    # Client name defaults to a per-host, per-job unique name so concurrent
    # jobs do not clash ($HOSTNAME/$JOB_ID are expanded by the shell).
    self.view.client = name or 'p4-automation-$HOSTNAME-$JOB_ID'
    self.checkout_dir = checkout_dir
    self.p4config_path = os.path.join(self.checkout_dir, '.p4config')

  def Initialize(self):
    """Creates the checkout dir and a .p4config with this port and client."""
    return cmd.Chain('mkdir -p %s' % self.checkout_dir, 'cp ~/.p4config %s' %
                     self.checkout_dir, 'chmod u+w %s' % self.p4config_path,
                     'echo "P4PORT=%s" >> %s' % (self.port, self.p4config_path),
                     'echo "P4CLIENT=%s" >> %s' %
                     (self.view.client, self.p4config_path))

  def Create(self):
    """Creates the perforce client with this factory's view mappings."""
    # TODO(kbaclawski): Could we support value list for options consistently?
    mappings = ['-a \"%s %s\"' % mapping
                for mapping in self.view.AbsoluteMappings()]

    # First command will create client with default mappings. Second one will
    # replace default mapping with desired. Unfortunately, it seems that it
    # cannot be done in one step. P4EDITOR is defined to /bin/true because we
    # don't want "g4 client" to enter real editor and wait for user actions.
    return cmd.Wrapper(
        cmd.Chain(
            cmd.Shell('g4', 'client'),
            cmd.Shell('g4', 'client', '--replace', *mappings)),
        env={'P4EDITOR': '/bin/true'})

  def SaveSpecification(self, filename=None):
    """Dumps the client specification, optionally redirected to a file."""
    return cmd.Pipe(cmd.Shell('g4', 'client', '-o'), output=filename)

  def Sync(self, revision=None):
    """Syncs the whole view, optionally pinned to a revision or changelist."""
    sync_arg = '...'
    if revision:
      sync_arg = '%s@%s' % (sync_arg, revision)
    return cmd.Shell('g4', 'sync', sync_arg)

  def SaveCurrentCLNumber(self, filename=None):
    """Extracts the CL number of the most recently synced change to a file."""
    return cmd.Pipe(
        cmd.Shell('g4', 'changes', '-m1', '...#have'),
        # Rewrites "Change NNN ..." to just "NNN".
        cmd.Shell('sed', '-E', '"s,Change ([0-9]+) .*,\\1,"'),
        output=filename)

  def Remove(self):
    """Deletes the perforce client."""
    return cmd.Shell('g4', 'client', '-d', self.view.client)

  def SetupAndDo(self, *commands):
    """Initializes and creates the client, then runs commands in checkout."""
    return cmd.Chain(self.Initialize(),
                     self.InCheckoutDir(self.Create(), *commands))

  def InCheckoutDir(self, *commands):
    """Wraps commands so they execute with cwd set to the checkout dir."""
    return cmd.Wrapper(cmd.Chain(*commands), cwd=self.checkout_dir)

  def CheckoutFromSnapshot(self, snapshot):
    """Copies the view's '...'-mapped directories from a local snapshot.

    Used instead of a real perforce sync; only mappings whose local path
    ends in '...' (whole-directory mappings) are rsynced.
    """
    cmds = cmd.Chain()

    for mapping in self.view:
      local_path, file_part = mapping.local.rsplit('/', 1)

      if file_part == '...':
        remote_dir = os.path.join(snapshot, local_path)
        local_dir = os.path.join(self.checkout_dir, os.path.dirname(local_path))

        cmds.extend([
            cmd.Shell('mkdir', '-p', local_dir), cmd.Shell(
                'rsync', '-lr', remote_dir, local_dir)
        ])

    return cmds
diff --git a/deprecated/automation/clients/nightly.py b/deprecated/automation/clients/nightly.py
new file mode 100755
index 00000000..d35c4eca
--- /dev/null
+++ b/deprecated/automation/clients/nightly.py
@@ -0,0 +1,51 @@
+#!/usr/bin/python2
+#
+# Copyright 2010 Google Inc. All Rights Reserved.
+
+import optparse
+import pickle
+import sys
+import xmlrpclib
+
+from automation.clients.helper import chromeos
+from automation.common import job_group
+
+
def Main(argv):
  """Parses options, builds the nightly job group and submits it."""
  parser = optparse.OptionParser()
  parser.add_option('-c',
                    '--chromeos_version',
                    dest='chromeos_version',
                    default='quarterly',
                    help='ChromeOS version to use.')
  parser.add_option('-t',
                    '--toolchain',
                    dest='toolchain',
                    default='latest-toolchain',
                    help='Toolchain to use {latest-toolchain,gcc_46}.')
  parser.add_option('-b',
                    '--board',
                    dest='board',
                    default='x86-generic',
                    help='Board to use for the nightly job.')
  options, _ = parser.parse_args(argv)

  # Build toolchain
  factory = chromeos.JobsFactory(chromeos_version=options.chromeos_version,
                                 board=options.board,
                                 toolchain=options.toolchain)
  benchmark_job = factory.BuildAndBenchmark()

  group = job_group.JobGroup('nightly_client_%s' % options.board,
                             [benchmark_job], True, False)

  # Submit the group to the locally running automation server.
  server = xmlrpclib.Server('http://localhost:8000')
  server.ExecuteJobGroup(pickle.dumps(group))
+
+
+if __name__ == '__main__':
+ Main(sys.argv)
diff --git a/deprecated/automation/clients/output_test.py b/deprecated/automation/clients/output_test.py
new file mode 100755
index 00000000..73c26eed
--- /dev/null
+++ b/deprecated/automation/clients/output_test.py
@@ -0,0 +1,29 @@
+#!/usr/bin/python2
+#
+# Copyright 2010 Google Inc. All Rights Reserved.
+
+import os.path
+import pickle
+import sys
+import xmlrpclib
+
+from automation.common import job
+from automation.common import job_group
+from automation.common import machine
+
+
def Main():
  """Submits a job that runs produce_output.py on a Linux machine."""
  script_path = os.path.join(
      os.path.dirname(sys.argv[0]), '../../produce_output.py')

  output_job = job.Job('pwd_job', script_path)
  output_job.DependsOnMachine(machine.MachineSpecification(os='linux'))

  group = job_group.JobGroup('pwd_client', [output_job])

  server = xmlrpclib.Server('http://localhost:8000')
  server.ExecuteJobGroup(pickle.dumps(group))
+
+
+if __name__ == '__main__':
+ Main()
diff --git a/deprecated/automation/clients/pwd_test.py b/deprecated/automation/clients/pwd_test.py
new file mode 100755
index 00000000..493444d5
--- /dev/null
+++ b/deprecated/automation/clients/pwd_test.py
@@ -0,0 +1,27 @@
+#!/usr/bin/python2
+#
+# Copyright 2010 Google Inc. All Rights Reserved.
+
+import pickle
+import xmlrpclib
+
+from automation.common import job
+from automation.common import job_group
+from automation.common import machine
+
+
def Main():
  """Submits a job that prints the working directory three different ways."""
  commands = ['echo These following 3 lines should be the same', 'pwd',
              '$(pwd)', 'echo ${PWD}']

  pwd_job = job.Job('pwd_job', ' && '.join(commands))
  pwd_job.DependsOnMachine(machine.MachineSpecification(os='linux'))

  group = job_group.JobGroup('pwd_client', [pwd_job])

  server = xmlrpclib.Server('http://localhost:8000')
  server.ExecuteJobGroup(pickle.dumps(group))
+
+
+if __name__ == '__main__':
+ Main()
diff --git a/deprecated/automation/clients/report/dejagnu.sh b/deprecated/automation/clients/report/dejagnu.sh
new file mode 100755
index 00000000..fadd8a0c
--- /dev/null
+++ b/deprecated/automation/clients/report/dejagnu.sh
@@ -0,0 +1,9 @@
#!/bin/bash
#
# Copyright 2011 Google Inc. All Rights Reserved.
# Author: kbaclawski@google.com (Krystian Baclawski)
#
# Thin wrapper that runs dejagnu/main.py with PYTHONPATH set to the
# current directory so its sibling modules can be imported.

export PYTHONPATH="$(pwd)"

# "$@" (quoted) forwards each argument intact; a bare $@ would word-split
# arguments that contain whitespace.
python dejagnu/main.py "$@"
diff --git a/deprecated/automation/clients/report/dejagnu/__init__.py b/deprecated/automation/clients/report/dejagnu/__init__.py
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/deprecated/automation/clients/report/dejagnu/__init__.py
@@ -0,0 +1 @@
+
diff --git a/deprecated/automation/clients/report/dejagnu/main.py b/deprecated/automation/clients/report/dejagnu/main.py
new file mode 100644
index 00000000..62f095e1
--- /dev/null
+++ b/deprecated/automation/clients/report/dejagnu/main.py
@@ -0,0 +1,137 @@
+# Copyright 2011 Google Inc. All Rights Reserved.
+# Author: kbaclawski@google.com (Krystian Baclawski)
+#
+
+from contextlib import contextmanager
+import glob
+from itertools import chain
+import logging
+import optparse
+import os.path
+import sys
+
+from manifest import Manifest
+import report
+from summary import DejaGnuTestRun
+
+
def ExpandGlobExprList(paths):
  """Returns an iterator that goes over expanded glob paths."""
  return chain.from_iterable(glob.glob(path) for path in paths)
+
+
@contextmanager
def OptionChecker(parser):
  """Provides scoped environment for command line option checking."""
  # Validation code inside the "with" body signals failure via sys.exit
  # (SystemExit); intercept it to show usage information before exiting
  # with the original error message.
  try:
    yield
  except SystemExit as ex:
    parser.print_help()
    print ''
    sys.exit('ERROR: %s' % str(ex))
+
+
def ManifestCommand(argv):
  """Handles the 'manifest' subcommand.

  Parses each .sum file given on the command line (glob expressions are
  expanded) and writes a ${tool}-${board}.xfail manifest of its
  suppressible failures into the current directory.
  """
  parser = optparse.OptionParser(
      description=(
          'Read in one or more DejaGNU summary files (.sum), parse their '
          'content and generate manifest files. Manifest files store a list '
          'of failed tests that should be ignored. Generated files are '
          'stored in current directory under following name: '
          '${tool}-${board}.xfail (e.g. "gcc-unix.xfail").'),
      usage='Usage: %prog manifest [file.sum] (file2.sum ...)')

  _, args = parser.parse_args(argv[2:])

  with OptionChecker(parser):
    if not args:
      sys.exit('At least one *.sum file required.')

  for filename in ExpandGlobExprList(args):
    test_run = DejaGnuTestRun.FromFile(filename)

    manifest = Manifest.FromDejaGnuTestRun(test_run)
    manifest_filename = '%s-%s.xfail' % (test_run.tool, test_run.board)

    with open(manifest_filename, 'w') as manifest_file:
      manifest_file.write(manifest.Generate())

    logging.info('Wrote manifest to "%s" file.', manifest_filename)
+
+
def ReportCommand(argv):
  """Handles the 'report' subcommand.

  Reads the DejaGNU summary files given on the command line (glob
  expressions are expanded), optionally applies failure-suppression
  manifests given with -m, and writes a single HTML report to the file
  given with -o.  Exits with a non-zero status when report generation
  fails.
  """
  parser = optparse.OptionParser(
      description=
      ('Read in one or more DejaGNU summary files (.sum), parse their '
       'content and generate a single report file in selected format '
       '(currently only HTML).'),
      usage=('Usage: %prog report (-m manifest.xfail) [-o report.html] '
             '[file.sum (file2.sum ...)'))
  parser.add_option(
      '-o',
      dest='output',
      type='string',
      default=None,
      # Bug fix: this help string used to be a copy-paste of the -m help.
      help='Name of the file the generated report will be written to.')
  parser.add_option(
      '-m',
      dest='manifests',
      type='string',
      action='append',
      default=None,
      help=('Suppress failures for test listed in provided manifest files. '
            '(use -m for each manifest file you want to read)'))

  opts, args = parser.parse_args(argv[2:])

  with OptionChecker(parser):
    if not args:
      sys.exit('At least one *.sum file required.')

    if not opts.output:
      sys.exit('Please provide name for report file.')

  manifests = []

  for filename in ExpandGlobExprList(opts.manifests or []):
    logging.info('Using "%s" manifest.', filename)
    manifests.append(Manifest.FromFile(filename))

  test_runs = [DejaGnuTestRun.FromFile(filename)
               for filename in chain.from_iterable(map(glob.glob, args))]

  html = report.Generate(test_runs, manifests)

  if html:
    with open(opts.output, 'w') as html_file:
      html_file.write(html)
    logging.info('Wrote report to "%s" file.', opts.output)
  else:
    sys.exit(1)
+
+
def HelpCommand(argv):
  """Prints a usage summary with the list of commands, then exits."""
  prog = os.path.basename(argv[0])
  lines = [
      'Usage: %s command [options]' % prog,
      '',
      'Commands:',
      ' manifest - manage files containing a list of suppressed test failures',
      ' report - generate report file for selected test runs'
  ]
  sys.exit('\n'.join(lines))
+
+
def Main(argv):
  """Dispatches to the subcommand named by argv[1], defaulting to help."""
  cmd_name = argv[1] if len(argv) > 1 else None

  handlers = {'manifest': ManifestCommand, 'report': ReportCommand}
  handlers.get(cmd_name, HelpCommand)(argv)
+
+
if __name__ == '__main__':
  # Configure the root logger before dispatching, so subcommands' logging
  # calls produce timestamped output.
  FORMAT = '%(asctime)-15s %(levelname)s %(message)s'
  logging.basicConfig(format=FORMAT, level=logging.INFO)

  Main(sys.argv)
diff --git a/deprecated/automation/clients/report/dejagnu/manifest.py b/deprecated/automation/clients/report/dejagnu/manifest.py
new file mode 100644
index 00000000..5831d1b0
--- /dev/null
+++ b/deprecated/automation/clients/report/dejagnu/manifest.py
@@ -0,0 +1,103 @@
+# Copyright 2011 Google Inc. All Rights Reserved.
+# Author: kbaclawski@google.com (Krystian Baclawski)
+#
+
+__author__ = 'kbaclawski@google.com (Krystian Baclawski)'
+
+from collections import namedtuple
+from cStringIO import StringIO
+import logging
+
+from summary import DejaGnuTestResult
+
+
class Manifest(namedtuple('Manifest', 'tool board results')):
  """Stores a list of unsuccessful tests.

  Any line that starts with '#@' marker carries auxiliary data in form of a
  key-value pair, for example:

  #@ tool: *
  #@ board: unix

  So far tool and board parameters are recognized. Their value can contain
  arbitrary glob expression. Based on aforementioned parameters given manifest
  will be applied for all test results, but only in selected test runs. Note
  that all parameters are optional. Their default value is '*' (i.e. for all
  tools/boards).

  The meaning of lines above is as follows: corresponding test results to follow
  should only be suppressed if test run was performed on "unix" board.

  The summary line used to build the test result should have this format:

  attrlist | UNRESOLVED: gcc.dg/unroll_1.c (test for excess errors)
  ^^^^^^^^   ^^^^^^^^^^  ^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^
  optional   result      name              variant
  attributes
  """
  # Result codes that a manifest is allowed to suppress.
  SUPPRESSIBLE_RESULTS = ['FAIL', 'UNRESOLVED', 'XPASS', 'ERROR']

  @classmethod
  def FromDejaGnuTestRun(cls, test_run):
    """Creates a manifest holding only the suppressible results of a run."""
    results = [result
               for result in test_run.results
               if result.result in cls.SUPPRESSIBLE_RESULTS]

    return cls(test_run.tool, test_run.board, results)

  @classmethod
  def FromFile(cls, filename):
    """Creates manifest instance from a file in format described above."""
    params = {}
    results = []

    with open(filename, 'r') as manifest_file:
      for line in manifest_file:
        if line.startswith('#@'):
          # parse a line with a parameter
          try:
            key, value = line[2:].split(':', 1)
          except ValueError:
            logging.warning('Malformed parameter line: "%s".', line)
          else:
            params[key.strip()] = value.strip()
        else:
          # remove comment
          try:
            line, _ = line.split('#', 1)
          except ValueError:
            pass

          line = line.strip()

          if line:
            # parse a line with a test result
            result = DejaGnuTestResult.FromLine(line)

            if result:
              results.append(result)
            else:
              logging.warning('Malformed test result line: "%s".', line)

    # Unspecified tool/board default to '*' (match every test run).
    tool = params.get('tool', '*')
    board = params.get('board', '*')

    return cls(tool, board, results)

  def Generate(self):
    """Dumps manifest to string."""
    text = StringIO()

    for name in ['tool', 'board']:
      text.write('#@ {0}: {1}\n'.format(name, getattr(self, name)))

    text.write('\n')

    # Results are grouped by result code for readability.
    for result in sorted(self.results, key=lambda r: r.result):
      text.write('{0}\n'.format(result))

    return text.getvalue()

  def __iter__(self):
    """Iterates over the stored test results."""
    return iter(self.results)
diff --git a/deprecated/automation/clients/report/dejagnu/report.html b/deprecated/automation/clients/report/dejagnu/report.html
new file mode 100644
index 00000000..39b39e09
--- /dev/null
+++ b/deprecated/automation/clients/report/dejagnu/report.html
@@ -0,0 +1,94 @@
+<link type="text/css" rel="Stylesheet"
+href="http://ajax.googleapis.com/ajax/libs/jqueryui/1.8.16/themes/ui-lightness/jquery-ui.css"/>
+
+<script type="text/javascript" src="https://www.google.com/jsapi"></script>
+<script type="text/javascript">
+ google.load("visualization", "1.1", {packages: ["corechart", "table"]});
+ google.load("jquery", "1.6.2");
+ google.load("jqueryui", "1.8.16");
+
+ function drawChart(name, label, table) {
+ var data = google.visualization.arrayToDataTable(table);
+ var chart = new google.visualization.PieChart(
+ document.getElementById(name));
+
+ chart.draw(data,
+ {title: label, pieSliceText: "value", width: 800, height: 400});
+ }
+
+ function drawTable(name, table) {
+ var data = google.visualization.arrayToDataTable(table);
+ var table = new google.visualization.Table(
+ document.getElementById(name));
+
+ table.draw(data, {
+ showRowNumber: false, allowHtml: true, sortColumn: 0});
+ }
+
+ google.setOnLoadCallback(function () {
+ $( "#testruns" ).tabs();
+
+ {% for test_run in test_runs %}
+ $( "#testrun{{ test_run.id }}" ).tabs();
+
+ {% for result_type, group in test_run.groups.items %}
+ $( "#testrun{{ test_run.id }}-{{ result_type }}-tables" ).accordion({
+ autoHeight: false, collapsible: true, active: false });
+
+ drawChart(
+ "testrun{{ test_run.id }}-{{ result_type }}-chart",
+ "DejaGNU test {{ result_type }} summary for {{ test_run.name }}",
+ [
+ ["Result", "Count"],
+ {% for result, count in group.summary %}
+ ["{{ result }}", {{ count }}],{% endfor %}
+ ]);
+
+ {% for description, test_list in group.tests %}
+ {% if test_list %}
+ drawTable(
+ "testrun{{ test_run.id }}-{{ result_type }}-table-{{ forloop.counter }}",
+ [
+ ["Test", "Variant"],
+ {% for test, variant in test_list %}
+ ["{{ test }}", "{{ variant }}"],{% endfor %}
+ ]);
+ {% endif %}
+ {% endfor %}
+ {% endfor %}
+ {% endfor %}
+ });
+</script>
+
+<div id="testruns">
+ <ul>
+ {% for test_run in test_runs %}
+ <li><a href="#testrun{{ test_run.id }}">{{ test_run.name }}</a></li>
+ {% endfor %}
+ </ul>
+
+ {% for test_run in test_runs %}
+ <div id="testrun{{ test_run.id }}" style="padding: 0px">
+ <ul>
+ {% for result_type, group in test_run.groups.items %}
+ <li>
+ <a href="#testrun{{ test_run.id }}-{{ forloop.counter }}">{{ result_type }}</a>
+ </li>
+ {% endfor %}
+ </ul>
+ {% for result_type, group in test_run.groups.items %}
+ <div id="testrun{{ test_run.id }}-{{ forloop.counter }}">
+ <div id="testrun{{ test_run.id }}-{{ result_type }}-chart" style="text-align: center"></div>
+ <div id="testrun{{ test_run.id }}-{{ result_type }}-tables">
+ {% for description, test_list in group.tests %}
+ {% if test_list %}
+ <h3><a href="#">{{ description }}</a></h3>
+ <div id="testrun{{ test_run.id }}-{{ result_type }}-table-{{ forloop.counter }}"></div>
+ {% endif %}
+ {% endfor %}
+ </div>
+ </div>
+ {% endfor %}
+ </div>
+{% endfor %}
+</div>
diff --git a/deprecated/automation/clients/report/dejagnu/report.py b/deprecated/automation/clients/report/dejagnu/report.py
new file mode 100644
index 00000000..191a5389
--- /dev/null
+++ b/deprecated/automation/clients/report/dejagnu/report.py
@@ -0,0 +1,115 @@
+# Copyright 2011 Google Inc. All Rights Reserved.
+# Author: kbaclawski@google.com (Krystian Baclawski)
+#
+
+import logging
+import os.path
+
# Maps a DejaGNU result code to the human readable description shown in the
# rendered report.
RESULT_DESCRIPTION = {
    'ERROR': 'DejaGNU errors',
    'FAIL': 'Failed tests',
    'NOTE': 'DejaGNU notices',
    'PASS': 'Passed tests',
    'UNRESOLVED': 'Unresolved tests',
    'UNSUPPORTED': 'Unsupported tests',
    'UNTESTED': 'Not executed tests',
    'WARNING': 'DejaGNU warnings',
    'XFAIL': 'Expected test failures',
    'XPASS': 'Unexpectedly passed tests'
}

# Top-level sections of the report.  A '!'-prefixed code denotes a result that
# was suppressed by a manifest entry (see DejaGnuTestRun.SuppressTestResults).
RESULT_GROUPS = {
    'Successes': ['PASS', 'XFAIL'],
    'Failures': ['FAIL', 'XPASS', 'UNRESOLVED'],
    'Suppressed': ['!FAIL', '!XPASS', '!UNRESOLVED', '!ERROR'],
    'Framework': ['UNTESTED', 'UNSUPPORTED', 'ERROR', 'WARNING', 'NOTE']
}

# Directory containing this module; used as the template search path for the
# report.html Django template.
ROOT_PATH = os.path.dirname(os.path.abspath(__file__))
+
+
def _GetResultDescription(name):
  """Return a human readable description for a result code.

  A single leading '!' (suppression marker) is ignored.

  Raises:
    ValueError: if the result code is not listed in RESULT_DESCRIPTION.
  """
  key = name[1:] if name.startswith('!') else name

  if key not in RESULT_DESCRIPTION:
    raise ValueError('Unknown result: "%s"' % key)

  return RESULT_DESCRIPTION[key]
+
+
def _PrepareSummary(res_types, summary):
  """Pair each result type's description with its occurrence count.

  Args:
    res_types: Iterable of result codes (e.g. 'PASS', 'FAIL').
    summary: Mapping from result code to occurrence count.

  Returns:
    List of (description, count) tuples, one per entry in res_types.
  """
  return [(_GetResultDescription(res_type), summary.get(res_type, 0))
          for res_type in res_types]
+
+
def _PrepareTestList(res_types, tests):
  """Group test (name, variant) pairs by result type, excluding 'PASS'.

  Args:
    res_types: Iterable of result codes.
    tests: Iterable of test result objects (name/variant/result fields).

  Returns:
    List of (description, [(name, variant), ...]) tuples; passing tests are
    not listed individually, so 'PASS' is skipped.
  """
  ordered = sorted(tests)

  def _TestsWithResult(res_type):
    return [(test.name, test.variant or '')
            for test in ordered if test.result == res_type]

  return [(_GetResultDescription(res_type), _TestsWithResult(res_type))
          for res_type in res_types if res_type != 'PASS']
+
+
def Generate(test_runs, manifests):
  """Generate HTML report from provided test runs.

  Args:
    test_runs: DejaGnuTestRun objects list.
    manifests: Manifest object list that will drive test result suppression.

  Returns:
    String to which the HTML report was rendered, or an empty string if the
    Django templating framework is not installed.
  """
  tmpl_args = []

  for test_run_id, test_run in enumerate(test_runs):
    logging.info('Generating report for: %s.', test_run)

    test_run.CleanUpTestResults()
    test_run.SuppressTestResults(manifests)

    # Generate summary and test list for each result group.
    groups = {}

    for res_group, res_types in RESULT_GROUPS.items():
      summary_all = _PrepareSummary(res_types, test_run.summary)
      tests_all = _PrepareTestList(res_types, test_run.results)

      # Keep only entries with a non-empty second element.  Materialize into
      # lists: a lazy `filter` object is always truthy under Python 3, which
      # would defeat the emptiness check below.
      has_2nd = lambda tuple2: bool(tuple2[1])
      summary = list(filter(has_2nd, summary_all))
      tests = list(filter(has_2nd, tests_all))

      if summary or tests:
        groups[res_group] = {'summary': summary, 'tests': tests}

    tmpl_args.append({
        'id': test_run_id,
        'name': '%s @%s' % (test_run.tool, test_run.board),
        'groups': groups
    })

  logging.info('Rendering report in HTML format.')

  # Django is an optional dependency - degrade gracefully without it.
  try:
    from django import template
    from django.template import loader
    from django.conf import settings
  except ImportError:
    logging.error('Django framework not installed!')
    logging.error('Failed to generate report in HTML format!')
    return ''

  settings.configure(DEBUG=True,
                     TEMPLATE_DEBUG=True,
                     TEMPLATE_DIRS=(ROOT_PATH,))

  tmpl = loader.get_template('report.html')
  ctx = template.Context({'test_runs': tmpl_args})

  return tmpl.render(ctx)
diff --git a/deprecated/automation/clients/report/dejagnu/summary.py b/deprecated/automation/clients/report/dejagnu/summary.py
new file mode 100644
index 00000000..d573c691
--- /dev/null
+++ b/deprecated/automation/clients/report/dejagnu/summary.py
@@ -0,0 +1,262 @@
+# Copyright 2011 Google Inc. All Rights Reserved.
+# Author: kbaclawski@google.com (Krystian Baclawski)
+#
+
+from collections import defaultdict
+from collections import namedtuple
+from datetime import datetime
+from fnmatch import fnmatch
+from itertools import groupby
+import logging
+import os.path
+import re
+
+
class DejaGnuTestResult(namedtuple('Result', 'name variant result flaky')):
  """Stores the result of a single test case.

  Fields:
    name: Test file path (relative to the testsuite directory if possible).
    variant: Test description/options with temporary-path noise removed.
    result: DejaGNU result code (e.g. 'PASS', 'FAIL', 'UNRESOLVED').
    flaky: True if the line carried a 'flaky |' annotation.
  """

  # avoid adding __dict__ to the class
  __slots__ = ()

  # e.g. "FAIL: gcc.dg/some-test.c (test for excess errors)"
  LINE_RE = re.compile(r'([A-Z]+):\s+([\w/+.-]+)(.*)')

  @classmethod
  def FromLine(cls, line):
    """Alternate constructor which takes a string and parses it.

    Returns:
      A DejaGnuTestResult, or None if the line is not a test result.
    """
    try:
      attrs, line = line.split('|', 1)

      if attrs.strip() != 'flaky':
        return None

      line = line.strip()
      flaky = True
    except ValueError:
      # No '|' separator - a plain, non-flaky result line.
      flaky = False

    fields = cls.LINE_RE.match(line.strip())

    if fields:
      result, path, variant = fields.groups()

      # some of the tests are generated in build dir and are issued from there,
      # because every test run is performed in randomly named tmp directory we
      # need to remove random part
      try:
        # assume that 2nd field is a test path
        path_parts = path.split('/')

        index = path_parts.index('testsuite')
        path = '/'.join(path_parts[index + 1:])
      except ValueError:
        path = '/'.join(path_parts)

      # Remove junk from test description.
      variant = variant.strip(', ')

      # NOTE: raw strings here - the original non-raw '\S'/'\s' literals rely
      # on a deprecated escape-sequence fallback.
      substitutions = [
          # remove include paths - they contain name of tmp directory
          (r'-I\S+', ''),
          # compress white spaces
          (r'\s+', ' ')
      ]

      for pattern, replacement in substitutions:
        variant = re.sub(pattern, replacement, variant)

      # Some tests separate last component of path by space, so actual filename
      # ends up in description instead of path part. Correct that.
      try:
        first, rest = variant.split(' ', 1)
      except ValueError:
        pass
      else:
        if first.endswith('.o'):
          path = os.path.join(path, first)
          variant = rest

      # DejaGNU framework errors don't contain path part at all, so description
      # part has to be reconstructed.
      if not any(os.path.basename(path).endswith('.%s' % suffix)
                 for suffix in ['h', 'c', 'C', 'S', 'H', 'cc', 'i', 'o']):
        variant = '%s %s' % (path, variant)
        path = ''

      # Some tests are picked up from current directory (presumably DejaGNU
      # generates some test files). Remove the prefix for these files.
      if path.startswith('./'):
        path = path[2:]

      return cls(path, variant or '', result, flaky=flaky)

  def __str__(self):
    """Returns string representation of a test result."""
    if self.flaky:
      fmt = 'flaky | '
    else:
      fmt = ''
    fmt += '{2}: {0}'
    if self.variant:
      fmt += ' {1}'
    return fmt.format(*self)
+
+
class DejaGnuTestRun(object):
  """Container for test results that were a part of single test run.

  The class stores also metadata related to the test run.

  Attributes:
    board: Name of DejaGNU board, which was used to run the tests.
    date: The date when the test run was started.
    target: Target triple.
    host: Host triple.
    tool: The tool that was tested (e.g. gcc, binutils, g++, etc.)
    results: a set of DejaGnuTestResult objects.
  """

  # avoid adding __dict__ to instances
  __slots__ = ('board', 'date', 'target', 'host', 'tool', 'results')

  def __init__(self, **kwargs):
    # Unspecified metadata defaults to 'unknown'; date defaults to now.
    assert all(name in self.__slots__ for name in kwargs)

    self.results = set()
    self.date = kwargs.get('date', datetime.now())

    for name in ('board', 'target', 'tool', 'host'):
      setattr(self, name, kwargs.get(name, 'unknown'))

  @classmethod
  def FromFile(cls, filename):
    """Alternate constructor - reads a DejaGNU output file."""
    test_run = cls()
    test_run.FromDejaGnuOutput(filename)
    test_run.CleanUpTestResults()
    return test_run

  @property
  def summary(self):
    """Returns a summary as {ResultType -> Count} dictionary."""
    summary = defaultdict(int)

    for r in self.results:
      summary[r.result] += 1

    return summary

  # The _Parse* helpers below extract run metadata from matches of the
  # regexps declared in FromDejaGnuOutput.

  def _ParseBoard(self, fields):
    """Store the DejaGNU board name."""
    self.board = fields.group(1).strip()

  def _ParseDate(self, fields):
    """Store the run start date, e.g. 'Mon Jan 10 12:00:00 2011'."""
    self.date = datetime.strptime(fields.group(2).strip(), '%a %b %d %X %Y')

  def _ParseTarget(self, fields):
    """Store the target triple."""
    self.target = fields.group(2).strip()

  def _ParseHost(self, fields):
    """Store the host triple."""
    self.host = fields.group(2).strip()

  def _ParseTool(self, fields):
    """Store the name of the tested tool."""
    self.tool = fields.group(1).strip()

  def FromDejaGnuOutput(self, filename):
    """Read in and parse DejaGNU output file."""

    logging.info('Reading "%s" DejaGNU output file.', filename)

    with open(filename, 'r') as report:
      lines = [line.strip() for line in report.readlines() if line.strip()]

    # Metadata headers that appear interleaved with the test results.
    parsers = ((re.compile(r'Running target (.*)'), self._ParseBoard),
               (re.compile(r'Test Run By (.*) on (.*)'), self._ParseDate),
               (re.compile(r'=== (.*) tests ==='), self._ParseTool),
               (re.compile(r'Target(\s+)is (.*)'), self._ParseTarget),
               (re.compile(r'Host(\s+)is (.*)'), self._ParseHost))

    for line in lines:
      result = DejaGnuTestResult.FromLine(line)

      if result:
        self.results.add(result)
      else:
        # Not a test result - try the metadata parsers instead.
        for regexp, parser in parsers:
          fields = regexp.match(line)
          if fields:
            parser(fields)
            break

    logging.debug('DejaGNU output file parsed successfully.')
    logging.debug(self)

  def CleanUpTestResults(self):
    """Remove certain test results considered to be spurious.

    1) Large number of test reported as UNSUPPORTED are also marked as
    UNRESOLVED. If that's the case remove latter result.
    2) If a test is performed on compiler output and for some reason compiler
    fails, we don't want to report all failures that depend on the former.
    """
    name_key = lambda v: v.name
    results_by_name = sorted(self.results, key=name_key)

    for name, res_iter in groupby(results_by_name, key=name_key):
      results = set(res_iter)

      # If DejaGnu was unable to compile a test it will create following result:
      failed = DejaGnuTestResult(name, '(test for excess errors)', 'FAIL',
                                 False)

      # If a test compilation failed, remove all results that are dependent.
      if failed in results:
        dependants = set(filter(lambda r: r.result != 'FAIL', results))

        self.results -= dependants

        for res in dependants:
          logging.info('Removed {%s} dependance.', res)

      # Remove all UNRESOLVED results that were also marked as UNSUPPORTED.
      unresolved = [res._replace(result='UNRESOLVED')
                    for res in results if res.result == 'UNSUPPORTED']

      for res in unresolved:
        if res in self.results:
          self.results.remove(res)
          logging.info('Removed {%s} duplicate.', res)

  def _IsApplicable(self, manifest):
    """Checks if test results need to be reconsidered based on the manifest."""
    # Manifest tool/board entries are fnmatch-style patterns.
    check_list = [(self.tool, manifest.tool), (self.board, manifest.board)]

    return all(fnmatch(text, pattern) for text, pattern in check_list)

  def SuppressTestResults(self, manifests):
    """Suppresses all test results listed in manifests."""

    # Get a set of tests results that are going to be suppressed if they fail.
    manifest_results = set()

    for manifest in filter(self._IsApplicable, manifests):
      manifest_results |= set(manifest.results)

    suppressed_results = self.results & manifest_results

    for result in sorted(suppressed_results):
      logging.debug('Result suppressed for {%s}.', result)

      # Suppressed results get a '!'-prefixed code (e.g. FAIL -> !FAIL) so
      # that reports can group them separately.
      new_result = '!' + result.result

      # Mark result suppression as applied.
      manifest_results.remove(result)

      # Rewrite test result.
      self.results.remove(result)
      self.results.add(result._replace(result=new_result))

    # Anything left here matched no actual result - flag stale entries.
    for result in sorted(manifest_results):
      logging.warning('Result {%s} listed in manifest but not suppressed.',
                      result)

  def __str__(self):
    """Returns e.g. 'arm-none-eabi, gcc @arm-sim on <date>'."""
    return '{0}, {1} @{2} on {3}'.format(self.target, self.tool, self.board,
                                         self.date)
diff --git a/deprecated/automation/clients/report/validate_failures.py b/deprecated/automation/clients/report/validate_failures.py
new file mode 100755
index 00000000..d8776ba5
--- /dev/null
+++ b/deprecated/automation/clients/report/validate_failures.py
@@ -0,0 +1,239 @@
+#!/usr/bin/python2
+
+# Script to compare testsuite failures against a list of known-to-fail
+# tests.
+
+# Contributed by Diego Novillo <dnovillo@google.com>
+# Overhaul by Krystian Baclawski <kbaclawski@google.com>
+#
+# Copyright (C) 2011 Free Software Foundation, Inc.
+#
+# This file is part of GCC.
+#
+# GCC is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3, or (at your option)
+# any later version.
+#
+# GCC is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GCC; see the file COPYING. If not, write to
+# the Free Software Foundation, 51 Franklin Street, Fifth Floor,
+# Boston, MA 02110-1301, USA.
+"""This script provides a coarser XFAILing mechanism that requires no
+detailed DejaGNU markings. This is useful in a variety of scenarios:
+
+- Development branches with many known failures waiting to be fixed.
+- Release branches with known failures that are not considered
+ important for the particular release criteria used in that branch.
+
+The script must be executed from the toplevel build directory. When
+executed it will:
+
+1) Determine the target built: TARGET
+2) Determine the source directory: SRCDIR
+3) Look for a failure manifest file in
+ <SRCDIR>/contrib/testsuite-management/<TARGET>.xfail
+4) Collect all the <tool>.sum files from the build tree.
+5) Produce a report stating:
+ a) Failures expected in the manifest but not present in the build.
+ b) Failures in the build not expected in the manifest.
+6) If all the build failures are expected in the manifest, it exits
+ with exit code 0. Otherwise, it exits with error code 1.
+"""
+
+import optparse
+import logging
+import os
+import sys
+
+sys.path.append(os.path.dirname(os.path.abspath(__file__)))
+
+from dejagnu.manifest import Manifest
+from dejagnu.summary import DejaGnuTestResult
+from dejagnu.summary import DejaGnuTestRun
+
+# Pattern for naming manifest files. The first argument should be
+# the toplevel GCC source directory. The second argument is the
+# target triple used during the build.
+_MANIFEST_PATH_PATTERN = '%s/contrib/testsuite-management/%s.xfail'
+
+
def GetMakefileVars(makefile_path):
  """Extract 'NAME = value' style assignments from a Makefile.

  Args:
    makefile_path: Path to an existing Makefile.

  Returns:
    Dictionary mapping variable names to their (stripped) string values.
  """
  assert os.path.exists(makefile_path)

  variables = {}

  with open(makefile_path) as makefile:
    for line in makefile:
      if '=' in line:
        name, _, value = line.partition('=')
        variables[name.strip()] = value.strip()

  return variables
+
+
def GetSumFiles(build_dir):
  """Recursively collect DejaGNU summary files (*.sum) under build_dir.

  Args:
    build_dir: Root of the build tree to scan.

  Returns:
    List of normalized paths to the found .sum files.
  """
  summaries = []

  for root, _, filenames in os.walk(build_dir):
    summaries.extend([os.path.join(root, filename)
                      for filename in filenames if filename.endswith('.sum')])

  # Return a concrete list rather than a lazy `map` object so callers can
  # iterate the result more than once under Python 3.
  return [os.path.normpath(summary) for summary in summaries]
+
+
def ValidBuildDirectory(build_dir, target):
  """Check whether build_dir looks like a GCC top level build directory.

  It must exist, contain a Makefile, and contain either a <target> or a
  build-<target> subdirectory.
  """
  if not os.path.exists(build_dir):
    return False
  if not os.path.exists(os.path.join(build_dir, 'Makefile')):
    return False

  return (os.path.exists(os.path.join(build_dir, target)) or
          os.path.exists(os.path.join(build_dir, 'build-%s' % target)))
+
+
def GetManifestPath(build_dir):
  """Derive the failure manifest path from the build tree's Makefile.

  Reads srcdir/target out of the Makefile and exits the program if build_dir
  does not look like a GCC top level build directory.
  """
  makefile_vars = GetMakefileVars(os.path.join(build_dir, 'Makefile'))
  srcdir = makefile_vars['srcdir']
  target = makefile_vars['target']

  # Some builds identify themselves by the target alias rather than the
  # canonical triple.
  if not ValidBuildDirectory(build_dir, target):
    target = makefile_vars['target_alias']

  if not ValidBuildDirectory(build_dir, target):
    logging.error('%s is not a valid GCC top level build directory.',
                  build_dir)
    sys.exit(1)

  logging.info('Discovered source directory: "%s"', srcdir)
  logging.info('Discovered build target: "%s"', target)

  return _MANIFEST_PATH_PATTERN % (srcdir, target)
+
+
def CompareResults(manifest, actual):
  """Compare sets of results and return two lists:
  - List of results present in MANIFEST but missing from ACTUAL.
  - List of results present in ACTUAL but missing from MANIFEST.
  """
  # Everything the build produced that the manifest did not predict.
  unexpected = actual - manifest

  # Flaky tests are allowed to pass, so exclude them before looking for
  # manifest entries that never showed up in the build.
  stable_manifest = set(result for result in manifest if not result.flaky)
  missing = stable_manifest - actual

  return unexpected, missing
+
+
def LogResults(level, results):
  """Log each result in sorted order, numbered from 1, at the given level.

  Args:
    level: Name of a logging function ('debug', 'info', ...).
    results: Iterable of (sortable) result objects.
  """
  emit = getattr(logging, level)

  for num, result in enumerate(sorted(results), start=1):
    emit(' %d) %s', num, result)
+
+
def CheckExpectedResults(manifest_path, build_dir):
  """Compare build failures against the manifest; exit(1) on any mismatch.

  Args:
    manifest_path: Path to the .xfail manifest listing expected failures.
    build_dir: GCC build directory whose .sum files provide actual results.
  """
  logging.info('Reading manifest file: "%s"', manifest_path)

  manifest = set(Manifest.FromFile(manifest_path))

  logging.info('Getting actual results from build directory: "%s"',
               os.path.realpath(build_dir))

  summaries = GetSumFiles(build_dir)

  actual = set()

  for summary in summaries:
    test_run = DejaGnuTestRun.FromFile(summary)
    failures = set(Manifest.FromDejaGnuTestRun(test_run))
    actual.update(failures)

  if manifest:
    logging.debug('Tests expected to fail:')
    LogResults('debug', manifest)

  if actual:
    logging.debug('Actual test failures:')
    LogResults('debug', actual)

  actual_vs_manifest, manifest_vs_actual = CompareResults(manifest, actual)

  if actual_vs_manifest:
    logging.info('Build results not in the manifest:')
    LogResults('info', actual_vs_manifest)

  if manifest_vs_actual:
    logging.info('Manifest results not present in the build:')
    LogResults('info', manifest_vs_actual)
    # These strings are implicitly concatenated into one message; the original
    # passed them as separate arguments, which made logging fail with a
    # surplus-argument formatting error.
    logging.info('NOTE: This is not a failure! '
                 'It just means that the manifest expected these tests to '
                 'fail, but they worked in this configuration.')

  if actual_vs_manifest or manifest_vs_actual:
    sys.exit(1)

  logging.info('No unexpected failures.')
+
+
def ProduceManifest(manifest_path, build_dir, overwrite):
  """Write a failure manifest generated from the build tree's .sum files.

  Refuses to clobber an existing manifest unless overwrite is set, in which
  case the program exits with an error.
  """
  if os.path.exists(manifest_path) and not overwrite:
    logging.error('Manifest file "%s" already exists.', manifest_path)
    logging.error('Use --force to overwrite.')
    sys.exit(1)

  manifests = [Manifest.FromDejaGnuTestRun(DejaGnuTestRun.FromFile(sum_file))
               for sum_file in GetSumFiles(build_dir)]

  with open(manifest_path, 'w') as manifest_file:
    manifest_strings = [manifest.Generate() for manifest in manifests]
    logging.info('Writing manifest to "%s".', manifest_path)
    manifest_file.write('\n'.join(manifest_strings))
+
+
def Main(argv):
  """Parse command line options and run the requested action.

  Args:
    argv: Command line arguments, sys.argv style.
  """
  parser = optparse.OptionParser(usage=__doc__)

  parser.add_option('-b',
                    '--build_dir',
                    dest='build_dir',
                    action='store',
                    metavar='PATH',
                    default=os.getcwd(),
                    help='Build directory to check. (default: current directory)')
  parser.add_option('-m',
                    '--manifest',
                    dest='manifest',
                    action='store_true',
                    help='Produce the manifest for the current build.')
  parser.add_option('-f',
                    '--force',
                    dest='force',
                    action='store_true',
                    help=('Overwrite an existing manifest file, if user '
                          'requested creating new one. (default: False)'))
  parser.add_option('-v',
                    '--verbose',
                    dest='verbose',
                    action='store_true',
                    help='Increase verbosity.')

  options, _ = parser.parse_args(argv[1:])

  if options.verbose:
    logging.root.setLevel(logging.DEBUG)

  manifest_path = GetManifestPath(options.build_dir)

  if options.manifest:
    ProduceManifest(manifest_path, options.build_dir, options.force)
  else:
    CheckExpectedResults(manifest_path, options.build_dir)
+
+
+if __name__ == '__main__':
+ logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)
+ Main(sys.argv)
diff --git a/deprecated/automation/common/__init__.py b/deprecated/automation/common/__init__.py
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/deprecated/automation/common/__init__.py
@@ -0,0 +1 @@
+
diff --git a/deprecated/automation/common/command.py b/deprecated/automation/common/command.py
new file mode 100644
index 00000000..c56e9fad
--- /dev/null
+++ b/deprecated/automation/common/command.py
@@ -0,0 +1,241 @@
+# Copyright 2011 Google Inc. All Rights Reserved.
+
+__author__ = 'kbaclawski@google.com (Krystian Baclawski)'
+
+import abc
+import collections
+import os.path
+
+
class Shell(object):
  """Class used to build a string representation of a shell command."""

  def __init__(self, cmd, *args, **kwargs):
    """Create a shell command.

    Args:
      cmd: Name of the program to invoke.
      *args: Command line arguments.
      **kwargs: Accepts 'path' (directory the program lives in) and
        'ignore_error' (if true, force a zero exit status).
    """
    assert all(key in ['path', 'ignore_error'] for key in kwargs)

    self._cmd = cmd
    self._args = list(args)
    self._path = kwargs.get('path', '')
    self._ignore_error = bool(kwargs.get('ignore_error', False))

  def __str__(self):
    """Render the command as a shell snippet."""
    parts = [os.path.join(self._path, self._cmd)] + self._args
    rendered = ' '.join(parts)

    # Swallow a non-zero exit status when requested.
    if self._ignore_error:
      return '{ %s; true; }' % rendered

    return rendered

  def AddOption(self, option):
    """Append a single extra argument to the command line."""
    self._args.append(option)
+
+
class Wrapper(object):
  """Wraps a command with environment which gets cleaned up after execution."""

  # Class-wide sequence used to generate unique shell variable names.
  _counter = 1

  def __init__(self, command, cwd=None, env=None, umask=None):
    """Wrap a command with setup/teardown shell commands.

    Args:
      command: The command (or command container) to wrap.
      cwd: Temporary working directory to run the command in.
      env: Dictionary of environment variables to set for the command.
      umask: Umask to apply while the command runs.
    """
    self._command = command
    self._prefix = Chain()
    self._suffix = Chain()

    if cwd:
      self._prefix.append(Shell('pushd', cwd))
      self._suffix.insert(0, Shell('popd'))

    if env:
      for env_var, value in env.items():
        self._prefix.append(Shell('%s=%s' % (env_var, value)))
        self._suffix.insert(0, Shell('unset', env_var))

    if umask:
      umask_save_var = 'OLD_UMASK_%d' % self.counter

      self._prefix.append(Shell('%s=$(umask)' % umask_save_var))
      self._prefix.append(Shell('umask', umask))
      self._suffix.insert(0, Shell('umask', '$%s' % umask_save_var))

  @property
  def counter(self):
    """Return the next value of the class-wide counter.

    Increments the class attribute explicitly: the original incremented
    'self._counter', which created an instance attribute shadowing the class
    one, so every wrapper restarted the sequence at 1 and different wrappers
    could reuse the same saved-umask variable name.
    """
    counter = Wrapper._counter
    Wrapper._counter += 1
    return counter

  def __str__(self):
    # prefix && command && suffix, flattened by Chain's rendering.
    return str(Chain(self._prefix, self._command, self._suffix))
+
+
class AbstractCommandContainer(collections.MutableSequence):
  """Common base for all classes that behave like command container."""

  def __init__(self, *commands):
    # NOTE(review): the constructor does not run _ValidateCommandType on the
    # initial commands; only later assignments/insertions are checked.
    self._commands = list(commands)

  def __contains__(self, command):
    return command in self._commands

  def __iter__(self):
    return iter(self._commands)

  def __len__(self):
    return len(self._commands)

  def __getitem__(self, index):
    return self._commands[index]

  def __setitem__(self, index, command):
    self._commands[index] = self._ValidateCommandType(command)

  def __delitem__(self, index):
    del self._commands[index]

  def insert(self, index, command):
    """Insert command at index, rejecting types not in stored_types."""
    self._commands.insert(index, self._ValidateCommandType(command))

  @abc.abstractmethod
  def __str__(self):
    """Render the contained commands as a single shell snippet."""
    pass

  @abc.abstractproperty
  def stored_types(self):
    """List of types instances of this container may hold."""
    pass

  def _ValidateCommandType(self, command):
    # Deliberately an exact type() check rather than isinstance(): allowed
    # subclasses must be listed explicitly in stored_types.
    if type(command) not in self.stored_types:
      raise TypeError('Command cannot have %s type.' % type(command))
    else:
      return command

  def _StringifyCommands(self):
    """Stringify children; multi-command sub-containers get { ...; } braces."""
    cmds = []

    for cmd in self:
      if isinstance(cmd, AbstractCommandContainer) and len(cmd) > 1:
        cmds.append('{ %s; }' % cmd)
      else:
        cmds.append(str(cmd))

    return cmds
+
+
class Chain(AbstractCommandContainer):
  """Container that chains shell commands using (&&) shell operator."""

  def __str__(self):
    # Later commands run only if every earlier one succeeded.
    return ' && '.join(self._StringifyCommands())

  @property
  def stored_types(self):
    """A Chain may hold strings, single commands and nested containers."""
    return [str, Shell, Chain, Pipe]
+
+
class Pipe(AbstractCommandContainer):
  """Container that chains shell commands using pipe (|) operator."""

  def __init__(self, *commands, **kwargs):
    """Create a pipeline of commands.

    Args:
      *commands: Commands to join with the pipe operator.
      **kwargs: Accepts 'input' (file fed to the first command via cat) and
        'output' (file the final output is tee-ed into).
    """
    assert all(key in ['input', 'output'] for key in kwargs)

    AbstractCommandContainer.__init__(self, *commands)

    self._input = kwargs.get('input', None)
    self._output = kwargs.get('output', None)

  @property
  def stored_types(self):
    """A Pipe may hold only strings and single commands."""
    return [str, Shell]

  def __str__(self):
    pipe = self._StringifyCommands()

    if self._input:
      # Bug fix: parentheses were misplaced in the original
      # (pipe.insert(str(Shell(...), 0))), so both str() and insert() were
      # called with the wrong arguments and raised TypeError at runtime.
      pipe.insert(0, str(Shell('cat', self._input)))

    if self._output:
      pipe.append(str(Shell('tee', self._output)))

    return ' | '.join(pipe)
+
+# TODO(kbaclawski): Unfortunately we don't have any policy describing which
+# directories can or cannot be touched by a job. Thus, I cannot decide how to
+# protect a system against commands that are considered to be dangerous (like
+# RmTree("${HOME}")). AFAIK we'll have to execute some commands with root access
+# (especially for ChromeOS related jobs, which involve chroot-ing), which is
+# even more scary.
+
+
def Copy(*args, **kwargs):
  """Build a 'cp' command.

  Args:
    *args: Files and/or directories to copy.
    **kwargs: Accepts 'to_dir' (destination directory) and 'recursive'
      (copy directories recursively).

  Returns:
    Shell object representing the command.
  """
  assert all(key in ['to_dir', 'recursive'] for key in kwargs.keys())

  options = []

  if 'to_dir' in kwargs:
    options += ['-t', kwargs['to_dir']]

  if 'recursive' in kwargs:
    options += ['-r']

  return Shell('cp', *(options + list(args)))
+
+
def RemoteCopyFrom(from_machine, from_path, to_path, username=None):
  """Build a command that rsyncs a remote directory into a local one.

  The destination directory is created first; a trailing slash is appended to
  both paths so rsync copies directory contents.
  """
  from_path = os.path.expanduser(from_path) + '/'
  to_path = os.path.expanduser(to_path) + '/'

  if username:
    login = '%s@%s' % (username, from_machine)
  else:
    login = from_machine

  remote_spec = '%s:%s' % (login, from_path)

  return Chain(MakeDir(to_path), Shell('rsync', '-a', remote_spec, to_path))
+
+
def MakeSymlink(to_path, link_name):
  """Build an 'ln' command that forcibly (re)creates a symbolic link."""
  options = ['-f', '-s', '-T', to_path, link_name]
  return Shell('ln', *options)
+
+
def MakeDir(*dirs, **kwargs):
  """Build a 'mkdir -p' command, optionally setting permissions via 'mode'."""
  options = ['-p']

  if kwargs.get('mode'):
    options += ['-m', str(kwargs['mode'])]

  return Shell('mkdir', *(options + list(dirs)))
+
+
def RmTree(*dirs):
  """Build a forced, recursive 'rm' command for the given directories."""
  return Shell('rm', '-r', '-f', *dirs)
+
+
def UnTar(tar_file, dest_dir):
  """Build a command that creates dest_dir and extracts tar_file into it."""
  extract = Shell('tar', '-x', '-f', tar_file, '-C', dest_dir)
  return Chain(MakeDir(dest_dir), extract)
+
+
def Tar(tar_file, *args):
  """Build a command that archives *args into tar_file.

  The compression flag is picked from the file extension: .tar.bz2 (bzip2),
  .tar.gz (gzip) or a plain .tar archive.
  """
  options = ['-c']

  if tar_file.endswith('.tar.bz2'):
    options.append('-j')
  elif tar_file.endswith('.tar.gz'):
    options.append('-z')
  else:
    assert tar_file.endswith('.tar')

  options += ['-f', tar_file] + list(args)

  return Chain(MakeDir(os.path.dirname(tar_file)), Shell('tar', *options))
diff --git a/deprecated/automation/common/command_executer.py b/deprecated/automation/common/command_executer.py
new file mode 100644
index 00000000..c0f314f5
--- /dev/null
+++ b/deprecated/automation/common/command_executer.py
@@ -0,0 +1,230 @@
+# Copyright 2011 Google Inc. All Rights Reserved.
+#
+"""Classes that help running commands in a subshell.
+
+Commands can be run locally, or remotly using SSH connection. You may log the
+output of a command to a terminal or a file, or any other destination.
+"""
+
+__author__ = 'kbaclawski@google.com (Krystian Baclawski)'
+
import fcntl
import logging
import os
import select
import subprocess
import sys
import time

from automation.common import logger
+
+
class CommandExecuter(object):
  """Runs a shell command in a subprocess, locally or over SSH.

  Output is streamed to the DataReceivedOnOutput/DataReceivedOnError hooks,
  which subclasses may override.
  """

  # Class-wide default for dry-run mode; see Configure().
  DRY_RUN = False

  def __init__(self, dry_run=False):
    self._logger = logging.getLogger(self.__class__.__name__)
    self._dry_run = dry_run or self.DRY_RUN

  @classmethod
  def Configure(cls, dry_run):
    """Set the class-wide dry-run default for executers created later."""
    cls.DRY_RUN = dry_run

  def RunCommand(self,
                 cmd,
                 machine=None,
                 username=None,
                 command_terminator=None,
                 command_timeout=None):
    """Run cmd in a subshell and return its exit status.

    Args:
      cmd: The command to execute (stringified before use).
      machine: If given, run remotely on this host via ssh.
      username: Optional login name for the remote machine.
      command_terminator: CommandTerminator used to request early shutdown.
      command_timeout: Seconds after which the command is terminated.

    Returns:
      The child's exit status (0 in dry-run mode, 1 if already terminated).
    """
    cmd = str(cmd)

    if self._dry_run:
      return 0

    if not command_terminator:
      command_terminator = CommandTerminator()

    if command_terminator.IsTerminated():
      self._logger.warning('Command has been already terminated!')
      return 1

    # Rewrite command for remote execution.
    if machine:
      if username:
        login = '%s@%s' % (username, machine)
      else:
        login = machine

      self._logger.debug("Executing '%s' on %s.", cmd, login)

      # FIXME(asharif): Remove this after crosbug.com/33007 is fixed.
      cmd = "ssh -t -t %s -- '%s'" % (login, cmd)
    else:
      self._logger.debug("Executing: '%s'.", cmd)

    child = self._SpawnProcess(cmd, command_terminator, command_timeout)

    self._logger.debug('{PID: %d} Finished with %d code.', child.pid,
                       child.returncode)

    return child.returncode

  def _Terminate(self, child, command_timeout, wait_timeout=10):
    """Gracefully shutdown the child by sending SIGTERM.

    Returns:
      The child's exit code, or None if it is still alive after
      wait_timeout seconds.
    """
    if command_timeout:
      self._logger.warning('{PID: %d} Timeout of %s seconds reached since '
                           'process started.', child.pid, command_timeout)

    self._logger.warning('{PID: %d} Terminating child.', child.pid)

    try:
      child.terminate()
    except OSError:
      pass

    wait_started = time.time()

    # poll() returns None while the process is running; any other value
    # (including 0) is its exit status.  The original compared truthiness,
    # which confused a successful exit (status 0) with "still running".
    while child.poll() is None:
      if time.time() - wait_started >= wait_timeout:
        break
      time.sleep(0.1)

    return child.poll()

  def _Kill(self, child):
    """Kill the child with immediate result."""
    self._logger.warning('{PID: %d} Process still alive.', child.pid)
    self._logger.warning('{PID: %d} Killing child.', child.pid)
    child.kill()
    child.wait()

  def _SpawnProcess(self, cmd, command_terminator, command_timeout):
    """Start the child and pump its stdout/stderr until it exits."""
    # Create a child process executing provided command.
    child = subprocess.Popen(cmd,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             stdin=subprocess.PIPE,
                             shell=True)

    # Close stdin so the child won't be able to block on read.
    child.stdin.close()

    started_time = time.time()

    # Watch for data on process stdout, stderr.
    pipes = [child.stdout, child.stderr]

    # Put pipes into non-blocking mode.
    for pipe in pipes:
      fd = pipe.fileno()
      fd_flags = fcntl.fcntl(fd, fcntl.F_GETFL)
      fcntl.fcntl(fd, fcntl.F_SETFL, fd_flags | os.O_NONBLOCK)

    already_terminated = False

    while pipes:
      # Maybe timeout reached?
      if command_timeout and time.time() - started_time > command_timeout:
        command_terminator.Terminate()

      # Check if terminate request was received.
      if command_terminator.IsTerminated() and not already_terminated:
        # Kill only if the child is really still alive; _Terminate returns
        # None in that case (an exit status of 0 must not trigger _Kill).
        if self._Terminate(child, command_timeout) is None:
          self._Kill(child)
        # Don't exit the loop immediately. Firstly try to read everything that
        # was left on stdout and stderr.
        already_terminated = True

      # Wait for pipes to become ready.
      ready_pipes, _, _ = select.select(pipes, [], [], 0.1)

      # Handle file descriptors ready to be read.
      for pipe in ready_pipes:
        fd = pipe.fileno()

        data = os.read(fd, 4096)

        # check for end-of-file
        if not data:
          pipes.remove(pipe)
          continue

        # read all data that's available
        while data:
          if pipe == child.stdout:
            self.DataReceivedOnOutput(data)
          elif pipe == child.stderr:
            self.DataReceivedOnError(data)

          try:
            data = os.read(fd, 4096)
          except OSError:
            # terminate loop if EWOULDBLOCK (EAGAIN) is received
            data = ''

    if not already_terminated:
      self._logger.debug('Waiting for command to finish.')
      child.wait()

    return child

  def DataReceivedOnOutput(self, data):
    """Invoked when the child process wrote data to stdout."""
    sys.stdout.write(data)

  def DataReceivedOnError(self, data):
    """Invoked when the child process wrote data to stderr."""
    sys.stderr.write(data)
+
+
class LoggingCommandExecuter(CommandExecuter):
  """Command executer that records the child's stdout/stderr via logging."""

  def __init__(self, *args, **kwargs):
    super(LoggingCommandExecuter, self).__init__(*args, **kwargs)

    # Dedicated child logger for the command's stdout/stderr streams.
    self._output = logging.getLogger('%s.%s' % (self._logger.name, 'Output'))

  def OpenLog(self, log_path):
    """The messages are going to be saved to gzip compressed file."""
    handler = logger.CompressedFileHandler(log_path, delay=True)
    handler.setFormatter(
        logging.Formatter('%(asctime)s %(prefix)s: %(message)s',
                          '%Y-%m-%d %H:%M:%S'))
    self._output.addHandler(handler)

    # Set a flag to prevent log records from being propagated up the logger
    # hierarchy tree. We don't want for command output messages to appear in
    # the main log.
    self._output.propagate = 0

  def CloseLog(self):
    """Remove handlers and reattach the logger to its parent."""
    for handler in list(self._output.handlers):
      self._output.removeHandler(handler)
      handler.flush()
      handler.close()

    self._output.propagate = 1

  def DataReceivedOnOutput(self, data):
    """Invoked when the child process wrote data to stdout."""
    for line in data.splitlines():
      self._output.info(line, extra={'prefix': 'STDOUT'})

  def DataReceivedOnError(self, data):
    """Invoked when the child process wrote data to stderr."""
    for line in data.splitlines():
      self._output.warning(line, extra={'prefix': 'STDERR'})
+
+
class CommandTerminator(object):
  """Simple flag object used to request termination of a running command."""

  def __init__(self):
    self.terminated = False

  def Terminate(self):
    """Ask the command executer to stop the command."""
    self.terminated = True

  def IsTerminated(self):
    """Check whether termination was requested."""
    return self.terminated
diff --git a/deprecated/automation/common/command_executer_test.py b/deprecated/automation/common/command_executer_test.py
new file mode 100755
index 00000000..2caaa146
--- /dev/null
+++ b/deprecated/automation/common/command_executer_test.py
@@ -0,0 +1,210 @@
+#!/usr/bin/python2
+#
+# Copyright 2011 Google Inc. All Rights Reserved.
+#
+
+__author__ = 'kbaclawski@google.com (Krystian Baclawski)'
+
+import cStringIO
+import logging
+import os
+import signal
+import socket
+import sys
+import time
+import unittest
+
+
def AddScriptDirToPath():
  """Required for remote python script execution."""
  # Strip three trailing components off this file's absolute location to
  # reach the directory that contains the 'automation' package.
  top_dir = os.path.abspath(__file__)
  for _unused in range(3):
    top_dir = os.path.split(top_dir)[0]

  if top_dir not in sys.path:
    sys.path.append(top_dir)


AddScriptDirToPath()
+
+from automation.common.command_executer import CommandExecuter
+
+
class LoggerMock(object):
  """Minimal stand-in for the automation logger, backed by stdlib logging."""

  def LogCmd(self, cmd, machine='', user=''):
    # Prefix the message with the machine name only when one was given.
    if not machine:
      logging.info('Executing: %s', cmd)
    else:
      logging.info('[%s] Executing: %s', machine, cmd)

  def LogError(self, msg):
    logging.log(logging.ERROR, msg)

  def LogWarning(self, msg):
    logging.log(logging.WARNING, msg)

  def LogOutput(self, msg):
    logging.log(logging.INFO, msg)
+
+
class CommandExecuterUnderTest(CommandExecuter):
  """CommandExecuter that captures the child's stdout/stderr for inspection."""

  def __init__(self):
    CommandExecuter.__init__(self, logger_to_set=LoggerMock())

    # In-memory buffers accumulating everything the child process printed.
    self._stdout = cStringIO.StringIO()
    self._stderr = cStringIO.StringIO()

  @property
  def stdout(self):
    """Everything captured so far from the child's stdout."""
    return self._stdout.getvalue()

  @property
  def stderr(self):
    """Everything captured so far from the child's stderr."""
    return self._stderr.getvalue()

  def DataReceivedOnOutput(self, data):
    self._stdout.write(data)

  def DataReceivedOnError(self, data):
    self._stderr.write(data)
+
+
class CommandExecuterLocalTests(unittest.TestCase):
  """Runs CommandExecuter against helper commands spawned from this script."""

  # None makes RunCommand execute locally; the remote subclass overrides
  # this with a real hostname.
  HOSTNAME = None

  def setUp(self):
    self._executer = CommandExecuterUnderTest()

  def tearDown(self):
    pass

  def RunCommand(self, method, **kwargs):
    """Re-invoke this very script as '<script> runHelper <method>'."""
    program = os.path.abspath(sys.argv[0])

    return self._executer.RunCommand('%s runHelper %s' % (program, method),
                                     machine=self.HOSTNAME,
                                     **kwargs)

  def testCommandTimeout(self):
    exit_code = self.RunCommand('SleepForMinute', command_timeout=3)

    # assertIn reads better than assertTrue(x in y) and reports both
    # operands on failure.
    self.assertIn(-exit_code, [signal.SIGTERM, signal.SIGKILL],
                  'Invalid exit code: %d' % exit_code)

  def testCommandTimeoutIfSigTermIgnored(self):
    exit_code = self.RunCommand('IgnoreSigTerm', command_timeout=3)

    self.assertIn(-exit_code, [signal.SIGTERM, signal.SIGKILL])

  def testCommandSucceeded(self):
    self.assertFalse(self.RunCommand('ReturnTrue'))

  def testCommandFailed(self):
    self.assertTrue(self.RunCommand('ReturnFalse'))

  def testStringOnOutputStream(self):
    self.assertFalse(self.RunCommand('EchoToOutputStream'))
    # assertEqual: assertEquals is a deprecated alias (removed in Py3.12).
    self.assertEqual(self._executer.stderr, '')
    self.assertEqual(self._executer.stdout, 'test')

  def testStringOnErrorStream(self):
    self.assertFalse(self.RunCommand('EchoToErrorStream'))
    self.assertEqual(self._executer.stderr, 'test')
    self.assertEqual(self._executer.stdout, '')

  def testOutputStreamNonInteractive(self):
    self.assertFalse(
        self.RunCommand('IsOutputStreamInteractive'),
        'stdout stream is a terminal!')

  def testErrorStreamNonInteractive(self):
    self.assertFalse(
        self.RunCommand('IsErrorStreamInteractive'),
        'stderr stream is a terminal!')

  def testAttemptToRead(self):
    self.assertFalse(self.RunCommand('WaitForInput', command_timeout=3))

  def testInterruptedProcess(self):
    self.assertEqual(self.RunCommand('TerminateBySigAbrt'), -signal.SIGABRT)
+
+
class CommandExecuterRemoteTests(CommandExecuterLocalTests):
  """Same suite as the local tests, but executed over ssh to this host."""

  HOSTNAME = socket.gethostname()

  def testCommandTimeoutIfSigTermIgnored(self):
    exit_code = self.RunCommand('IgnoreSigTerm', command_timeout=6)

    # assertEqual: assertEquals is a deprecated alias (removed in Py3.12).
    # 255 is ssh's exit code when the connection is torn down.
    self.assertEqual(exit_code, 255)

    lines = self._executer.stdout.splitlines()
    pid = int(lines[0])

    try:
      with open('/proc/%d/cmdline' % pid) as f:
        cmdline = f.read()
    except IOError:
      # No /proc entry means the process is already gone.
      cmdline = ''

    self.assertFalse('IgnoreSigTerm' in cmdline, 'Process is still alive.')
+
+
class CommandExecuterTestHelpers(object):
  """Helper commands executed in a child process by the tests above.

  Each method's return value becomes the child process's exit code, so the
  exact stream/signal behavior here is part of the test protocol.
  """

  def SleepForMinute(self):
    # Long enough that a short command_timeout always fires first.
    time.sleep(60)
    return 1

  def ReturnTrue(self):
    # Shell convention: exit code 0 is success ("true").
    return 0

  def ReturnFalse(self):
    return 1

  def EchoToOutputStream(self):
    sys.stdout.write('test')
    return 0

  def EchoToErrorStream(self):
    sys.stderr.write('test')
    return 0

  def IsOutputStreamInteractive(self):
    # Non-zero (failure) exit iff stdout is a tty; the tests expect it not
    # to be when run through the executer.
    return sys.stdout.isatty()

  def IsErrorStreamInteractive(self):
    return sys.stderr.isatty()

  def IgnoreSigTerm(self):
    # Report our pid on fd 1 so the test can verify we were really killed.
    os.write(1, '%d' % os.getpid())
    signal.signal(signal.SIGTERM, signal.SIG_IGN)
    time.sleep(30)
    return 0

  def WaitForInput(self):
    try:
      # can only read end-of-file marker
      return os.read(0, 1) != ''
    except OSError:
      # that means that stdin descriptor is closed
      return 0

  def TerminateBySigAbrt(self):
    os.kill(os.getpid(), signal.SIGABRT)
    return 0
+
+
if __name__ == '__main__':
  FORMAT = '%(asctime)-15s %(levelname)s %(message)s'
  logging.basicConfig(format=FORMAT, level=logging.DEBUG)

  # When invoked as '<script> runHelper <name>', run a single helper method
  # and exit with its return value -- this is the child-process entry point
  # used by CommandExecuterLocalTests.RunCommand.
  if len(sys.argv) > 1:
    if sys.argv[1] == 'runHelper':
      helpers = CommandExecuterTestHelpers()
      sys.exit(getattr(helpers, sys.argv[2])())

  unittest.main()
diff --git a/deprecated/automation/common/events.py b/deprecated/automation/common/events.py
new file mode 100644
index 00000000..ad3ec844
--- /dev/null
+++ b/deprecated/automation/common/events.py
@@ -0,0 +1,149 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Google Inc. All Rights Reserved.
+#
+"""Tools for recording and reporting timeline of abstract events.
+
+You can store any events provided that they can be stringified.
+"""
+
+__author__ = 'kbaclawski@google.com (Krystian Baclawski)'
+
+import collections
+import datetime
+import time
+
+
+class _EventRecord(object):
+ """Internal class. Attaches extra information to an event."""
+
+ def __init__(self, event, time_started=None, time_elapsed=None):
+ self._event = event
+ self._time_started = time_started or time.time()
+ self._time_elapsed = None
+
+ if time_elapsed:
+ self.time_elapsed = time_elapsed
+
+ @property
+ def event(self):
+ return self._event
+
+ @property
+ def time_started(self):
+ return self._time_started
+
+ def _TimeElapsedGet(self):
+ if self.has_finished:
+ time_elapsed = self._time_elapsed
+ else:
+ time_elapsed = time.time() - self._time_started
+
+ return datetime.timedelta(seconds=time_elapsed)
+
+ def _TimeElapsedSet(self, time_elapsed):
+ if isinstance(time_elapsed, datetime.timedelta):
+ self._time_elapsed = time_elapsed.seconds
+ else:
+ self._time_elapsed = time_elapsed
+
+ time_elapsed = property(_TimeElapsedGet, _TimeElapsedSet)
+
+ @property
+ def has_finished(self):
+ return self._time_elapsed is not None
+
+ def GetTimeStartedFormatted(self):
+ return time.strftime('%m/%d/%Y %H:%M:%S', time.gmtime(self._time_started))
+
+ def GetTimeElapsedRounded(self):
+ return datetime.timedelta(seconds=int(self.time_elapsed.seconds))
+
+ def Finish(self):
+ if not self.has_finished:
+ self._time_elapsed = time.time() - self._time_started
+
+
+class _Transition(collections.namedtuple('_Transition', ('from_', 'to_'))):
+ """Internal class. Represents transition point between events / states."""
+
+ def __str__(self):
+ return '%s => %s' % (self.from_, self.to_)
+
+
class EventHistory(collections.Sequence):
  """Records events and provides human readable events timeline."""
  # NOTE(review): collections.Sequence is the Python 2 spelling; on Python 3
  # the ABC lives in collections.abc (the alias was removed in 3.10).

  def __init__(self, records=None):
    self._records = records or []

  def __len__(self):
    return len(self._records)

  def __iter__(self):
    return iter(self._records)

  def __getitem__(self, index):
    return self._records[index]

  @property
  def last(self):
    """Most recently added record, or None if the history is empty."""
    if self._records:
      return self._records[-1]

  def AddEvent(self, event):
    """Close the previous event (if any) and start recording a new one."""
    if self.last:
      self.last.Finish()

    evrec = _EventRecord(event)
    self._records.append(evrec)
    return evrec

  def GetTotalTime(self):
    """Sum of all durations rounded to whole seconds; None if empty."""
    if self._records:
      # total_seconds() keeps days/microseconds; '.seconds' alone would
      # silently truncate any event lasting a day or longer.
      total_time_elapsed = sum(evrec.time_elapsed.total_seconds()
                               for evrec in self._records)

      return datetime.timedelta(seconds=int(total_time_elapsed))

  def GetTransitionEventHistory(self):
    """Derive a history of state transitions from consecutive events."""
    records = []

    if self._records:
      for num, next_evrec in enumerate(self._records[1:], start=1):
        evrec = self._records[num - 1]

        records.append(_EventRecord(
            _Transition(evrec.event, next_evrec.event), evrec.time_started,
            evrec.time_elapsed))

      # An unfinished last event becomes an "<event> => NOW" transition.
      if not self.last.has_finished:
        records.append(_EventRecord(
            _Transition(self.last.event,
                        'NOW'), self.last.time_started, self.last.time_elapsed))

    return EventHistory(records)

  @staticmethod
  def _GetReport(history, report_name):
    """Render one line per record plus a total-time footer."""
    report = [report_name]

    for num, evrec in enumerate(history, start=1):
      time_elapsed = str(evrec.GetTimeElapsedRounded())

      if not evrec.has_finished:
        # Strings are immutable: the original code called .append() on this
        # str, which raised AttributeError for any unfinished event.
        time_elapsed += ' (not finished)'

      report.append('%d) %s: %s: %s' % (num, evrec.GetTimeStartedFormatted(),
                                        evrec.event, time_elapsed))

    report.append('Total Time: %s' % history.GetTotalTime())

    return '\n'.join(report)

  def GetEventReport(self):
    return EventHistory._GetReport(self, 'Timeline of events:')

  def GetTransitionEventReport(self):
    return EventHistory._GetReport(self.GetTransitionEventHistory(),
                                   'Timeline of transition events:')
diff --git a/deprecated/automation/common/job.py b/deprecated/automation/common/job.py
new file mode 100644
index 00000000..e845ab25
--- /dev/null
+++ b/deprecated/automation/common/job.py
@@ -0,0 +1,178 @@
+# Copyright 2010 Google Inc. All Rights Reserved.
+#
+"""A module for a job in the infrastructure."""
+
+__author__ = 'raymes@google.com (Raymes Khoury)'
+
+import os.path
+
+from automation.common import state_machine
+
+STATUS_NOT_EXECUTED = 'NOT_EXECUTED'
+STATUS_SETUP = 'SETUP'
+STATUS_COPYING = 'COPYING'
+STATUS_RUNNING = 'RUNNING'
+STATUS_SUCCEEDED = 'SUCCEEDED'
+STATUS_FAILED = 'FAILED'
+
+
class FolderDependency(object):
  """Declares that a job consumes a folder produced by another job."""

  def __init__(self, job, src, dest=None):
    # TODO(kbaclawski): rename to producer
    self.job = job
    self.src = src
    # With no explicit destination the source folder is used in place.
    self.dest = dest if dest else src

  @property
  def read_only(self):
    """True when the folder is consumed in place (no copy will be made)."""
    return self.dest == self.src
+
+
class JobStateMachine(state_machine.BasicStateMachine):
  """State machine describing a job's lifecycle.

  Maps each state to the list of states reachable from it; any other
  transition is rejected by BasicStateMachine.Change().
  """
  state_machine = {
      STATUS_NOT_EXECUTED: [STATUS_SETUP],
      STATUS_SETUP: [STATUS_COPYING, STATUS_FAILED],
      STATUS_COPYING: [STATUS_RUNNING, STATUS_FAILED],
      STATUS_RUNNING: [STATUS_SUCCEEDED, STATUS_FAILED]
  }

  # States from which no further transition is possible.
  final_states = [STATUS_SUCCEEDED, STATUS_FAILED]
+
+
class JobFailure(Exception):
  """Raised when a job step finishes with a non-zero exit code."""

  def __init__(self, message, exit_code):
    super(JobFailure, self).__init__(message)
    # Exit code of the failing command, kept for the executer's error log.
    self.exit_code = exit_code
+
+
class Job(object):
  """A class representing a job whose commands will be executed."""

  WORKDIR_PREFIX = '/usr/local/google/tmp/automation'

  def __init__(self, label, command, timeout=4 * 60 * 60):
    """Args:
      label: Human readable name of the job.
      command: Command (string or stringifiable object) to execute.
      timeout: Seconds the command may run before being killed (default 4h).
    """
    self._state = JobStateMachine(STATUS_NOT_EXECUTED)
    self.predecessors = set()
    self.successors = set()
    self.machine_dependencies = []
    self.folder_dependencies = []
    self.id = 0
    self.machines = []
    self.command = command
    self._has_primary_machine_spec = False
    self.group = None
    self.dry_run = None
    self.label = label
    self.timeout = timeout

  def _StateGet(self):
    return self._state

  def _StateSet(self, new_state):
    self._state.Change(new_state)

  # Reading yields the state machine; assigning validates the transition.
  status = property(_StateGet, _StateSet)

  @property
  def timeline(self):
    return self._state.timeline

  def __repr__(self):
    return '{%s: %s}' % (self.__class__.__name__, self.id)

  def __str__(self):
    res = []
    res.append('%d' % self.id)
    res.append('Predecessors:')
    res.extend(['%d' % pred.id for pred in self.predecessors])
    res.append('Successors:')
    res.extend(['%d' % succ.id for succ in self.successors])
    res.append('Machines:')
    res.extend(['%s' % machine for machine in self.machines])
    res.append(self.PrettyFormatCommand())
    res.append('%s' % self.status)
    res.append(self.timeline.GetTransitionEventReport())
    return '\n'.join(res)

  @staticmethod
  def _FormatCommand(cmd, substitutions):
    """Apply literal (non-regex) pattern -> replacement substitutions."""
    for pattern, replacement in substitutions:
      cmd = cmd.replace(pattern, replacement)

    return cmd

  def GetCommand(self):
    """Return the command with $JOB_*/$*_MACHINE placeholders expanded."""
    substitutions = [
        ('$JOB_ID', str(self.id)), ('$JOB_TMP', self.work_dir),
        ('$JOB_HOME', self.home_dir),
        ('$PRIMARY_MACHINE', self.primary_machine.hostname)
    ]

    if len(self.machines) > 1:
      for num, machine in enumerate(self.machines[1:]):
        substitutions.append(('$SECONDARY_MACHINES[%d]' % num, machine.hostname
                             ))

    return self._FormatCommand(str(self.command), substitutions)

  def PrettyFormatCommand(self):
    # TODO(kbaclawski): This method doesn't belong here, but rather to
    # non existing Command class. If one is created then PrettyFormatCommand
    # shall become its method.
    import re  # Local import keeps the module's import surface unchanged.

    # These patterns are regular expressions and must be applied with
    # re.sub(); running them through str.replace() (as _FormatCommand does)
    # looked for the backslashes literally, so none of them ever matched.
    pretty = self.GetCommand()
    for pattern, replacement in [(r'\{ ', ''), (r'; \}', ''), (r'\} ', '\n'),
                                 (r'\s*&&\s*', '\n')]:
      pretty = re.sub(pattern, replacement, pretty)

    return pretty

  def DependsOnFolder(self, dependency):
    """Register a folder dependency and its producing job."""
    self.folder_dependencies.append(dependency)
    self.DependsOn(dependency.job)

  @property
  def results_dir(self):
    return os.path.join(self.work_dir, 'results')

  @property
  def logs_dir(self):
    return os.path.join(self.home_dir, 'logs')

  @property
  def log_filename_prefix(self):
    return 'job-%d.log' % self.id

  @property
  def work_dir(self):
    return os.path.join(self.WORKDIR_PREFIX, 'job-%d' % self.id)

  @property
  def home_dir(self):
    return os.path.join(self.group.home_dir, 'job-%d' % self.id)

  @property
  def primary_machine(self):
    return self.machines[0]

  def DependsOn(self, job):
    """Specifies Jobs to be finished before this job can be launched."""
    self.predecessors.add(job)
    job.successors.add(self)

  @property
  def is_ready(self):
    """Check that all our dependencies have been executed."""
    return all(pred.status == STATUS_SUCCEEDED for pred in self.predecessors)

  def DependsOnMachine(self, machine_spec, primary=True):
    # Job will run on arbitrarily chosen machine specified by
    # MachineSpecification class instances passed to this method.
    if primary:
      if self._has_primary_machine_spec:
        raise RuntimeError('Only one primary machine specification allowed.')
      self._has_primary_machine_spec = True
      self.machine_dependencies.insert(0, machine_spec)
    else:
      self.machine_dependencies.append(machine_spec)
diff --git a/deprecated/automation/common/job_group.py b/deprecated/automation/common/job_group.py
new file mode 100644
index 00000000..96912fc1
--- /dev/null
+++ b/deprecated/automation/common/job_group.py
@@ -0,0 +1,73 @@
+# Copyright 2010 Google Inc. All Rights Reserved.
+#
+
+import getpass
+import os
+
+from automation.common.state_machine import BasicStateMachine
+
+STATUS_NOT_EXECUTED = 'NOT_EXECUTED'
+STATUS_EXECUTING = 'EXECUTING'
+STATUS_SUCCEEDED = 'SUCCEEDED'
+STATUS_FAILED = 'FAILED'
+
+
class JobGroupStateMachine(BasicStateMachine):
  """State machine describing a job group's lifecycle.

  Maps each state to the list of states reachable from it; any other
  transition is rejected by BasicStateMachine.Change().
  """
  state_machine = {
      STATUS_NOT_EXECUTED: [STATUS_EXECUTING],
      STATUS_EXECUTING: [STATUS_SUCCEEDED, STATUS_FAILED]
  }

  # States from which no further transition is possible.
  final_states = [STATUS_SUCCEEDED, STATUS_FAILED]
+
+
class JobGroup(object):
  """A named batch of jobs that is submitted and tracked as one unit."""

  HOMEDIR_PREFIX = os.path.join('/home', getpass.getuser(), 'www', 'automation')

  def __init__(self,
               label,
               jobs=None,
               cleanup_on_completion=True,
               cleanup_on_failure=False,
               description=''):
    self._state = JobGroupStateMachine(STATUS_NOT_EXECUTED)
    self.id = 0
    self.label = label
    self.jobs = []
    self.cleanup_on_completion = cleanup_on_completion
    self.cleanup_on_failure = cleanup_on_failure
    self.description = description

    for job in jobs or []:
      self.AddJob(job)

  def _StateGet(self):
    return self._state

  def _StateSet(self, new_state):
    self._state.Change(new_state)

  # Reading yields the state machine; assigning validates the transition.
  status = property(_StateGet, _StateSet)

  @property
  def home_dir(self):
    """Directory under which this group's jobs keep their home dirs."""
    return os.path.join(self.HOMEDIR_PREFIX, 'job-group-%d' % self.id)

  @property
  def time_submitted(self):
    """Start time of the EXECUTING state, or None if never executed."""
    try:
      return self.status.timeline[1].time_started
    except IndexError:
      return None

  def __repr__(self):
    return '{%s: %s}' % (self.__class__.__name__, self.id)

  def __str__(self):
    header = ['Job-Group:', 'ID: %s' % self.id]
    return '\n'.join(header + [str(job) for job in self.jobs])

  def AddJob(self, job):
    """Append a job and point it back at this group."""
    self.jobs.append(job)
    job.group = self
diff --git a/deprecated/automation/common/logger.py b/deprecated/automation/common/logger.py
new file mode 100644
index 00000000..4aeee052
--- /dev/null
+++ b/deprecated/automation/common/logger.py
@@ -0,0 +1,144 @@
+# Copyright 2010 Google Inc. All Rights Reserved.
+
+from itertools import chain
+import gzip
+import logging
+import logging.handlers
+import time
+import traceback
+
+
def SetUpRootLogger(filename=None, level=None, display_flags=None):
  """Configure the root logger with colored console and optional file output.

  Args:
    filename: If given, also log to this path via a rotating file handler
      (10 MiB per file, 9 backups, creation delayed until first record).
    level: If given, set the root logger's threshold to this level.
    display_flags: Optional dict consulted by CustomFormatter; keys
      'datetime', 'level' and 'name' toggle those fields (default all on).
  """
  # A fresh dict per call avoids the shared mutable-default-argument pitfall.
  if display_flags is None:
    display_flags = {}

  console_handler = logging.StreamHandler()
  console_handler.setFormatter(CustomFormatter(AnsiColorCoder(), display_flags))
  logging.root.addHandler(console_handler)

  if filename:
    file_handler = logging.handlers.RotatingFileHandler(
        filename,
        maxBytes=10 * 1024 * 1024,
        backupCount=9,
        delay=True)
    file_handler.setFormatter(CustomFormatter(NullColorCoder(), display_flags))
    logging.root.addHandler(file_handler)

  if level:
    logging.root.setLevel(level)
+
+
class NullColorCoder(object):
  """Color coder that emits no escape codes (for plain-text log sinks)."""

  def __call__(self, *unused_args):
    return ''
+
+
class AnsiColorCoder(object):
  """Translates symbolic style names into an ANSI SGR escape sequence.

  A plain name ('red', 'bold', ...) selects the "set" code; a 'bg-' prefix
  selects the background/unset variant and 'no-' the corresponding reset.
  """

  CODES = {'reset': (0,),
           'bold': (1, 22),
           'italics': (3, 23),
           'underline': (4, 24),
           'inverse': (7, 27),
           'strikethrough': (9, 29),
           'black': (30, 40),
           'red': (31, 41),
           'green': (32, 42),
           'yellow': (33, 43),
           'blue': (34, 44),
           'magenta': (35, 45),
           'cyan': (36, 46),
           'white': (37, 47)}

  def __call__(self, *args):
    # Second tuple slot for 'bg-'/'no-' prefixed names, first one otherwise.
    codes = [self.CODES[name[3:]][1]
             if name.startswith(('bg-', 'no-')) else self.CODES[name][0]
             for name in args]

    return '\033[%sm' % ';'.join(map(str, codes))
+
+
class CustomFormatter(logging.Formatter):
  """Formatter with per-field toggles, color support and multi-line output.

  Every line of a multi-line message (and of any traceback) is rendered as
  its own fully-prefixed output line.
  """

  COLORS = {'DEBUG': ('white',),
            'INFO': ('green',),
            'WARN': ('yellow', 'bold'),
            'ERROR': ('red', 'bold'),
            'CRIT': ('red', 'inverse', 'bold')}

  def __init__(self, coder, display_flags=None):
    """Args:
      coder: Callable mapping style names to (possibly empty) escape codes.
      display_flags: Optional dict; keys 'datetime', 'level' and 'name'
        toggle the corresponding fields (all default to True).
    """
    # A fresh dict per call avoids the shared mutable-default pitfall.
    if display_flags is None:
      display_flags = {}

    items = []

    if display_flags.get('datetime', True):
      items.append('%(asctime)s')
    if display_flags.get('level', True):
      items.append('%(levelname)s')
    if display_flags.get('name', True):
      items.append(coder('cyan') + '[%(threadName)s:%(name)s]' + coder('reset'))
    items.append('%(prefix)s%(message)s')

    logging.Formatter.__init__(self, fmt=' '.join(items))

    self._coder = coder

  def formatTime(self, record):
    # NOTE(review): signature differs from logging.Formatter.formatTime
    # (no datefmt argument); only our own format() calls it.
    ct = self.converter(record.created)
    t = time.strftime('%Y-%m-%d %H:%M:%S', ct)
    # Hundredths of a second appended manually.
    return '%s.%02d' % (t, record.msecs / 10)

  def formatLevelName(self, record):
    # Shorten WARNING/CRITICAL to the 4-letter keys used in COLORS.
    if record.levelname in ['WARNING', 'CRITICAL']:
      levelname = record.levelname[:4]
    else:
      levelname = record.levelname

    return ''.join([self._coder(*self.COLORS[levelname]), levelname,
                    self._coder('reset')])

  def formatMessagePrefix(self, record):
    # 'prefix' is an optional extra= attribute (e.g. STDOUT/STDERR tags).
    try:
      return ' %s%s:%s ' % (self._coder('black', 'bold'), record.prefix,
                            self._coder('reset'))
    except AttributeError:
      return ''

  def format(self, record):
    if record.exc_info:
      if not record.exc_text:
        record.exc_text = self.formatException(record.exc_info)
    else:
      record.exc_text = ''

    fmt = record.__dict__.copy()
    fmt.update({'levelname': self.formatLevelName(record),
                'asctime': self.formatTime(record),
                'prefix': self.formatMessagePrefix(record)})

    s = []

    # Emit one fully-prefixed output line per message/traceback line.
    for line in chain(record.getMessage().splitlines(),
                      record.exc_text.splitlines()):
      fmt['message'] = line

      s.append(self._fmt % fmt)

    return '\n'.join(s)
+
+
class CompressedFileHandler(logging.FileHandler):
  """FileHandler variant that writes the log gzip-compressed (level 9).

  A '.gz' suffix is appended to the configured base filename.
  """

  def _open(self):
    return gzip.open(self.baseFilename + '.gz', self.mode, 9)
+
+
def HandleUncaughtExceptions(fun):
  """Catches all exceptions that would go outside decorated fun scope.

  Any exception is logged (with traceback) and swallowed; the wrapper then
  returns None instead of propagating.
  """
  import functools

  @functools.wraps(fun)  # Preserve the wrapped function's name/docstring.
  def _Interceptor(*args, **kwargs):
    try:
      return fun(*args, **kwargs)
    except Exception:  # pylint: disable=broad-except
      # StandardError existed only in Python 2; Exception covers the same
      # ground and also works on Python 3.
      logging.exception('Uncaught exception:')

  return _Interceptor
diff --git a/deprecated/automation/common/machine.py b/deprecated/automation/common/machine.py
new file mode 100644
index 00000000..4db0db0d
--- /dev/null
+++ b/deprecated/automation/common/machine.py
@@ -0,0 +1,70 @@
+# Copyright 2010 Google Inc. All Rights Reserved.
+
+__author__ = 'asharif@google.com (Ahmad Sharif)'
+
+from fnmatch import fnmatch
+
+
class Machine(object):
  """Stores information related to machine and its state."""

  def __init__(self, hostname, label, cpu, cores, os, username):
    self.hostname = hostname
    self.label = label
    self.cpu = cpu
    self.cores = cores
    self.os = os
    self.username = username

    # MachineManager related attributes.
    self.uses = 0
    self.locked = False

  def Acquire(self, exclusively):
    """Take one reference; an exclusive acquisition also locks the machine."""
    assert not self.locked

    self.uses += 1
    if exclusively:
      self.locked = True

  def Release(self):
    """Drop one reference; the machine unlocks once none remain."""
    assert self.uses > 0

    self.uses -= 1
    self.locked = self.locked and self.uses > 0

  def __repr__(self):
    return '{%s: %s@%s}' % (self.__class__.__name__, self.username,
                            self.hostname)

  def __str__(self):
    lines = ['Machine Information:',
             'Hostname: %s' % self.hostname,
             'Label: %s' % self.label,
             'CPU: %s' % self.cpu,
             'Cores: %d' % self.cores,
             'OS: %s' % self.os,
             'Uses: %d' % self.uses,
             'Locked: %s' % self.locked]
    return '\n'.join(lines)
+
+
class MachineSpecification(object):
  """Helper class used to find a machine matching your requirements."""

  def __init__(self, hostname='*', label='*', os='*', lock_required=False):
    """Patterns are fnmatch-style globs; the default '*' matches anything."""
    self.hostname = hostname
    self.label = label
    self.os = os
    self.lock_required = lock_required
    self.preferred_machines = []

  def __str__(self):
    # 'self.name' never existed (it raised AttributeError); the hostname
    # pattern is what identifies this specification.
    return '\n'.join(['Machine Specification:', 'Name: %s' % self.hostname,
                      'OS: %s' % self.os, 'Lock required: %s' %
                      self.lock_required])

  def IsMatch(self, machine):
    """True if machine is unlocked and matches all glob patterns."""
    return all([not machine.locked, fnmatch(machine.hostname, self.hostname),
                fnmatch(machine.label, self.label), fnmatch(machine.os,
                                                            self.os)])

  def AddPreferredMachine(self, hostname):
    """Record hostname as preferred, ignoring duplicates."""
    if hostname not in self.preferred_machines:
      self.preferred_machines.append(hostname)
diff --git a/deprecated/automation/common/machine_test.py b/deprecated/automation/common/machine_test.py
new file mode 100755
index 00000000..f66299f5
--- /dev/null
+++ b/deprecated/automation/common/machine_test.py
@@ -0,0 +1,26 @@
+#!/usr/bin/python2
+#
+# Copyright 2010 Google Inc. All Rights Reserved.
+"""Machine manager unittest.
+
+MachineManagerTest tests MachineManager.
+"""
+
+__author__ = 'asharif@google.com (Ahmad Sharif)'
+
+import machine
+import unittest
+
+
class MachineTest(unittest.TestCase):
  """Sanity checks for machine.Machine."""

  def setUp(self):
    pass

  def testPrintMachine(self):
    # Machine.__init__ takes six arguments (hostname, label, cpu, cores,
    # os, username); the original call passed only five and raised
    # TypeError before the assertion ever ran.
    mach = machine.Machine('ahmad.mtv', 'perflab', 'core2duo', 4, 'linux',
                           'asharif')
    self.assertTrue('ahmad.mtv' in str(mach))
+
+
# Run the unit tests when invoked as a script.
if __name__ == '__main__':
  unittest.main()
diff --git a/deprecated/automation/common/state_machine.py b/deprecated/automation/common/state_machine.py
new file mode 100644
index 00000000..d1cf42c8
--- /dev/null
+++ b/deprecated/automation/common/state_machine.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Google Inc. All Rights Reserved.
+#
+
+__author__ = 'kbaclawski@google.com (Krystian Baclawski)'
+
+from automation.common import events
+
+
class BasicStateMachine(object):
  """Generic class for constructing state machines.

  Keeps all states and possible transition of a state machine. Ensures that
  transition between two states is always valid. Also stores transition events
  in a timeline object.
  """
  # Mapping: state -> list of states reachable from it. Subclasses override.
  state_machine = {}
  # States that, once entered, close the timeline's last event.
  final_states = []

  def __init__(self, initial_state):
    assert initial_state in self.state_machine,\
        'Initial state does not belong to this state machine'

    self._state = initial_state

    # Every state entered (including the initial one) is recorded here.
    self.timeline = events.EventHistory()
    self.timeline.AddEvent(self._state)

  def __str__(self):
    return self._state

  def __eq__(self, value):
    # Comparable both to other state machines and to plain state strings.
    # NOTE(review): __eq__ without __hash__ makes instances unhashable on
    # Python 3 -- confirm no caller keeps these in sets/dict keys.
    if isinstance(value, BasicStateMachine):
      value = str(value)

    return self._state == value

  def __ne__(self, value):
    return not self == value

  def _TransitionAllowed(self, to_state):
    # Unknown current states yield an empty transition list (no-op guard).
    return to_state in self.state_machine.get(self._state, [])

  def Change(self, new_state):
    """Transition to new_state, asserting that the transition is legal."""
    assert self._TransitionAllowed(new_state),\
        'Transition from %s to %s not possible' % (self._state, new_state)

    self._state = new_state

    self.timeline.AddEvent(self._state)

    # Entering a final state closes the last timeline event immediately.
    if self._state in self.final_states:
      self.timeline.last.Finish()
diff --git a/deprecated/automation/server/__init__.py b/deprecated/automation/server/__init__.py
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/deprecated/automation/server/__init__.py
@@ -0,0 +1 @@
+
diff --git a/deprecated/automation/server/job_executer.py b/deprecated/automation/server/job_executer.py
new file mode 100644
index 00000000..30b59463
--- /dev/null
+++ b/deprecated/automation/server/job_executer.py
@@ -0,0 +1,138 @@
+# Copyright 2010 Google Inc. All Rights Reserved.
+#
+
+import logging
+import os.path
+import threading
+
+from automation.common import command as cmd
+from automation.common import job
+from automation.common import logger
+from automation.common.command_executer import LoggingCommandExecuter
+from automation.common.command_executer import CommandTerminator
+
+
class JobExecuter(threading.Thread):
  """Thread that runs one job on its allocated machines.

  The job's command executes remotely on the primary machine; results are
  copied back to the job's home directory and listeners are notified when
  the job reaches a final state.
  """

  def __init__(self, job_to_execute, machines, listeners):
    """Args:
      job_to_execute: The job.Job instance to run.
      machines: Non-empty list of machines; the first is the primary one.
      listeners: Objects providing NotifyJobComplete(job).
    """
    threading.Thread.__init__(self)

    assert machines

    self.job = job_to_execute
    self.listeners = listeners
    self.machines = machines

    # Set Thread name.
    self.name = '%s-%s' % (self.__class__.__name__, self.job.id)

    self._logger = logging.getLogger(self.__class__.__name__)
    self._executer = LoggingCommandExecuter(self.job.dry_run)
    self._terminator = CommandTerminator()

  def _RunRemotely(self, command, fail_msg, command_timeout=1 * 60 * 60):
    """Run command on the primary machine; raise JobFailure on non-zero exit."""
    exit_code = self._executer.RunCommand(command,
                                          self.job.primary_machine.hostname,
                                          self.job.primary_machine.username,
                                          command_terminator=self._terminator,
                                          command_timeout=command_timeout)
    if exit_code:
      raise job.JobFailure(fail_msg, exit_code)

  def _RunLocally(self, command, fail_msg, command_timeout=1 * 60 * 60):
    """Run command on this host; raise JobFailure on non-zero exit."""
    exit_code = self._executer.RunCommand(command,
                                          command_terminator=self._terminator,
                                          command_timeout=command_timeout)
    if exit_code:
      raise job.JobFailure(fail_msg, exit_code)

  def Kill(self):
    """Ask the currently running command to terminate."""
    self._terminator.Terminate()

  def CleanUpWorkDir(self):
    """Remove the job's work directory on the primary machine."""
    self._logger.debug('Cleaning up %r work directory.', self.job)
    self._RunRemotely(cmd.RmTree(self.job.work_dir), 'Cleanup workdir failed.')

  def CleanUpHomeDir(self):
    """Remove the job's home directory on this host."""
    self._logger.debug('Cleaning up %r home directory.', self.job)
    self._RunLocally(cmd.RmTree(self.job.home_dir), 'Cleanup homedir failed.')

  def _PrepareRuntimeEnvironment(self):
    """Create the job's remote directories and start logging its output."""
    self._RunRemotely(
        cmd.MakeDir(self.job.work_dir, self.job.logs_dir, self.job.results_dir),
        'Creating new job directory failed.')

    # The log directory is ready, so we can prepare to log command's output.
    self._executer.OpenLog(os.path.join(self.job.logs_dir,
                                        self.job.log_filename_prefix))

  def _SatisfyFolderDependencies(self):
    """Symlink or copy each dependency folder into the job's work dir."""
    for dependency in self.job.folder_dependencies:
      to_folder = os.path.join(self.job.work_dir, dependency.dest)
      from_folder = os.path.join(dependency.job.work_dir, dependency.src)
      from_machine = dependency.job.primary_machine

      if from_machine == self.job.primary_machine and dependency.read_only:
        # No need to make a copy, just symlink it
        self._RunRemotely(
            cmd.MakeSymlink(from_folder, to_folder),
            'Failed to create symlink to required directory.')
      else:
        self._RunRemotely(
            cmd.RemoteCopyFrom(from_machine.hostname,
                               from_folder,
                               to_folder,
                               username=from_machine.username),
            'Failed to copy required files.')

  def _LaunchJobCommand(self):
    """Run the job's command remotely, sourcing ~/.bashrc first."""
    command = self.job.GetCommand()

    self._RunRemotely('%s; %s' % ('PS1=. TERM=linux source ~/.bashrc',
                                  cmd.Wrapper(command,
                                              cwd=self.job.work_dir)),
                      "Command failed to execute: '%s'." % command,
                      self.job.timeout)

  def _CopyJobResults(self):
    """Copy test results back to directory."""
    self._RunLocally(
        cmd.RemoteCopyFrom(self.job.primary_machine.hostname,
                           self.job.results_dir,
                           self.job.home_dir,
                           username=self.job.primary_machine.username),
        'Failed to copy results.')

  def run(self):
    """Drive the job through its lifecycle and notify listeners at the end."""
    self.job.status = job.STATUS_SETUP
    self.job.machines = self.machines
    self._logger.debug('Executing %r on %r in directory %s.', self.job,
                       self.job.primary_machine.hostname, self.job.work_dir)

    try:
      self.CleanUpWorkDir()

      self._PrepareRuntimeEnvironment()

      self.job.status = job.STATUS_COPYING

      self._SatisfyFolderDependencies()

      self.job.status = job.STATUS_RUNNING

      self._LaunchJobCommand()
      self._CopyJobResults()

      # If we get here, the job succeeded.
      self.job.status = job.STATUS_SUCCEEDED
    except job.JobFailure as ex:
      self._logger.error('Job failed. Exit code %s. %s', ex.exit_code, ex)
      if self._terminator.IsTerminated():
        self._logger.info('%r was killed', self.job)

      self.job.status = job.STATUS_FAILED

    self._executer.CloseLog()

    for listener in self.listeners:
      listener.NotifyJobComplete(self.job)
diff --git a/deprecated/automation/server/job_group_manager.py b/deprecated/automation/server/job_group_manager.py
new file mode 100644
index 00000000..d66f5e07
--- /dev/null
+++ b/deprecated/automation/server/job_group_manager.py
@@ -0,0 +1,118 @@
+# Copyright 2010 Google Inc. All Rights Reserved.
+#
+
+import copy
+import logging
+import threading
+
+from automation.common import command as cmd
+from automation.common import logger
+from automation.common.command_executer import CommandExecuter
+from automation.common import job
+from automation.common import job_group
+from automation.server.job_manager import IdProducerPolicy
+
+
class JobGroupManager(object):
  """Tracks job groups, their IDs and their aggregate success/failure.

  Registers itself as a listener on the job manager so that individual job
  completions can be folded into the owning group's status.
  """

  def __init__(self, job_manager):
    self.all_job_groups = []

    self.job_manager = job_manager
    self.job_manager.AddListener(self)

    self._lock = threading.Lock()
    # Signalled whenever a group reaches a final state.
    self._job_group_finished = threading.Condition(self._lock)

    # Continue group IDs from whatever directories already exist on disk.
    self._id_producer = IdProducerPolicy()
    self._id_producer.Initialize(job_group.JobGroup.HOMEDIR_PREFIX,
                                 'job-group-(?P<id>\d+)')

    self._logger = logging.getLogger(self.__class__.__name__)

  def GetJobGroup(self, group_id):
    """Return the group with the given id, or None if unknown."""
    with self._lock:
      for group in self.all_job_groups:
        if group.id == group_id:
          return group

      return None

  def GetAllJobGroups(self):
    """Return a deep-copied snapshot of all tracked groups."""
    with self._lock:
      return copy.deepcopy(self.all_job_groups)

  def AddJobGroup(self, group):
    """Assign an id, create the group's home dir and enqueue its jobs."""
    with self._lock:
      group.id = self._id_producer.GetNextId()

    self._logger.debug('Creating runtime environment for %r.', group)

    CommandExecuter().RunCommand(cmd.Chain(
        cmd.RmTree(group.home_dir), cmd.MakeDir(group.home_dir)))

    with self._lock:
      self.all_job_groups.append(group)

      for job_ in group.jobs:
        self.job_manager.AddJob(job_)

      group.status = job_group.STATUS_EXECUTING

    self._logger.info('Added %r to queue.', group)

    return group.id

  def KillJobGroup(self, group):
    """Kill every job in the group and block until the group finishes."""
    with self._lock:
      self._logger.debug('Killing all jobs that belong to %r.', group)

      for job_ in group.jobs:
        self.job_manager.KillJob(job_)

      self._logger.debug('Waiting for jobs to quit.')

      # Lets block until the group is killed so we know it is completed
      # when we return.
      while group.status not in [job_group.STATUS_SUCCEEDED,
                                 job_group.STATUS_FAILED]:
        self._job_group_finished.wait()

  def NotifyJobComplete(self, job_):
    """Fold a single job's completion into its group's overall status."""
    self._logger.debug('Handling %r completion event.', job_)

    group = job_.group

    with self._lock:
      # We need to perform an action only if the group hasn't already failed.
      if group.status != job_group.STATUS_FAILED:
        if job_.status == job.STATUS_FAILED:
          # We have a failed job, abort the job group
          group.status = job_group.STATUS_FAILED
          if group.cleanup_on_failure:
            # NOTE(review): this loop reuses the name 'job_', shadowing the
            # method argument from here on.
            for job_ in group.jobs:
              # TODO(bjanakiraman): We should probably only kill dependent jobs
              # instead of the whole job group.
              self.job_manager.KillJob(job_)
              self.job_manager.CleanUpJob(job_)
        else:
          # The job succeeded successfully -- lets check to see if we are done.
          assert job_.status == job.STATUS_SUCCEEDED
          finished = True
          for other_job in group.jobs:
            assert other_job.status != job.STATUS_FAILED
            if other_job.status != job.STATUS_SUCCEEDED:
              finished = False
              break

          if finished and group.status != job_group.STATUS_SUCCEEDED:
            # TODO(kbaclawski): Without check performed above following code
            # could be called more than once. This would trigger StateMachine
            # crash, because it cannot transition from STATUS_SUCCEEDED to
            # STATUS_SUCCEEDED. Need to address that bug in near future.
            group.status = job_group.STATUS_SUCCEEDED
            if group.cleanup_on_completion:
              for job_ in group.jobs:
                self.job_manager.CleanUpJob(job_)

      self._job_group_finished.notifyAll()
diff --git a/deprecated/automation/server/job_manager.py b/deprecated/automation/server/job_manager.py
new file mode 100644
index 00000000..7a65b918
--- /dev/null
+++ b/deprecated/automation/server/job_manager.py
@@ -0,0 +1,194 @@
+# Copyright 2010 Google Inc. All Rights Reserved.
+#
+
+import logging
+import os
+import re
+import threading
+
+from automation.common import job
+from automation.common import logger
+from automation.server.job_executer import JobExecuter
+
+
class IdProducerPolicy(object):
  """Hands out a strictly increasing series of unique integer IDs.

  Example:
    id_producer = IdProducerPolicy()
    id_a = id_producer.GetNextId()
    id_b = id_producer.GetNextId()
    assert id_a != id_b
  """

  def __init__(self):
    # Next ID to hand out; IDs start at 1.
    self._counter = 1

  def Initialize(self, home_prefix, home_pattern):
    """Resume numbering after IDs already present on disk.

    Scans the sub-directories of home_prefix for names matching
    home_pattern and continues counting after the highest ID found.

    Args:
      home_prefix: A directory to be traversed.
      home_pattern: A regexp describing all files/directories that will be
        considered. The regexp must contain exactly one match group with
        name "id", which must match an integer number.

    Example:
      id_producer.Initialize(JOBDIR_PREFIX, 'job-(?P<id>\d+)')
    """
    # Seed with 0 so an empty/missing directory restarts numbering at 1.
    harvested = [0]

    if os.path.isdir(home_prefix):
      for entry in os.listdir(home_prefix):
        if not os.path.isdir(os.path.join(home_prefix, entry)):
          continue
        found = re.match(home_pattern, entry)
        if found:
          harvested.append(int(found.group('id')))

    self._counter = max(harvested) + 1

  def GetNextId(self):
    """Return a fresh ID, then advance the counter."""
    next_id, self._counter = self._counter, self._counter + 1
    return next_id
+
+
class JobManager(threading.Thread):
  """Schedules jobs onto machines and supervises their execution.

  Runs as a worker thread: AddJob() queues work, the run() loop pops ready
  jobs, reserves machines via the MachineManager and hands each job to a
  JobExecuter thread.  Shared state (job lists and the id->executer map)
  is guarded by self._lock; self._jobs_available wakes the scheduler loop.
  """

  def __init__(self, machine_manager):
    threading.Thread.__init__(self, name=self.__class__.__name__)
    # Every job ever submitted (never pruned -- see TODO in CleanUpJob).
    self.all_jobs = []
    # Jobs whose dependencies are satisfied, waiting for machines.
    self.ready_jobs = []
    # Maps job id -> running JobExecuter thread.
    self.job_executer_mapping = {}

    self.machine_manager = machine_manager

    self._lock = threading.Lock()
    self._jobs_available = threading.Condition(self._lock)
    self._exit_request = False

    # Objects notified on job completion; the manager itself listens so
    # NotifyJobComplete() can schedule successor jobs.
    self.listeners = []
    self.listeners.append(self)

    # Resume job numbering after work directories left by previous runs.
    self._id_producer = IdProducerPolicy()
    self._id_producer.Initialize(job.Job.WORKDIR_PREFIX, 'job-(?P<id>\d+)')

    self._logger = logging.getLogger(self.__class__.__name__)

  def StartJobManager(self):
    """Start the scheduler thread and wake it to process queued jobs."""
    self._logger.info('Starting...')

    with self._lock:
      self.start()
      self._jobs_available.notifyAll()

  def StopJobManager(self):
    """Kill all jobs, signal the scheduler to exit and join executers."""
    self._logger.info('Shutdown request received.')

    with self._lock:
      for job_ in self.all_jobs:
        self._KillJob(job_.id)

      # Signal to die
      self._exit_request = True
      self._jobs_available.notifyAll()

      # Wait for all job threads to finish.
      # NOTE(review): these join() calls run while self._lock is held; an
      # executer whose completion listener needs the same lock (e.g.
      # NotifyJobComplete) could deadlock here -- confirm.
      for executer in self.job_executer_mapping.values():
        executer.join()

  def KillJob(self, job_id):
    """Kill a job by id.

    Does not block until the job is completed.
    """
    with self._lock:
      self._KillJob(job_id)

  def GetJob(self, job_id):
    """Return the job with the given id, or None (linear scan)."""
    for job_ in self.all_jobs:
      if job_.id == job_id:
        return job_
    return None

  def _KillJob(self, job_id):
    """Kill a running executer and/or dequeue the job. Caller holds lock."""
    self._logger.info('Killing [Job: %d].', job_id)

    if job_id in self.job_executer_mapping:
      self.job_executer_mapping[job_id].Kill()
    # Also drop a still-queued copy so it never starts.
    for job_ in self.ready_jobs:
      if job_.id == job_id:
        self.ready_jobs.remove(job_)
        break

  def AddJob(self, job_):
    """Assign an id to job_, queue it if ready, and return the id."""
    with self._lock:
      job_.id = self._id_producer.GetNextId()

      self.all_jobs.append(job_)
      # Only queue a job as ready if it has no dependencies.
      if job_.is_ready:
        self.ready_jobs.append(job_)

      self._jobs_available.notifyAll()

    return job_.id

  def CleanUpJob(self, job_):
    """Remove the job's work directory and forget its executer."""
    with self._lock:
      if job_.id in self.job_executer_mapping:
        self.job_executer_mapping[job_.id].CleanUpWorkDir()
        del self.job_executer_mapping[job_.id]
      # TODO(raymes): remove job from self.all_jobs

  def NotifyJobComplete(self, job_):
    """Listener hook: release machines and queue any now-ready successors."""
    self.machine_manager.ReturnMachines(job_.machines)

    with self._lock:
      self._logger.debug('Handling %r completion event.', job_)

      if job_.status == job.STATUS_SUCCEEDED:
        for succ in job_.successors:
          if succ.is_ready:
            if succ not in self.ready_jobs:
              self.ready_jobs.append(succ)

      self._jobs_available.notifyAll()

  def AddListener(self, listener):
    """Register an object to be notified of job completion events."""
    self.listeners.append(listener)

  @logger.HandleUncaughtExceptions
  def run(self):
    """Scheduler loop: wait for work, acquire machines, launch executers."""
    self._logger.info('Started.')

    while not self._exit_request:
      with self._lock:
        # Get the next ready job, block if there are none.
        # NOTE(review): wait() is not wrapped in a predicate re-check loop;
        # a notify issued before the loop reaches wait() is missed until
        # the next notification -- confirm this is acceptable here.
        self._jobs_available.wait()

        while self.ready_jobs:
          ready_job = self.ready_jobs.pop()

          required_machines = ready_job.machine_dependencies
          # Prefer machines that already ran this job's predecessors.
          for pred in ready_job.predecessors:
            required_machines[0].AddPreferredMachine(
                pred.primary_machine.hostname)

          machines = self.machine_manager.GetMachines(required_machines)
          if not machines:
            # If we can't get the necessary machines right now, simply wait
            # for some jobs to complete.
            self.ready_jobs.insert(0, ready_job)
            break
          else:
            # Mark as executing.
            executer = JobExecuter(ready_job, machines, self.listeners)
            executer.start()
            self.job_executer_mapping[ready_job.id] = executer

    self._logger.info('Stopped.')
diff --git a/deprecated/automation/server/machine_manager.py b/deprecated/automation/server/machine_manager.py
new file mode 100644
index 00000000..b7186077
--- /dev/null
+++ b/deprecated/automation/server/machine_manager.py
@@ -0,0 +1,77 @@
+# Copyright 2010 Google Inc. All Rights Reserved.
+
+__author__ = 'asharif@google.com (Ahmad Sharif)'
+
+from operator import attrgetter
+import copy
+import csv
+import threading
+import os.path
+
+from automation.common import machine
+
+DEFAULT_MACHINES_FILE = os.path.join(os.path.dirname(__file__), 'test_pool.csv')
+
+
class MachineManager(object):
  """Thread-safe pool of machines that jobs can be scheduled on."""

  @classmethod
  def FromMachineListFile(cls, filename):
    """Build a manager from a CSV machine database.

    Args:
      filename: CSV file whose rows are (hostname, label, cpu, cores, os,
        username); the first row is a header and is skipped.

    Returns:
      A MachineManager holding one Machine per data row.
    """
    # Fix: the original never closed the handle returned by open(); use a
    # context manager so it is closed even if parsing fails.
    with open(filename, 'rb') as machines_file:
      reader = csv.reader(machines_file, delimiter=',', quotechar='"')
      next(reader)  # Skip the header row.
      return cls([machine.Machine(hostname, label, cpu, int(cores), os, user)
                  for hostname, label, cpu, cores, os, user in reader])

  def __init__(self, machines):
    # Full machine pool; all public operations are guarded by _lock.
    self._machine_pool = machines
    self._lock = threading.RLock()

  def _GetMachine(self, mach_spec):
    """Acquire the best machine matching mach_spec, or return None.

    Picks the least-used matching machine, preferring hosts listed in
    mach_spec.preferred_machines when any of them match.
    """
    available_pool = [m for m in self._machine_pool if mach_spec.IsMatch(m)]

    if not available_pool:
      return None

    # Find a machine with minimum uses.
    uses = attrgetter('uses')
    mach = min(available_pool, key=uses)

    if mach_spec.preferred_machines:
      preferred_pool = [m
                        for m in available_pool
                        if m.hostname in mach_spec.preferred_machines]
      if preferred_pool:
        mach = min(preferred_pool, key=uses)

    mach.Acquire(mach_spec.lock_required)
    return mach

  def GetMachines(self, required_machines):
    """Acquire machines for use by a job.

    All-or-nothing: if any specification cannot be satisfied, every
    already-acquired machine is released and an empty list is returned.
    """
    with self._lock:
      acquired_machines = [self._GetMachine(ms) for ms in required_machines]

      if not all(acquired_machines):
        # Roll back the partial acquisition.
        while acquired_machines:
          mach = acquired_machines.pop()
          if mach:
            mach.Release()

      return acquired_machines

  def GetMachineList(self):
    """Return a deep copy of the pool, safe for callers to inspect."""
    with self._lock:
      return copy.deepcopy(self._machine_pool)

  def ReturnMachines(self, machines):
    """Release machines previously handed out by GetMachines."""
    with self._lock:
      for m in machines:
        m.Release()

  def __str__(self):
    return str(self._machine_pool)
diff --git a/deprecated/automation/server/machine_manager_test.py b/deprecated/automation/server/machine_manager_test.py
new file mode 100755
index 00000000..2fa5bb4b
--- /dev/null
+++ b/deprecated/automation/server/machine_manager_test.py
@@ -0,0 +1,32 @@
+#!/usr/bin/python2
+#
+# Copyright 2010 Google Inc. All Rights Reserved.
+
+__author__ = 'asharif@google.com (Ahmad Sharif)'
+
+import unittest
+from automation.common import machine
+from automation.server import machine_manager
+
+
class MachineManagerTest(unittest.TestCase):
  """Sanity tests for MachineManager backed by the default test pool."""

  def setUp(self):
    # Fix: MachineManager.__init__ requires a machine list; calling
    # MachineManager() with no arguments raised TypeError.  Load the
    # default CSV pool instead.
    self.machine_manager = machine_manager.MachineManager.FromMachineListFile(
        machine_manager.DEFAULT_MACHINES_FILE)

  def testPrint(self):
    # Exercises __str__; print() form parses on both Python 2 and 3.
    print(self.machine_manager)

  def testGetLinuxBox(self):
    mach_spec_list = [machine.MachineSpecification(os='linux')]
    machines = self.machine_manager.GetMachines(mach_spec_list)
    self.assertTrue(machines)

  def testGetChromeOSBox(self):
    mach_spec_list = [machine.MachineSpecification(os='chromeos')]
    machines = self.machine_manager.GetMachines(mach_spec_list)
    self.assertTrue(machines)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deprecated/automation/server/monitor/__init__.py b/deprecated/automation/server/monitor/__init__.py
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/deprecated/automation/server/monitor/__init__.py
@@ -0,0 +1 @@
+
diff --git a/deprecated/automation/server/monitor/dashboard.py b/deprecated/automation/server/monitor/dashboard.py
new file mode 100644
index 00000000..f6befed8
--- /dev/null
+++ b/deprecated/automation/server/monitor/dashboard.py
@@ -0,0 +1,259 @@
+# Copyright 2011 Google Inc. All Rights Reserved.
+#
+
+__author__ = 'kbaclawski@google.com (Krystian Baclawski)'
+
+from collections import namedtuple
+import glob
+import gzip
+import os.path
+import pickle
+import time
+import xmlrpclib
+
+from django import forms
+from django.http import HttpResponseRedirect
+from django.shortcuts import render_to_response
+from django.template import Context
+from django.views import static
+
+Link = namedtuple('Link', 'href name')
+
+
def GetServerConnection():
  """Return an XML-RPC proxy to the automation server on localhost:8000."""
  return xmlrpclib.Server('http://localhost:8000')
+
+
def MakeDefaultContext(*args):
  """Build a template Context seeded with the dashboard navigation links.

  Any dicts/contexts passed as positional arguments are layered on top.
  """
  navigation = [Link('/job-group', 'Job Groups'), Link('/machine', 'Machines')]
  context = Context({'links': navigation})

  for extra in args:
    context.update(extra)

  return context
+
+
class JobInfo(object):
  """Read-only view of one job fetched from the automation server."""

  def __init__(self, job_id):
    # The server returns a pickled automation.common job object.
    self._job = pickle.loads(GetServerConnection().GetJob(job_id))

  def GetAttributes(self):
    """Return job attributes grouped by how templates render them
    ('text', 'link' and 'code' buckets)."""
    job = self._job

    group = [Link('/job-group/%d' % job.group.id, job.group.label)]

    predecessors = [Link('/job/%d' % pred.id, pred.label)
                    for pred in job.predecessors]

    successors = [Link('/job/%d' % succ.id, succ.label)
                  for succ in job.successors]

    machines = [Link('/machine/%s' % mach.hostname, mach.hostname)
                for mach in job.machines]

    logs = [Link('/job/%d/log' % job.id, 'Log')]

    commands = enumerate(job.PrettyFormatCommand().split('\n'), start=1)

    return {'text': [('Label', job.label), ('Directory', job.work_dir)],
            'link': [('Group', group), ('Predecessors', predecessors),
                     ('Successors', successors), ('Machines', machines),
                     ('Logs', logs)],
            'code': [('Command', commands)]}

  def GetTimeline(self):
    """Return the job's state-transition history for template rendering."""
    return [{'started': evlog.GetTimeStartedFormatted(),
             'state_from': evlog.event.from_,
             'state_to': evlog.event.to_,
             'elapsed': evlog.GetTimeElapsedRounded()}
            for evlog in self._job.timeline.GetTransitionEventHistory()]

  def GetLog(self):
    """Return parsed log lines as (datetime, stream, message) tuples.

    Returns an empty list when the compressed log cannot be opened.
    """
    log_path = os.path.join(self._job.logs_dir,
                            '%s.gz' % self._job.log_filename_prefix)

    try:
      log = gzip.open(log_path, 'r')
    except IOError:
      # Fix: the original fell through to a finally clause that called
      # log.close() even though `log` was never bound, raising NameError
      # whenever the log file was missing.  Bail out early instead.
      return []

    try:
      # There's a good chance that file is not closed yet, so EOF handling
      # function and CRC calculation will fail, thus we need to monkey patch
      # the _read_eof method.
      log._read_eof = lambda: None

      def SplitLine(line):
        prefix, msg = line.split(': ', 1)
        datetime, stream = prefix.rsplit(' ', 1)

        return datetime, stream, msg

      return map(SplitLine, log.readlines())
    finally:
      log.close()
+
+
class JobGroupInfo(object):
  """Read-only view of one job group fetched from the automation server."""

  def __init__(self, job_group_id):
    blob = GetServerConnection().GetJobGroup(job_group_id)
    self._job_group = pickle.loads(blob)

  def GetAttributes(self):
    """Return group attributes grouped for template rendering."""
    group = self._job_group

    dir_links = [Link('/job-group/%d/files/' % group.id, group.home_dir)]

    text_attrs = [('Label', group.label),
                  ('Time submitted', time.ctime(group.time_submitted)),
                  ('State', group.status),
                  ('Cleanup on completion', group.cleanup_on_completion),
                  ('Cleanup on failure', group.cleanup_on_failure)]

    return {'text': text_attrs, 'link': [('Directory', dir_links)]}

  def _GetJobStatus(self, job):
    # CSS class for terminal job states; None for anything still running.
    return {'SUCCEEDED': 'success', 'FAILED': 'failure'}.get(str(job.status))

  def GetJobList(self):
    """Return one summary dict per job for the listing table."""
    rows = []
    for job in self._job_group.jobs:
      rows.append({'id': job.id,
                   'label': job.label,
                   'state': job.status,
                   'status': self._GetJobStatus(job),
                   'elapsed': job.timeline.GetTotalTime()})
    return rows

  def GetHomeDirectory(self):
    """Return the group's working directory on the server."""
    return self._job_group.home_dir

  def GetReportList(self):
    """Collect the contents of per-job report.html files, oldest first."""
    pattern = os.path.join(self._job_group.home_dir, 'job-*', 'report.html')
    found = [f for f in glob.glob(pattern) if os.access(f, os.F_OK)]

    contents = []
    for path in sorted(found, key=lambda f: os.stat(f).st_ctime):
      try:
        with open(path, 'r') as report_file:
          contents.append(report_file.read())
      except IOError:
        # Unreadable reports are skipped silently, as before.
        pass

    return contents
+
+
class JobGroupListInfo(object):
  """Read-only view of every job group known to the automation server."""

  def __init__(self):
    blob = GetServerConnection().GetAllJobGroups()
    self._all_job_groups = pickle.loads(blob)

  def _GetJobGroupState(self, group):
    return str(group.status)

  def _GetJobGroupStatus(self, group):
    # CSS class for terminal states; None for groups still in flight.
    css_by_state = {'SUCCEEDED': 'success', 'FAILED': 'failure'}
    return css_by_state.get(self._GetJobGroupState(group))

  def GetList(self):
    """Return one summary dict per group for the listing table."""
    rows = []
    for group in self._all_job_groups:
      rows.append({'id': group.id,
                   'label': group.label,
                   'submitted': time.ctime(group.time_submitted),
                   'state': self._GetJobGroupState(group),
                   'status': self._GetJobGroupStatus(group)})
    return rows

  def GetLabelList(self):
    """Return all distinct group labels, sorted alphabetically."""
    return sorted({group.label for group in self._all_job_groups})
+
+
def JobPageHandler(request, job_id):
  """Render the detail page (attributes + timeline) for a single job."""
  job = JobInfo(int(job_id))

  ctx = MakeDefaultContext({
      'job_id': job_id,
      'attributes': job.GetAttributes(),
      'timeline': job.GetTimeline()
  })

  return render_to_response('job.html', ctx)
+
+
def LogPageHandler(request, job_id):
  """Render the captured log output of a single job."""
  job = JobInfo(int(job_id))

  ctx = MakeDefaultContext({'job_id': job_id, 'log_lines': job.GetLog()})

  return render_to_response('job_log.html', ctx)
+
+
def JobGroupPageHandler(request, job_group_id):
  """Render the detail page (attributes, job list, reports) for a group."""
  group = JobGroupInfo(int(job_group_id))

  ctx = MakeDefaultContext({
      'group_id': job_group_id,
      'attributes': group.GetAttributes(),
      'job_list': group.GetJobList(),
      'reports': group.GetReportList()
  })

  return render_to_response('job_group.html', ctx)
+
+
def JobGroupFilesPageHandler(request, job_group_id, path):
  """Serve files from the group's home directory with directory listings."""
  group = JobGroupInfo(int(job_group_id))

  return static.serve(request,
                      path,
                      document_root=group.GetHomeDirectory(),
                      show_indexes=True)
+
+
class FilterJobGroupsForm(forms.Form):
  """Single-field form used to filter the job group listing by label.

  Its label choices are populated dynamically by JobGroupListPageHandler.
  """
  label = forms.ChoiceField(label='Filter by label:', required=False)
+
+
def JobGroupListPageHandler(request):
  """Render the job group listing, optionally filtered by label (POST)."""
  groups = JobGroupListInfo()
  group_list = groups.GetList()

  # NOTE(review): mutating base_fields changes class-level state shared by
  # every form instance across requests -- confirm this is intended rather
  # than passing choices to each instance.
  field = FilterJobGroupsForm.base_fields['label']
  field.choices = [('*', '--- no filtering ---')]
  field.choices.extend([(label, label) for label in groups.GetLabelList()])

  if request.method == 'POST':
    form = FilterJobGroupsForm(request.POST)

    if form.is_valid():
      label = form.cleaned_data['label']

      # '*' is the sentinel for "no filtering".
      if label != '*':
        group_list = [group for group in group_list if group['label'] == label]
  else:
    form = FilterJobGroupsForm({'initial': '*'})

  ctx = MakeDefaultContext({'filter': form, 'groups': group_list})

  return render_to_response('job_group_list.html', ctx)
+
+
def MachineListPageHandler(request):
  """Render the table of all machines known to the automation server."""
  machine_list = pickle.loads(GetServerConnection().GetMachineList())

  return render_to_response('machine_list.html',
                            MakeDefaultContext({'machines': machine_list}))
+
+
def DefaultPageHandler(request):
  """Redirect the dashboard root to the job group listing."""
  return HttpResponseRedirect('/job-group')
diff --git a/deprecated/automation/server/monitor/manage.py b/deprecated/automation/server/monitor/manage.py
new file mode 100755
index 00000000..59f6e216
--- /dev/null
+++ b/deprecated/automation/server/monitor/manage.py
@@ -0,0 +1,20 @@
+#!/usr/bin/python2
+#
+# Copyright 2011 Google Inc. All Rights Reserved.
+#
+
+__author__ = 'kbaclawski@google.com (Krystian Baclawski)'
+
+from django.core.management import execute_manager
+
# Import the project settings module; manage.py must live alongside it.
try:
  import settings  # Assumed to be in the same directory.
except ImportError:
  import sys

  sys.stderr.write('Error: Can\'t find settings.py file in the directory '
                   'containing %r.' % __file__)
  sys.exit(1)

if __name__ == '__main__':
  # Delegate to Django's management command dispatcher.
  execute_manager(settings)
diff --git a/deprecated/automation/server/monitor/settings.py b/deprecated/automation/server/monitor/settings.py
new file mode 100644
index 00000000..8cd20e35
--- /dev/null
+++ b/deprecated/automation/server/monitor/settings.py
@@ -0,0 +1,49 @@
+# Copyright 2011 Google Inc. All Rights Reserved.
+#
+# Django settings for monitor project.
+#
+# For explanation look here: http://docs.djangoproject.com/en/dev/ref/settings
+#
+
+__author__ = 'kbaclawski@google.com (Krystian Baclawski)'
+
+import os.path
+import sys
+
+# Path to the root of application. It's a custom setting, not related to Django.
+ROOT_PATH = os.path.dirname(os.path.realpath(sys.argv[0]))
+
+# Print useful information during runtime if possible.
+DEBUG = True
+TEMPLATE_DEBUG = DEBUG
+
+# Sqlite3 database configuration, though we don't use it right now.
+DATABASE_ENGINE = 'sqlite3'
+DATABASE_NAME = os.path.join(ROOT_PATH, 'monitor.db')
+
+# Local time zone for this installation.
+TIME_ZONE = 'America/Los_Angeles'
+
+# Language code for this installation.
+LANGUAGE_CODE = 'en-us'
+
+# If you set this to False, Django will make some optimizations so as not
+# to load the internationalization machinery.
+USE_I18N = True
+
+# Absolute path to the directory that holds media.
+MEDIA_ROOT = os.path.join(ROOT_PATH, 'static') + '/'
+
+# URL that handles the media served from MEDIA_ROOT. Make sure to use a
+# trailing slash if there is a path component (optional in other cases).
+MEDIA_URL = '/static/'
+
+# Used to provide a seed in secret-key hashing algorithms. Make this unique,
+# and don't share it with anybody.
+SECRET_KEY = '13p5p_4q91*8@yo+tvvt#2k&6#d_&e_zvxdpdil53k419i5sop'
+
+# A string representing the full Python import path to your root URLconf.
+ROOT_URLCONF = 'monitor.urls'
+
+# List of locations of the template source files, in search order.
+TEMPLATE_DIRS = (os.path.join(ROOT_PATH, 'templates'),)
diff --git a/deprecated/automation/server/monitor/start.sh b/deprecated/automation/server/monitor/start.sh
new file mode 100755
index 00000000..4fc53bef
--- /dev/null
+++ b/deprecated/automation/server/monitor/start.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+#
+# Copyright 2011 Google Inc. All Rights Reserved.
+# Author: kbaclawski@google.com (Krystian Baclawski)
+#
+
+./manage.py runserver "$HOSTNAME":8080
diff --git a/deprecated/automation/server/monitor/static/style.css b/deprecated/automation/server/monitor/static/style.css
new file mode 100644
index 00000000..b571b059
--- /dev/null
+++ b/deprecated/automation/server/monitor/static/style.css
@@ -0,0 +1,101 @@
+* { font-family: sans-serif; }
+
+.left { text-align: left; }
+.right { text-align: right; }
+
+.code { font-family: monospace; text-align: left; }
+.line1 { background-color: Gainsboro; }
+.line2 { background-color: WhiteSmoke; }
+
+.title { margin-bottom: 0.25em; }
+
+.success { background-color: LightGreen; }
+.failure { background-color: LightPink; }
+
+pre.code { margin: 0px; }
+
+div.header p.title {
+ border: 1px solid black;
+ font-size: 32px;
+ font-style: bold;
+ background-color: LightBlue;
+ text-align: center;
+ margin: 0px;
+ padding: 10px;
+ font-weight: bold;
+}
+
+div.links {
+ background-color: Azure;
+ margin-top: 2px;
+ padding: 8px 4px 8px 4px;
+ border: solid 1px;
+}
+
+div.content {
+ margin-top: 2px;
+ padding: 8px;
+ border: solid 1px;
+}
+
+div.content p.title {
+ font-size: 28px;
+ text-align: left;
+ margin: 0px;
+ margin-bottom: 8px;
+ padding: 12px;
+ font-weight: bold;
+}
+
+table { border-collapse: collapse; }
+td, th { text-align: center; }
+
+table.list td, th { padding: 3px 8px 2px 8px; border:1px solid black; }
+table.list td { font-family: monospace; }
+table.list th { background-color: LightGray; }
+
+table.attributes td { text-align: left; }
+table.attributes > tbody > tr > td:first-child { font-family: sans-serif; }
+
+table.raw { border-style: none; }
+table.raw td {
+ padding: 0em 0.5em 0em 0.5em;
+ border-style: none;
+ vertical-align: top;
+ text-align: right;
+ font-family: monospace;
+}
+table.raw > tbody > tr > td:first-child { border-left: 0px; }
+table.raw > tbody > tr > td { border-left: 1px solid; }
+
+a.button {
+ background-color: PeachPuff;
+ text-decoration: underline;
+ text-align: center;
+ color: Black;
+ padding: 4px;
+ border: solid 1px;
+}
+
+a.small {
+ padding: 2px 4px 2px 4px;
+ font-size: small;
+ border-color: Gray;
+ background-color: PapayaWhip;
+}
+
+a.button:hover { background-color: LightYellow; }
+a.button:active { background-color: Yellow; }
+
+a.column {
+ border-style: none;
+ display: block;
+ margin: -3px -8px -2px -8px;
+}
+
+div.warning {
+ background-color: MistyRose;
+ border: 1px solid Crimson;
+ padding: 0.5em;
+ font-size: x-large;
+}
diff --git a/deprecated/automation/server/monitor/templates/base.html b/deprecated/automation/server/monitor/templates/base.html
new file mode 100644
index 00000000..95ffc222
--- /dev/null
+++ b/deprecated/automation/server/monitor/templates/base.html
@@ -0,0 +1,30 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
+ <head>
+ <link rel="stylesheet" href="/static/style.css" />
+ <title>{% block title %}Automation Dashboard{% endblock %}</title>
+ </head>
+
+ <body>
+ <div class="header">
+ {% block header %}
+ <p class="title">Automation Dashboard</p>
+ {% endblock %}
+ </div>
+
+ <div class="links">
+ <span>Subpages:</span>
+ {% block links %}
+ {% for link in links %}
+ <a class="button" href="{{ link.href }}">{{ link.name }}</a>
+ {% endfor %}
+ {% endblock %}
+ </div>
+
+ <div class="content">
+ {% block content %}
+ {% endblock %}
+ </div>
+ </body>
+</html>
diff --git a/deprecated/automation/server/monitor/templates/job.html b/deprecated/automation/server/monitor/templates/job.html
new file mode 100644
index 00000000..90acd969
--- /dev/null
+++ b/deprecated/automation/server/monitor/templates/job.html
@@ -0,0 +1,29 @@
+{% extends "base.html" %}
+
+{% block content %}
+<h1 class="title">Job {{ job_id }}</h1>
+
+<h2 class="title">General information</h2>
+{% include "snippet_attribute_table.html" %}
+
+<h2 class="title">Timeline of status events</h2>
+<table class="list">
+ <tbody>
+ <tr>
+ <th>Started</th>
+ <th>From State</th>
+ <th>To State</th>
+ <th>Elapsed</th>
+ </tr>
+ {% for entry in timeline %}
+ <tr>
+ <td>{{ entry.started }}</td>
+ <td>{{ entry.state_from }}</td>
+ <td>{{ entry.state_to }}</td>
+ <td>{{ entry.elapsed }}</td>
+ </tr>
+ {% endfor %}
+ </tbody>
+</table>
+
+{% endblock %}
diff --git a/deprecated/automation/server/monitor/templates/job_group.html b/deprecated/automation/server/monitor/templates/job_group.html
new file mode 100644
index 00000000..b6ed8ea8
--- /dev/null
+++ b/deprecated/automation/server/monitor/templates/job_group.html
@@ -0,0 +1,46 @@
+{% extends "base.html" %}
+
+{% block content %}
+<h1 class="title">Job Group {{ group_id }}</h1>
+
+<h2 class="title">General information</h2>
+{% include "snippet_attribute_table.html" %}
+
+<h2 class="title">Job Listing</h2>
+<table class="list">
+ <tbody>
+ <tr>
+ <th>Job ID</th>
+ <th>Label</th>
+ <th>Turnaround Time</th>
+ <th>State</th>
+ </tr>
+ {% for job in job_list %}
+ <tr>
+ <td>
+ <a class="button column" href="/job/{{ job.id }}">{{ job.id }}</a>
+ </td>
+ <td>{{ job.label }}</td>
+ <td>{{ job.elapsed }}</td>
+ {% if job.status %}
+ <td class="{{ job.status }}">{{ job.state }}</td>
+ {% else %}
+ <td>{{ job.state }}</td>
+ {% endif %}
+ </tr>
+ {% endfor %}
+ </tbody>
+</table>
+
+<h2 class="title">Report</h2>
+{% if reports %}
+{% autoescape off %}
+{% for report in reports %}
+{{ report }}
+{% endfor %}
+{% endautoescape %}
+{% else %}
+<div class="warning">No reports found!</div>
+{% endif %}
+
+{% endblock %}
diff --git a/deprecated/automation/server/monitor/templates/job_group_list.html b/deprecated/automation/server/monitor/templates/job_group_list.html
new file mode 100644
index 00000000..b82fa730
--- /dev/null
+++ b/deprecated/automation/server/monitor/templates/job_group_list.html
@@ -0,0 +1,35 @@
+{% extends "base.html" %}
+
+{% block content %}
+<p class="title">Job Groups</p>
+
+<form action="/job-group" method="post">
+{{ filter.as_p }}
+<p><input type="submit" value="Filter!" /></p>
+</form>
+
+<table class="list">
+ <tbody>
+ <tr>
+ <th>Group ID</th>
+ <th>Label</th>
+ <th>Time Submitted</th>
+ <th>Status</th>
+ </tr>
+ {% for group in groups %}
+ <tr>
+ <td>
+ <a class="button column" href="/job-group/{{ group.id }}">{{ group.id }}</a>
+ </td>
+ <td>{{ group.label }}</td>
+ <td>{{ group.submitted }}</td>
+ {% if group.status %}
+ <td class="{{ group.status }}">{{ group.state }}</td>
+ {% else %}
+ <td>{{ group.state }}</td>
+ {% endif %}
+ </tr>
+ {% endfor %}
+ </tbody>
+</table>
+{% endblock %}
diff --git a/deprecated/automation/server/monitor/templates/job_log.html b/deprecated/automation/server/monitor/templates/job_log.html
new file mode 100644
index 00000000..937b21b0
--- /dev/null
+++ b/deprecated/automation/server/monitor/templates/job_log.html
@@ -0,0 +1,20 @@
+{% extends "base.html" %}
+
+{% block content %}
+<h1 class="title">Job {{ job_id }}</h1>
+
+<h2 class="title">Command output:</h2>
+
+<table class="raw">
+<tbody>
+{% for datetime, stream, line in log_lines %}
+<tr class="{% cycle 'line1' 'line2' %}">
+ <td>{{ datetime }}</td>
+ <td>{{ stream }}</td>
+ <td><pre class="code">{{ line|wordwrap:80 }}</pre></td>
+</tr>
+{% endfor %}
+</tbody>
+</table>
+
+{% endblock %}
diff --git a/deprecated/automation/server/monitor/templates/machine_list.html b/deprecated/automation/server/monitor/templates/machine_list.html
new file mode 100644
index 00000000..f81422d3
--- /dev/null
+++ b/deprecated/automation/server/monitor/templates/machine_list.html
@@ -0,0 +1,39 @@
+{% extends "base.html" %}
+
+{% block content %}
+<p class="title">Machines</p>
+
+<table class="list">
+<tbody>
+<tr>
+ <th>Hostname</th>
+ <th>Label</th>
+ <th>CPU</th>
+ <th>Cores</th>
+ <th>Operating System</th>
+ <th>Jobs Running</th>
+ <th>Locked</th>
+</tr>
+{% for machine in machines %}
+<tr>
+ <td>
+ <a class="button column" href="/machine/{{ machine.hostname }}">
+ {{ machine.hostname }}
+ </a>
+ </td>
+ <td>{{ machine.label }}</td>
+ <td>{{ machine.cpu }}</td>
+ <td>{{ machine.cores }}</td>
+ <td>{{ machine.os }}</td>
+ <td>{{ machine.uses }}</td>
+ {% if machine.locked %}
+ <td class="failure">Yes</td>
+ {% else %}
+ <td class="success">No</td>
+ {% endif %}
+</tr>
+{% endfor %}
+</tbody>
+</table>
+
+{% endblock %}
diff --git a/deprecated/automation/server/monitor/templates/snippet_attribute_table.html b/deprecated/automation/server/monitor/templates/snippet_attribute_table.html
new file mode 100644
index 00000000..24bacc17
--- /dev/null
+++ b/deprecated/automation/server/monitor/templates/snippet_attribute_table.html
@@ -0,0 +1,36 @@
+<table class="list attributes">
+ <tbody>
+ <tr>
+ <th>Attribute</th>
+ <th>Value</th>
+ </tr>
+ {% for name, value in attributes.text %}
+ <tr>
+ <td>{{ name }}</td>
+ <td>{{ value }}</td>
+ </tr>
+ {% endfor %}
+
+ {% for name, links in attributes.link %}
+ <tr>
+ <td>{{ name }}</td>
+ <td>
+ {% if links %}
+ {% for link in links %}
+ <a class="button small" href="{{ link.href }}">{{ link.name }}</a>
+ {% endfor %}
+ {% else %}
+ None
+ {% endif %}
+ </td>
+ </tr>
+ {% endfor %}
+
+ {% for name, code in attributes.code %}
+ <tr>
+ <td>{{ name }}</td>
+ <td>{% include "snippet_code.html" %}</td>
+ </tr>
+ {% endfor %}
+ </tbody>
+</table>
diff --git a/deprecated/automation/server/monitor/templates/snippet_code.html b/deprecated/automation/server/monitor/templates/snippet_code.html
new file mode 100644
index 00000000..281754d6
--- /dev/null
+++ b/deprecated/automation/server/monitor/templates/snippet_code.html
@@ -0,0 +1,10 @@
+<table class="raw">
+<tbody>
+{% for num, line in code %}
+<tr class="{% cycle 'line1' 'line2' %}">
+ <td>{{ num }}</td>
+ <td><pre class="code">{{ line|wordwrap:120 }}</pre></td>
+</tr>
+{% endfor %}
+</tbody>
+</table>
diff --git a/deprecated/automation/server/monitor/templates/snippet_links.html b/deprecated/automation/server/monitor/templates/snippet_links.html
new file mode 100644
index 00000000..f19fa6e5
--- /dev/null
+++ b/deprecated/automation/server/monitor/templates/snippet_links.html
@@ -0,0 +1,7 @@
+{% if param %}
+{% for link in param %}
+<a class="button small" href="{{ link.href }}">{{ link.name }}</a>
+{% endfor %}
+{% else %}
+None
+{% endif %}
diff --git a/deprecated/automation/server/monitor/urls.py b/deprecated/automation/server/monitor/urls.py
new file mode 100644
index 00000000..1a6b2485
--- /dev/null
+++ b/deprecated/automation/server/monitor/urls.py
@@ -0,0 +1,21 @@
+# Copyright 2011 Google Inc. All Rights Reserved.
+#
+
+__author__ = 'kbaclawski@google.com (Krystian Baclawski)'
+
+from django.conf import settings
+from django.conf.urls.defaults import patterns
+
+urlpatterns = patterns(
+ 'dashboard', (r'^job-group$', 'JobGroupListPageHandler'),
+ (r'^machine$', 'MachineListPageHandler'),
+ (r'^job/(?P<job_id>\d+)/log$', 'LogPageHandler'),
+ (r'^job/(?P<job_id>\d+)$', 'JobPageHandler'), (
+ r'^job-group/(?P<job_group_id>\d+)/files/(?P<path>.*)$',
+ 'JobGroupFilesPageHandler'),
+ (r'^job-group/(?P<job_group_id>\d+)$', 'JobGroupPageHandler'),
+ (r'^$', 'DefaultPageHandler'))
+
+urlpatterns += patterns('',
+ (r'^static/(?P<path>.*)$', 'django.views.static.serve',
+ {'document_root': settings.MEDIA_ROOT}))
diff --git a/deprecated/automation/server/server.py b/deprecated/automation/server/server.py
new file mode 100755
index 00000000..c8f22521
--- /dev/null
+++ b/deprecated/automation/server/server.py
@@ -0,0 +1,125 @@
+#!/usr/bin/python2
+#
+# Copyright 2010 Google Inc. All Rights Reserved.
+
+import logging
+import optparse
+import pickle
+import signal
+from SimpleXMLRPCServer import SimpleXMLRPCServer
+import sys
+
+from automation.common import logger
+from automation.common.command_executer import CommandExecuter
+from automation.server import machine_manager
+from automation.server.job_group_manager import JobGroupManager
+from automation.server.job_manager import JobManager
+
+
class Server(object):
  """Plays a role of external interface accessible over XMLRPC.

  Wraps the JobManager / JobGroupManager pair; complex return values are
  exchanged with clients as pickled blobs.
  """

  def __init__(self, machines_file=None, dry_run=False):
    """Default constructor.

    Args:
      machines_file: Path to file storing a list of machines.
      dry_run: If True, the server only simulates command execution.
    """
    CommandExecuter.Configure(dry_run)

    self.job_manager = JobManager(
        machine_manager.MachineManager.FromMachineListFile(
            machines_file or machine_manager.DEFAULT_MACHINES_FILE))

    self.job_group_manager = JobGroupManager(self.job_manager)

    self._logger = logging.getLogger(self.__class__.__name__)

  def ExecuteJobGroup(self, job_group, dry_run=False):
    """Unpickle a job group, mark each job's dry_run flag and submit it."""
    job_group = pickle.loads(job_group)
    self._logger.info('Received ExecuteJobGroup(%r, dry_run=%s) request.',
                      job_group, dry_run)

    for job in job_group.jobs:
      job.dry_run = dry_run
    return self.job_group_manager.AddJobGroup(job_group)

  def GetAllJobGroups(self):
    """Return all known job groups as a pickled list."""
    self._logger.info('Received GetAllJobGroups() request.')
    return pickle.dumps(self.job_group_manager.GetAllJobGroups())

  def KillJobGroup(self, job_group_id):
    """Kill every job in the identified group, blocking until done.

    NOTE(review): the log call formats job_group_id with %d but the value
    is then passed through pickle.loads() -- one of the two appears wrong;
    confirm what the client actually sends on the wire.
    """
    self._logger.info('Received KillJobGroup(%d) request.', job_group_id)
    self.job_group_manager.KillJobGroup(pickle.loads(job_group_id))

  def GetJobGroup(self, job_group_id):
    """Return one job group (by id) as a pickled blob, or pickled None."""
    self._logger.info('Received GetJobGroup(%d) request.', job_group_id)

    return pickle.dumps(self.job_group_manager.GetJobGroup(job_group_id))

  def GetJob(self, job_id):
    """Return one job (by id) as a pickled blob, or pickled None."""
    self._logger.info('Received GetJob(%d) request.', job_id)

    return pickle.dumps(self.job_manager.GetJob(job_id))

  def GetMachineList(self):
    """Return a deep copy of the machine pool as a pickled list."""
    self._logger.info('Received GetMachineList() request.')

    return pickle.dumps(self.job_manager.machine_manager.GetMachineList())

  def StartServer(self):
    """Start the job manager's scheduler thread."""
    self.job_manager.StartJobManager()

  def StopServer(self):
    """Stop the scheduler and wait for its thread to exit."""
    self.job_manager.StopJobManager()
    self.job_manager.join()
+
+
def GetServerOptions():
  """Get server's settings from command line options.

  Returns:
    An optparse options object carrying machines_file and dry_run.
  """
  parser = optparse.OptionParser()
  parser.add_option('-m',
                    '--machines-file',
                    dest='machines_file',
                    default=machine_manager.DEFAULT_MACHINES_FILE,
                    help='The location of the file containing the machines '
                         'database')
  parser.add_option('-n',
                    '--dry-run',
                    dest='dry_run',
                    action='store_true',
                    default=False,
                    help='Start the server in dry-run mode, where jobs will '
                         'not actually be executed.')
  options, _ = parser.parse_args()
  return options
+
+
def Main():
  """Configure logging, start the server and serve XMLRPC forever."""
  logger.SetUpRootLogger(filename='server.log', level=logging.DEBUG)

  options = GetServerOptions()
  server = Server(options.machines_file, options.dry_run)
  server.StartServer()

  def _HandleKeyboardInterrupt(*_):
    # On SIGINT, stop the job manager cleanly before exiting.
    server.StopServer()
    sys.exit(1)

  signal.signal(signal.SIGINT, _HandleKeyboardInterrupt)

  try:
    xmlserver = SimpleXMLRPCServer(
        ('localhost', 8000),
        allow_none=True,
        logRequests=False)
    xmlserver.register_instance(server)
    xmlserver.serve_forever()
  except Exception as ex:
    # Any fatal serving error: log it, shut the manager down, exit nonzero.
    logging.error(ex)
    server.StopServer()
    sys.exit(1)
diff --git a/deprecated/automation/server/server_test.py b/deprecated/automation/server/server_test.py
new file mode 100755
index 00000000..131ebb3b
--- /dev/null
+++ b/deprecated/automation/server/server_test.py
@@ -0,0 +1,26 @@
+#!/usr/bin/python2
+#
+# Copyright 2010 Google Inc. All Rights Reserved.
+"""Machine manager unittest.
+
+MachineManagerTest tests MachineManager.
+"""
+
+__author__ = 'asharif@google.com (Ahmad Sharif)'
+
+import server
+import unittest
+
+
class ServerTest(unittest.TestCase):
  """Smoke test for the automation server's XMLRPC facade."""

  def setUp(self):
    pass

  def testGetAllJobs(self):
    # Fix: Server defines GetAllJobGroups, not GetAllJobs, so the original
    # call raised AttributeError.  Also use print() so the file parses
    # under Python 3 as well.
    s = server.Server()
    print(s.GetAllJobGroups())
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deprecated/automation/server/test_pool.csv b/deprecated/automation/server/test_pool.csv
new file mode 100644
index 00000000..b0700c9b
--- /dev/null
+++ b/deprecated/automation/server/test_pool.csv
@@ -0,0 +1,4 @@
+hostname,label,cpu,cores,os,username
+"chrotomation.mtv.corp.google.com","pc-workstation","core2duo",8,"linux","mobiletc-prebuild"
+"chromeos-test1.mtv.corp.google.com","cr48","atom",1,"chromeos","chromeos"
+"chromeos-test2.mtv.corp.google.com","cr48","atom",1,"chromeos","chromeos"