author    Jordan R Abrahams <ajordanr@google.com>    2021-10-26 20:26:26 +0000
committer Jordan R Abrahams <ajordanr@google.com>    2021-10-28 17:01:14 +0000
commit    70c83ac3df5d50f4fc4612fa2c9a5b8af4455842 (patch)
tree      6afdc3ae810b3dd1a4fc7f032844bcbbaf741587
parent    d080198a6e20a85e3d6c57a9fb168d4e19b3503c (diff)
download  toolchain-utils-70c83ac3df5d50f4fc4612fa2c9a5b8af4455842.tar.gz
Replace non-inclusive language
Wherever possible, this commit removes non-inclusive language as per COIL policy.
Some of this language exists in dead code, so this commit also removes confirmed
dead code. It also includes some minor lint fixes to get the code past the lint
checks.

BUG=b:204057159
TEST=Presubmit tests

Change-Id: I299bfbe5dfcbe761efc5dcb29caea2337abe3f30
Reviewed-on: https://chromium-review.googlesource.com/c/chromiumos/third_party/toolchain-utils/+/3243940
Tested-by: Jordan R Abrahams <ajordanr@google.com>
Reviewed-by: Manoj Gupta <manojgupta@chromium.org>
Reviewed-by: George Burgess <gbiv@chromium.org>
-rwxr-xr-x  android_merge_from_upstream.sh                                                                                   13
-rw-r--r--  compiler_wrapper/sanitizer_flags.go                                                                               2
-rw-r--r--  cros_utils/manifest_versions.py                                                                                   162
-rw-r--r--  cros_utils/misc.py                                                                                                109
-rw-r--r--  crosperf/results_cache.py                                                                                         99
-rwxr-xr-x  crosperf/results_cache_unittest.py                                                                                190
-rw-r--r--  crosperf/test_cache/compare_output/results.pickle (renamed from crosperf/test_cache/compare_output/results.txt)   bin 8124 -> 8081 bytes
-rw-r--r--  crosperf/test_cache/test_input/results.pickle (renamed from crosperf/test_cache/test_input/results.txt)           0
-rw-r--r--  crosperf/test_cache/test_puretelemetry_input/results.pickle (renamed from crosperf/test_cache/test_puretelemetry_input/results.txt)  0
-rwxr-xr-x  llvm_extra/create_llvm_extra.sh                                                                                   2
-rwxr-xr-x  llvm_tools/nightly_revert_checker.py                                                                              97
-rwxr-xr-x  rust_tools/rust_watch.py                                                                                          21
-rwxr-xr-x  setup_chromeos.py                                                                                                 252
-rw-r--r--  unblocked_terms.txt                                                                                               5
14 files changed, 258 insertions, 694 deletions
diff --git a/android_merge_from_upstream.sh b/android_merge_from_upstream.sh
index b63eb6e1..7430b8dd 100755
--- a/android_merge_from_upstream.sh
+++ b/android_merge_from_upstream.sh
@@ -11,9 +11,9 @@
# https://android-review.googlesource.com/c/platform/external/toolchain-utils/+/1132504/1
local_branch_name="merge_with_upstream"
-local_upstream="aosp/master"
+local_upstream="aosp/master" # nocheck
remote="aosp"
-remote_branch="${remote}/upstream-mirror-master"
+remote_branch="${remote}/upstream-mirror-master" # nocheck
my_dir="$(dirname "$(readlink -m "$0")")"
cd "${my_dir}"
@@ -21,7 +21,7 @@ cd "${my_dir}"
ensure_head_is_upstream_main() {
local current_rev main_rev
current_rev="$(git rev-parse HEAD)"
- main_rev="$(git rev-parse ${local_upstream})"
+ main_rev="$(git rev-parse "${local_upstream}")"
if [[ "${current_rev}" != "${main_rev}" ]]; then
echo "Please checkout ${local_upstream} and rerun this" >&2
exit
@@ -49,7 +49,7 @@ ensure_no_local_branch_present() {
get_merge_commit_list() {
local merge_base
- merge_base="$(git merge-base HEAD ${remote_branch})"
+ merge_base="$(git merge-base HEAD "${remote_branch}")"
git log --oneline "${merge_base}..${remote_branch}"
}
@@ -62,6 +62,9 @@ repo start "${local_branch_name}"
commit_list="$(get_merge_commit_list)"
num_commits="$(wc -l <<< "${commit_list}")"
+
+# Disable shellcheck for the sed substitution warning.
+# shellcheck disable=SC2001
commit_message="Merging ${num_commits} commit(s) from Chromium's toolchain-utils
Merged commit digest:
@@ -69,6 +72,6 @@ $(sed 's/^/ /' <<< "${commit_list}")
"
git merge "${remote_branch}" -m "${commit_message}"
-echo 'NOTE: When you try to `repo upload`, repo might show a scary warning'
+echo 'NOTE: When you try to "repo upload", repo might show a scary warning'
echo 'about the number of changes are being uploaded. That should be fine,'
echo 'since repo will only create CLs for commits not known to our remote.'
diff --git a/compiler_wrapper/sanitizer_flags.go b/compiler_wrapper/sanitizer_flags.go
index fe8d1503..abb385a8 100644
--- a/compiler_wrapper/sanitizer_flags.go
+++ b/compiler_wrapper/sanitizer_flags.go
@@ -14,7 +14,7 @@ func processSanitizerFlags(builder *commandBuilder) {
hasSanitizeFuzzerFlags := false
for _, arg := range builder.args {
// TODO: This should probably be -fsanitize= to not match on
- // e.g. -fsanitize-blacklist
+ // e.g. -fsanitize-blocklist
if arg.fromUser {
if strings.HasPrefix(arg.value, "-fsanitize") {
hasSanitizeFlags = true
diff --git a/cros_utils/manifest_versions.py b/cros_utils/manifest_versions.py
deleted file mode 100644
index 4838de3c..00000000
--- a/cros_utils/manifest_versions.py
+++ /dev/null
@@ -1,162 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tools for searching/manipulating the manifests repository."""
-
-from __future__ import print_function
-
-__author__ = 'llozano@google.com (Luis Lozano)'
-
-import copy
-import os
-import re
-import shutil
-import tempfile
-import time
-
-from cros_utils import command_executer
-from cros_utils import logger
-
-MANIFEST_VERSION_MAIN_BRANCH = 'master'
-
-
-def IsCrosVersion(version):
- match = re.search(r'(\d+\.\d+\.\d+\.\d+)', version)
- return match is not None
-
-
-def IsRFormatCrosVersion(version):
- match = re.search(r'(R\d+-\d+\.\d+\.\d+)', version)
- return match is not None
-
-
-def RFormatCrosVersion(version):
- assert IsCrosVersion(version)
- tmp_major, tmp_minor = version.split('.', 1)
- rformat = 'R' + tmp_major + '-' + tmp_minor
- assert IsRFormatCrosVersion(rformat)
- return rformat
-
-
-class ManifestVersions(object):
- """This class handles interactions with the manifests repo."""
-
- def __init__(self, internal=True):
- self.internal = internal
- self.clone_location = tempfile.mkdtemp()
- self.ce = command_executer.GetCommandExecuter()
- if internal:
- versions_git = ('https://chrome-internal.googlesource.com/'
- 'chromeos/manifest-versions.git')
- else:
- versions_git = (
- 'https://chromium.googlesource.com/chromiumos/manifest-versions.git')
- commands = [
- 'cd {0}'.format(self.clone_location),
- 'git clone {0}'.format(versions_git)
- ]
- ret = self.ce.RunCommands(commands)
- if ret:
- logger.GetLogger().LogFatal('Failed to clone manifest-versions.')
-
- def __del__(self):
- if self.clone_location:
- shutil.rmtree(self.clone_location)
-
- def TimeToVersionChromeOS(self, my_time):
- """Convert timestamp to version number, in ChromeOS/Paladin."""
- cur_time = time.mktime(time.gmtime())
- des_time = float(my_time)
- if cur_time - des_time > 7000000:
- logger.GetLogger().LogFatal('The time you specify is too early.')
- commands = [
- 'cd {0}'.format(self.clone_location), 'cd manifest-versions',
- 'git checkout -f $(git rev-list' +
- ' --max-count=1 --before={0} origin/{1})'.format(
- my_time, MANIFEST_VERSION_MAIN_BRANCH)
- ]
- ret = self.ce.RunCommands(commands)
- if ret:
- logger.GetLogger().LogFatal('Failed to checkout manifest at '
- 'specified time')
- path = os.path.realpath('{0}/manifest-versions/LKGM/lkgm.xml'.format(
- self.clone_location))
- pp = path.split('/')
- new_list = copy.deepcopy(pp)
- for i, e in enumerate(pp):
- if e == 'android-LKGM-candidates':
- new_list[i] = 'paladin'
- chrome_path = '/'.join(new_list)
- if not os.path.exists(chrome_path):
- logger.GetLogger().LogOutput('LKGM path is %s' % path)
- logger.GetLogger().LogOutput('Cannot find path %s' % chrome_path)
- pieces = os.path.basename(chrome_path).split('.')
- pieces = pieces[:-2]
- new_base = '.'.join(pieces) + '*'
- wild_path = os.path.join('/', '/'.join(new_list[:-1]), new_base)
- command = 'ls %s' % wild_path
- ret, out, _ = self.ce.RunCommandWOutput(command)
- if ret == 0:
- out = out.strip()
- files = out.split('\n')
- latest = files[-1]
- small = os.path.basename(latest).split('.xml')[0]
- version = pp[-2] + '.' + small
- else:
- small = os.path.basename(path).split('.xml')[0]
- version = pp[-2] + '.' + small
- commands = [
- 'cd {0}'.format(self.clone_location), 'cd manifest-versions',
- 'git checkout {0}'.format(MANIFEST_VERSION_MAIN_BRANCH)
- ]
- self.ce.RunCommands(commands)
- return version
-
- def TimeToVersion(self, my_time):
- """Convert timestamp to version number."""
- cur_time = time.mktime(time.gmtime())
- des_time = float(my_time)
- if cur_time - des_time > 7000000:
- logger.GetLogger().LogFatal('The time you specify is too early.')
- commands = [
- 'cd {0}'.format(self.clone_location), 'cd manifest-versions',
- 'git checkout -f $(git rev-list' +
- ' --max-count=1 --before={0} origin/{1})'.format(
- my_time, MANIFEST_VERSION_MAIN_BRANCH)
- ]
- ret = self.ce.RunCommands(commands)
- if ret:
- logger.GetLogger().LogFatal('Failed to checkout manifest at '
- 'specified time')
- path = os.path.realpath('{0}/manifest-versions/LKGM/lkgm.xml'.format(
- self.clone_location))
- pp = path.split('/')
- small = os.path.basename(path).split('.xml')[0]
- version = pp[-2] + '.' + small
- commands = [
- 'cd {0}'.format(self.clone_location), 'cd manifest-versions',
- 'git checkout {0}'.format(MANIFEST_VERSION_MAIN_BRANCH)
- ]
- self.ce.RunCommands(commands)
- return version
-
- def GetManifest(self, version, to_file):
- """Get the manifest file from a given chromeos-internal version."""
- assert not IsRFormatCrosVersion(version)
- version = version.split('.', 1)[1]
- os.chdir(self.clone_location)
- files = [
- os.path.join(r, f)
- for r, _, fs in os.walk('.')
- for f in fs
- if version in f
- ]
- if files:
- command = 'cp {0} {1}'.format(files[0], to_file)
- ret = self.ce.RunCommand(command)
- if ret:
- raise RuntimeError('Cannot copy manifest to {0}'.format(to_file))
- else:
- raise RuntimeError('Version {0} is not available.'.format(version))
diff --git a/cros_utils/misc.py b/cros_utils/misc.py
index 93d1b3b6..a0d0de73 100644
--- a/cros_utils/misc.py
+++ b/cros_utils/misc.py
@@ -15,7 +15,6 @@ import os
import re
import shutil
import sys
-import traceback
from cros_utils import command_executer
from cros_utils import logger
@@ -24,8 +23,6 @@ CHROMEOS_SCRIPTS_DIR = '/mnt/host/source/src/scripts'
TOOLCHAIN_UTILS_PATH = ('/mnt/host/source/src/third_party/toolchain-utils/'
'cros_utils/toolchain_utils.sh')
-CROS_MAIN_BRANCH = 'cros/master'
-
def GetChromeOSVersionFromLSBVersion(lsb_version):
"""Get Chromeos version from Lsb version."""
@@ -112,8 +109,8 @@ def FormatQuotedCommand(command):
def FormatCommands(commands):
- return ApplySubs(
- str(commands), ('&&', '&&\n'), (';', ';\n'), (r'\n+\s*', '\n'))
+ return ApplySubs(str(commands), ('&&', '&&\n'), (';', ';\n'),
+ (r'\n+\s*', '\n'))
def GetImageDir(chromeos_root, board):
@@ -155,16 +152,16 @@ def GetBuildPackagesCommand(board, usepkg=False, debug=False):
withdebug_flag = '--nowithdebug'
return ('%s/build_packages %s --withdev --withtest --withautotest '
'--skip_toolchain_update %s --board=%s '
- '--accept_licenses=@CHROMEOS' % (CHROMEOS_SCRIPTS_DIR, usepkg_flag,
- withdebug_flag, board))
+ '--accept_licenses=@CHROMEOS' %
+ (CHROMEOS_SCRIPTS_DIR, usepkg_flag, withdebug_flag, board))
def GetBuildImageCommand(board, dev=False):
dev_args = ''
if dev:
dev_args = '--noenable_rootfs_verification --disk_layout=2gb-rootfs'
- return ('%s/build_image --board=%s %s test' % (CHROMEOS_SCRIPTS_DIR, board,
- dev_args))
+ return ('%s/build_image --board=%s %s test' %
+ (CHROMEOS_SCRIPTS_DIR, board, dev_args))
def GetSetupBoardCommand(board, usepkg=None, force=None):
@@ -193,8 +190,8 @@ def CanonicalizePath(path):
def GetCtargetFromBoard(board, chromeos_root):
"""Get Ctarget from board."""
base_board = board.split('_')[0]
- command = ('source %s; get_ctarget_from_board %s' % (TOOLCHAIN_UTILS_PATH,
- base_board))
+ command = ('source %s; get_ctarget_from_board %s' %
+ (TOOLCHAIN_UTILS_PATH, base_board))
ce = command_executer.GetCommandExecuter()
ret, out, _ = ce.ChrootRunCommandWOutput(chromeos_root, command)
if ret != 0:
@@ -207,8 +204,8 @@ def GetCtargetFromBoard(board, chromeos_root):
def GetArchFromBoard(board, chromeos_root):
"""Get Arch from board."""
base_board = board.split('_')[0]
- command = (
- 'source %s; get_board_arch %s' % (TOOLCHAIN_UTILS_PATH, base_board))
+ command = ('source %s; get_board_arch %s' %
+ (TOOLCHAIN_UTILS_PATH, base_board))
ce = command_executer.GetCommandExecuter()
ret, out, _ = ce.ChrootRunCommandWOutput(chromeos_root, command)
if ret != 0:
@@ -316,23 +313,22 @@ def WorkingDirectory(new_dir):
def HasGitStagedChanges(git_dir):
"""Return True if git repository has staged changes."""
- command = 'cd {0} && git diff --quiet --cached --exit-code HEAD'.format(
- git_dir)
+ command = f'cd {git_dir} && git diff --quiet --cached --exit-code HEAD'
return command_executer.GetCommandExecuter().RunCommand(
command, print_to_console=False)
def HasGitUnstagedChanges(git_dir):
"""Return True if git repository has un-staged changes."""
- command = 'cd {0} && git diff --quiet --exit-code HEAD'.format(git_dir)
+ command = f'cd {git_dir} && git diff --quiet --exit-code HEAD'
return command_executer.GetCommandExecuter().RunCommand(
command, print_to_console=False)
def HasGitUntrackedChanges(git_dir):
"""Return True if git repository has un-tracked changes."""
- command = ('cd {0} && test -z '
- '$(git ls-files --exclude-standard --others)').format(git_dir)
+ command = (f'cd {git_dir} && test -z '
+ '$(git ls-files --exclude-standard --others)')
return command_executer.GetCommandExecuter().RunCommand(
command, print_to_console=False)
@@ -352,8 +348,8 @@ def GitGetCommitHash(git_dir, commit_symbolic_name):
The git hash for the symbolic name or None if fails.
"""
- command = ('cd {0} && git log -n 1 --pretty="format:%H" {1}').format(
- git_dir, commit_symbolic_name)
+ command = (f'cd {git_dir} && git log -n 1'
+ f' --pretty="format:%H" {commit_symbolic_name}')
rv, out, _ = command_executer.GetCommandExecuter().RunCommandWOutput(
command, print_to_console=False)
if rv == 0:
@@ -393,7 +389,7 @@ def GetGitChangesAsList(git_dir, path=None, staged=False):
Returns:
A list containing all the changed files.
"""
- command = 'cd {0} && git diff --name-only'.format(git_dir)
+ command = f'cd {git_dir} && git diff --name-only'
if staged:
command += ' --cached'
if path:
@@ -408,8 +404,8 @@ def GetGitChangesAsList(git_dir, path=None, staged=False):
def IsChromeOsTree(chromeos_root):
return (os.path.isdir(
- os.path.join(chromeos_root, 'src/third_party/chromiumos-overlay')) and
- os.path.isdir(os.path.join(chromeos_root, 'manifest')))
+ os.path.join(chromeos_root, 'src/third_party/chromiumos-overlay'))
+ and os.path.isdir(os.path.join(chromeos_root, 'manifest')))
def DeleteChromeOsTree(chromeos_root, dry_run=False):
@@ -423,11 +419,10 @@ def DeleteChromeOsTree(chromeos_root, dry_run=False):
True if everything is ok.
"""
if not IsChromeOsTree(chromeos_root):
- logger.GetLogger().LogWarning(
- '"{0}" does not seem to be a valid chromeos tree, do nothing.'.format(
- chromeos_root))
+ logger.GetLogger().LogWarning(f'"{chromeos_root}" does not seem to be a'
+ ' valid chromeos tree, do nothing.')
return False
- cmd0 = 'cd {0} && cros_sdk --delete'.format(chromeos_root)
+ cmd0 = f'cd {chromeos_root} && cros_sdk --delete'
if dry_run:
print(cmd0)
else:
@@ -435,10 +430,10 @@ def DeleteChromeOsTree(chromeos_root, dry_run=False):
cmd0, print_to_console=True) != 0:
return False
- cmd1 = ('export CHROMEOSDIRNAME="$(dirname $(cd {0} && pwd))" && '
- 'export CHROMEOSBASENAME="$(basename $(cd {0} && pwd))" && '
- 'cd $CHROMEOSDIRNAME && sudo rm -fr $CHROMEOSBASENAME'
- ).format(chromeos_root)
+ cmd1 = (
+ f'export CHROMEOSDIRNAME="$(dirname $(cd {chromeos_root} && pwd))" && '
+ f'export CHROMEOSBASENAME="$(basename $(cd {chromeos_root} && pwd))" && '
+ 'cd $CHROMEOSDIRNAME && sudo rm -fr $CHROMEOSBASENAME')
if dry_run:
print(cmd1)
return True
@@ -447,54 +442,6 @@ def DeleteChromeOsTree(chromeos_root, dry_run=False):
cmd1, print_to_console=True) == 0
-def ApplyGerritPatches(chromeos_root,
- gerrit_patch_string,
- branch=CROS_MAIN_BRANCH):
- """Apply gerrit patches on a chromeos tree.
-
- Args:
- chromeos_root: chromeos tree path
- gerrit_patch_string: a patch string just like the one gives to cbuildbot,
- 'id1 id2 *id3 ... idn'. A prefix of '* means this is an internal patch.
- branch: the tree based on which to apply the patches.
-
- Returns:
- True if success.
- """
-
- ### First of all, we need chromite libs
- sys.path.append(os.path.join(chromeos_root, 'chromite'))
- # Imports below are ok after modifying path to add chromite.
- # Pylint cannot detect that and complains.
- # pylint: disable=import-error, import-outside-toplevel
- from lib import git
- from lib import gerrit
- manifest = git.ManifestCheckout(chromeos_root)
- patch_list = gerrit_patch_string.split(' ')
- ### This takes time, print log information.
- logger.GetLogger().LogOutput('Retrieving patch information from server ...')
- patch_info_list = gerrit.GetGerritPatchInfo(patch_list)
- for pi in patch_info_list:
- project_checkout = manifest.FindCheckout(pi.project, strict=False)
- if not project_checkout:
- logger.GetLogger().LogError(
- 'Failed to find patch project "{project}" in manifest.'.format(
- project=pi.project))
- return False
-
- pi_str = '{project}:{ref}'.format(project=pi.project, ref=pi.ref)
- try:
- project_git_path = project_checkout.GetPath(absolute=True)
- logger.GetLogger().LogOutput('Applying patch "{0}" in "{1}" ...'.format(
- pi_str, project_git_path))
- pi.Apply(project_git_path, branch, trivial=False)
- except Exception:
- traceback.print_exc(file=sys.stdout)
- logger.GetLogger().LogError('Failed to apply patch "{0}"'.format(pi_str))
- return False
- return True
-
-
def BooleanPrompt(prompt='Do you want to continue?',
default=True,
true_value='yes',
@@ -515,8 +462,8 @@ def BooleanPrompt(prompt='Do you want to continue?',
true_value, false_value = true_value.lower(), false_value.lower()
true_text, false_text = true_value, false_value
if true_value == false_value:
- raise ValueError(
- 'true_value and false_value must differ: got %r' % true_value)
+ raise ValueError('true_value and false_value must differ: got %r' %
+ true_value)
if default:
true_text = true_text[0].upper() + true_text[1:]
diff --git a/crosperf/results_cache.py b/crosperf/results_cache.py
index 87e30ecc..5525858c 100644
--- a/crosperf/results_cache.py
+++ b/crosperf/results_cache.py
@@ -27,7 +27,7 @@ import results_report
import test_flag
SCRATCH_DIR = os.path.expanduser('~/cros_scratch')
-RESULTS_FILE = 'results.txt'
+RESULTS_FILE = 'results.pickle'
MACHINE_FILE = 'machine.txt'
AUTOTEST_TARBALL = 'autotest.tbz2'
RESULTS_TARBALL = 'results.tbz2'
@@ -197,9 +197,9 @@ class Result(object):
keyvals_dict[key] = result_dict['value']
elif 'values' in result_dict:
values = result_dict['values']
- if ('type' in result_dict and
- result_dict['type'] == 'list_of_scalar_values' and values and
- values != 'null'):
+ if ('type' in result_dict
+ and result_dict['type'] == 'list_of_scalar_values' and values
+ and values != 'null'):
keyvals_dict[key] = sum(values) / float(len(values))
else:
keyvals_dict[key] = values
@@ -245,13 +245,14 @@ class Result(object):
results_in_chroot = os.path.join(self.chromeos_root, 'chroot', 'tmp')
if not self.temp_dir:
self.temp_dir = tempfile.mkdtemp(dir=results_in_chroot)
- command = 'cp -r {0}/* {1}'.format(self.results_dir, self.temp_dir)
+ command = f'cp -r {self.results_dir}/* {self.temp_dir}'
self.ce.RunCommand(command, print_to_console=False)
command = ('./generate_test_report --no-color --csv %s' %
(os.path.join('/tmp', os.path.basename(self.temp_dir))))
- _, out, _ = self.ce.ChrootRunCommandWOutput(
- self.chromeos_root, command, print_to_console=False)
+ _, out, _ = self.ce.ChrootRunCommandWOutput(self.chromeos_root,
+ command,
+ print_to_console=False)
keyvals_dict = {}
tmp_dir_in_chroot = misc.GetInsideChrootPath(self.chromeos_root,
self.temp_dir)
@@ -322,8 +323,8 @@ class Result(object):
idle_functions = {
'[kernel.kallsyms]':
- ('intel_idle', 'arch_cpu_idle', 'intel_idle', 'cpu_startup_entry',
- 'default_idle', 'cpu_idle_loop', 'do_idle'),
+ ('intel_idle', 'arch_cpu_idle', 'intel_idle', 'cpu_startup_entry',
+ 'default_idle', 'cpu_idle_loop', 'do_idle'),
}
idle_samples = 0
@@ -390,8 +391,8 @@ class Result(object):
result = (
self.FindFilesInResultsDir('-name histograms.json').splitlines())
else:
- result = (
- self.FindFilesInResultsDir('-name results-chart.json').splitlines())
+ result = (self.FindFilesInResultsDir(
+ '-name results-chart.json').splitlines())
return result
def GetTurbostatFile(self):
@@ -449,8 +450,8 @@ class Result(object):
if debug_path:
symfs = '--symfs ' + debug_path
- vmlinux = '--vmlinux ' + os.path.join(debug_path, 'usr', 'lib', 'debug',
- 'boot', 'vmlinux')
+ vmlinux = '--vmlinux ' + os.path.join(debug_path, 'usr', 'lib',
+ 'debug', 'boot', 'vmlinux')
kallsyms = ''
print('** WARNING **: --kallsyms option not applied, no System.map-* '
'for downloaded image.')
@@ -546,9 +547,9 @@ class Result(object):
values = value_dict['values']
if not values:
continue
- if ('type' in value_dict and
- value_dict['type'] == 'list_of_scalar_values' and
- values != 'null'):
+ if ('type' in value_dict
+ and value_dict['type'] == 'list_of_scalar_values'
+ and values != 'null'):
result = sum(values) / float(len(values))
else:
result = values
@@ -746,8 +747,9 @@ class Result(object):
# order.
heapq.heappush(cmd_top5_cpu_use[cmd_with_pid], round(cpu_use, 1))
- for consumer, usage in sorted(
- cmd_total_cpu_use.items(), key=lambda x: x[1], reverse=True):
+ for consumer, usage in sorted(cmd_total_cpu_use.items(),
+ key=lambda x: x[1],
+ reverse=True):
# Iterate through commands by descending order of total CPU usage.
topcmd = {
'cmd': consumer,
@@ -913,7 +915,8 @@ class Result(object):
self.chromeos_root, path_str)
if status:
# Error of reading a perf.data profile is fatal.
- raise PerfDataReadError(f'Failed to read perf.data profile: {path_str}')
+ raise PerfDataReadError(
+ f'Failed to read perf.data profile: {path_str}')
# Pattern to search a line with "perf record" command line:
# # cmdline : /usr/bin/perf record -e instructions -p 123"
@@ -938,7 +941,8 @@ class Result(object):
break
else:
# cmdline wasn't found in the header. It's a fatal error.
- raise PerfDataReadError(f'Perf command line is not found in {path_str}')
+ raise PerfDataReadError(
+ f'Perf command line is not found in {path_str}')
return pids
def VerifyPerfDataPID(self):
@@ -976,11 +980,11 @@ class Result(object):
# Note that this function doesn't know anything about whether there is a
# cache hit or miss. It should process results agnostic of the cache hit
# state.
- if (self.results_file and self.suite == 'telemetry_Crosperf' and
- 'histograms.json' in self.results_file[0]):
+ if (self.results_file and self.suite == 'telemetry_Crosperf'
+ and 'histograms.json' in self.results_file[0]):
self.keyvals = self.ProcessHistogramsResults()
- elif (self.results_file and self.suite != 'telemetry_Crosperf' and
- 'results-chart.json' in self.results_file[0]):
+ elif (self.results_file and self.suite != 'telemetry_Crosperf'
+ and 'results-chart.json' in self.results_file[0]):
self.keyvals = self.ProcessChartResults()
else:
if not use_cache:
@@ -1134,15 +1138,16 @@ class Result(object):
f.write(machine_manager.machine_checksum_string[self.label.name])
if os.path.exists(cache_dir):
- command = 'rm -rf {0}'.format(cache_dir)
+ command = f'rm -rf {cache_dir}'
self.ce.RunCommand(command)
- command = 'mkdir -p {0} && '.format(os.path.dirname(cache_dir))
- command += 'chmod g+x {0} && '.format(temp_dir)
- command += 'mv {0} {1}'.format(temp_dir, cache_dir)
+ parent_dir = os.path.dirname(cache_dir)
+ command = f'mkdir -p {parent_dir} && '
+ command += f'chmod g+x {temp_dir} && '
+ command += f'mv {temp_dir} {cache_dir}'
ret = self.ce.RunCommand(command)
if ret:
- command = 'rm -rf {0}'.format(temp_dir)
+ command = f'rm -rf {temp_dir}'
self.ce.RunCommand(command)
raise RuntimeError('Could not move dir %s to dir %s' %
(temp_dir, cache_dir))
@@ -1241,8 +1246,8 @@ class TelemetryResult(Result):
self.err = pickle.load(f)
self.retval = pickle.load(f)
- self.chrome_version = (
- super(TelemetryResult, self).GetChromeVersionFromCache(cache_dir))
+ self.chrome_version = (super(TelemetryResult,
+ self).GetChromeVersionFromCache(cache_dir))
self.ProcessResults()
@@ -1304,10 +1309,10 @@ class ResultsCache(object):
self.run_local = None
self.cwp_dso = None
- def Init(self, chromeos_image, chromeos_root, test_name, iteration, test_args,
- profiler_args, machine_manager, machine, board, cache_conditions,
- logger_to_use, log_level, label, share_cache, suite,
- show_all_results, run_local, cwp_dso):
+ def Init(self, chromeos_image, chromeos_root, test_name, iteration,
+ test_args, profiler_args, machine_manager, machine, board,
+ cache_conditions, logger_to_use, log_level, label, share_cache,
+ suite, show_all_results, run_local, cwp_dso):
self.chromeos_image = chromeos_image
self.chromeos_root = chromeos_root
self.test_name = test_name
@@ -1319,8 +1324,8 @@ class ResultsCache(object):
self.machine_manager = machine_manager
self.machine = machine
self._logger = logger_to_use
- self.ce = command_executer.GetCommandExecuter(
- self._logger, log_level=log_level)
+ self.ce = command_executer.GetCommandExecuter(self._logger,
+ log_level=log_level)
self.label = label
self.share_cache = share_cache
self.suite = suite
@@ -1406,15 +1411,16 @@ class ResultsCache(object):
temp_test_args = '%s %s %s' % (self.test_args, self.profiler_args,
self.run_local)
- test_args_checksum = hashlib.md5(temp_test_args.encode('utf-8')).hexdigest()
+ test_args_checksum = hashlib.md5(
+ temp_test_args.encode('utf-8')).hexdigest()
return (image_path_checksum, self.test_name, str(self.iteration),
- test_args_checksum, checksum, machine_checksum, machine_id_checksum,
- str(self.CACHE_VERSION))
+ test_args_checksum, checksum, machine_checksum,
+ machine_id_checksum, str(self.CACHE_VERSION))
def ReadResult(self):
if CacheConditions.FALSE in self.cache_conditions:
cache_dir = self.GetCacheDirForWrite()
- command = 'rm -rf %s' % (cache_dir,)
+ command = 'rm -rf %s' % (cache_dir, )
self.ce.RunCommand(command)
return None
cache_dir = self.GetCacheDirForRead()
@@ -1427,14 +1433,15 @@ class ResultsCache(object):
if self.log_level == 'verbose':
self._logger.LogOutput('Trying to read from cache dir: %s' % cache_dir)
- result = Result.CreateFromCacheHit(self._logger, self.log_level, self.label,
- self.machine, cache_dir, self.test_name,
- self.suite, self.cwp_dso)
+ result = Result.CreateFromCacheHit(self._logger, self.log_level,
+ self.label, self.machine, cache_dir,
+ self.test_name, self.suite,
+ self.cwp_dso)
if not result:
return None
- if (result.retval == 0 or
- CacheConditions.RUN_SUCCEEDED not in self.cache_conditions):
+ if (result.retval == 0
+ or CacheConditions.RUN_SUCCEEDED not in self.cache_conditions):
return result
return None
diff --git a/crosperf/results_cache_unittest.py b/crosperf/results_cache_unittest.py
index df3a35e9..94b104c2 100755
--- a/crosperf/results_cache_unittest.py
+++ b/crosperf/results_cache_unittest.py
@@ -11,6 +11,7 @@ from __future__ import print_function
import io
import os
+import pickle
import shutil
import tempfile
import unittest
@@ -31,6 +32,8 @@ from cros_utils import command_executer
from cros_utils import logger
from cros_utils import misc
+# The following hardcoded string has blocked words replaced, and thus
+# is not representative of a true crosperf output.
# pylint: disable=line-too-long
OUTPUT = """CMD (True): ./test_that.sh\
--remote=172.17.128.241 --board=lumpy LibCBench
@@ -42,13 +45,13 @@ INFO : Running the following control files 1 times:
INFO : * 'client/site_tests/platform_LibCBench/control'
INFO : Running client test client/site_tests/platform_LibCBench/control
-./server/autoserv -m 172.17.128.241 --ssh-port 22 -c client/site_tests/platform_LibCBench/control -r /tmp/test_that.PO1234567/platform_LibCBench --test-retry=0 --args
+./server/autoserv -m 172.17.128.241 --ssh-port 22 -c client/site_tests/platform_LibCBench/control -r /tmp/test_that.PO1234567/platform_LibCBench --test-retry=0 --args
ERROR:root:import statsd failed, no stats will be reported.
14:20:22 INFO | Results placed in /tmp/test_that.PO1234567/platform_LibCBench
14:20:22 INFO | Processing control file
-14:20:23 INFO | Starting master ssh connection '/usr/bin/ssh -a -x -N -o ControlMaster=yes -o ControlPath=/tmp/_autotmp_VIIP67ssh-master/socket -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes -o ConnectTimeout=30 -o ServerAliveInterval=180 -o ServerAliveCountMax=3 -o ConnectionAttempts=4 -o Protocol=2 -l root -p 22 172.17.128.241'
+14:20:23 INFO | Starting main ssh connection '/usr/bin/ssh -a -x -N -o ControlMain=yes -o ControlPath=/tmp/_autotmp_VIIP67ssh-main/socket -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes -o ConnectTimeout=30 -o ServerAliveInterval=180 -o ServerAliveCountMax=3 -o ConnectionAttempts=4 -o Protocol=2 -l root -p 22 172.17.128.241'
14:20:23 ERROR| [stderr] Warning: Permanently added '172.17.128.241' (RSA) to the list of known hosts.
-14:20:23 INFO | INFO ---- ---- kernel=3.8.11 localtime=May 22 14:20:23 timestamp=1369257623
+14:20:23 INFO | INFO\t----\t----\tkernel=3.8.11\tlocaltime=May 22 14:20:23\ttimestamp=1369257623
14:20:23 INFO | Installing autotest on 172.17.128.241
14:20:23 INFO | Using installation dir /usr/local/autotest
14:20:23 WARNI| No job_repo_url for <remote host: 172.17.128.241>
@@ -59,11 +62,11 @@ ERROR:root:import statsd failed, no stats will be reported.
14:20:24 INFO | Entered autotestd_monitor.
14:20:24 INFO | Finished launching tail subprocesses.
14:20:24 INFO | Finished waiting on autotestd to start.
-14:20:26 INFO | START ---- ---- timestamp=1369257625 localtime=May 22 14:20:25
-14:20:26 INFO | START platform_LibCBench platform_LibCBench timestamp=1369257625 localtime=May 22 14:20:25
-14:20:30 INFO | GOOD platform_LibCBench platform_LibCBench timestamp=1369257630 localtime=May 22 14:20:30 completed successfully
-14:20:30 INFO | END GOOD platform_LibCBench platform_LibCBench timestamp=1369257630 localtime=May 22 14:20:30
-14:20:31 INFO | END GOOD ---- ---- timestamp=1369257630 localtime=May 22 14:20:30
+14:20:26 INFO | START\t----\t----\ttimestamp=1369257625\tlocaltime=May 22 14:20:25
+14:20:26 INFO | \tSTART\tplatform_LibCBench\tplatform_LibCBench\ttimestamp=1369257625\tlocaltime=May 22 14:20:25
+14:20:30 INFO | \t\tGOOD\tplatform_LibCBench\tplatform_LibCBench\ttimestamp=1369257630\tlocaltime=May 22 14:20:30\tcompleted successfully
+14:20:30 INFO | \tEND GOOD\tplatform_LibCBench\tplatform_LibCBench\ttimestamp=1369257630\tlocaltime=May 22 14:20:30
+14:20:31 INFO | END GOOD\t----\t----\ttimestamp=1369257630\tlocaltime=May 22 14:20:30
14:20:31 INFO | Got lock of exit_code_file.
14:20:31 INFO | Released lock of exit_code_file and closed it.
OUTPUT: ==============================
@@ -72,14 +75,14 @@ Done: 0% [ ]
OUTPUT: Thread Status:
RUNNING: 1 ('ttt: LibCBench (1)' 0:01:21)
Machine Status:
-Machine Thread Lock Status Checksum
+Machine Thread Lock Status Checksum
172.17.128.241 ttt: LibCBench (1) True RUNNING 3ba9f2ecbb222f20887daea5583d86ba
OUTPUT: ==============================
14:20:33 INFO | Killing child processes.
14:20:33 INFO | Client complete
14:20:33 INFO | Finished processing control file
-14:20:33 INFO | Starting master ssh connection '/usr/bin/ssh -a -x -N -o ControlMaster=yes -o ControlPath=/tmp/_autotmp_aVJUgmssh-master/socket -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes -o ConnectTimeout=30 -o ServerAliveInterval=180 -o ServerAliveCountMax=3 -o ConnectionAttempts=4 -o Protocol=2 -l root -p 22 172.17.128.241'
+14:20:33 INFO | Starting main ssh connection '/usr/bin/ssh -a -x -N -o ControlMain=yes -o ControlPath=/tmp/_autotmp_aVJUgmssh-main/socket -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes -o ConnectTimeout=30 -o ServerAliveInterval=180 -o ServerAliveCountMax=3 -o ConnectionAttempts=4 -o Protocol=2 -l root -p 22 172.17.128.241'
14:20:33 ERROR| [stderr] Warning: Permanently added '172.17.128.241' (RSA) to the list of known hosts.
INFO : Test results:
@@ -116,7 +119,7 @@ platform_LibCBench/platform_LibCBench b_utf8_onebyone__0_
-------------------------------------------------------------------
Total PASS: 2/2 (100%)
-INFO : Elapsed time: 0m16s
+INFO : Elapsed time: 0m16s
"""
error = """
@@ -484,8 +487,8 @@ class ResultTest(unittest.TestCase):
def testCreateFromRun(self):
result = MockResult.CreateFromRun(logger.GetLogger(), 'average',
- self.mock_label, 'remote1', OUTPUT, error,
- 0, True)
+ self.mock_label, 'remote1', OUTPUT,
+ error, 0, True)
self.assertEqual(result.keyvals, keyvals)
self.assertEqual(result.chroot_results_dir,
'/tmp/test_that.PO1234567/platform_LibCBench')
@@ -533,7 +536,8 @@ class ResultTest(unittest.TestCase):
mock_runcmd.call_args_list[1])
self.assertEqual(mock_runcmd.call_args_list[0],
mock_runcmd.call_args_list[2])
- self.assertEqual(mock_runcmd.call_args_list[0][0], ('mkdir -p /tmp/test',))
+ self.assertEqual(mock_runcmd.call_args_list[0][0],
+ ('mkdir -p /tmp/test', ))
# test 3. CopyFiles returns 1 (fails).
mock_copyfiles.return_value = 1
@@ -715,7 +719,8 @@ class ResultTest(unittest.TestCase):
mock_mkdtemp.return_value = TMP_DIR1
mock_chrootruncmd.return_value = [
- '', ('%s,PASS\n%s/telemetry_Crosperf,PASS\n') % (TMP_DIR1, TMP_DIR1), ''
+ '', ('%s,PASS\n%s/telemetry_Crosperf,PASS\n') % (TMP_DIR1, TMP_DIR1),
+ ''
]
mock_getpath.return_value = TMP_DIR1
self.result.ce.ChrootRunCommandWOutput = mock_chrootruncmd
@@ -730,7 +735,7 @@ class ResultTest(unittest.TestCase):
self.assertEqual(self.kv_dict, {'': 'PASS', 'telemetry_Crosperf': 'PASS'})
self.assertEqual(mock_runcmd.call_count, 1)
self.assertEqual(mock_runcmd.call_args_list[0][0],
- ('cp -r /tmp/test_that_resultsNmq/* %s' % TMP_DIR1,))
+ ('cp -r /tmp/test_that_resultsNmq/* %s' % TMP_DIR1, ))
self.assertEqual(mock_chrootruncmd.call_count, 1)
self.assertEqual(
mock_chrootruncmd.call_args_list[0][0],
@@ -770,7 +775,8 @@ class ResultTest(unittest.TestCase):
@mock.patch.object(command_executer.CommandExecuter,
'ChrootRunCommandWOutput')
@mock.patch.object(os.path, 'exists')
- def test_get_samples(self, mock_exists, mock_get_total_samples, mock_getpath):
+ def test_get_samples(self, mock_exists, mock_get_total_samples,
+ mock_getpath):
self.result.perf_data_files = ['/tmp/results/perf.data']
self.result.board = 'samus'
mock_getpath.return_value = '/usr/chromeos/chroot/tmp/results/perf.data'
@@ -811,7 +817,7 @@ class ResultTest(unittest.TestCase):
res = self.result.FindFilesInResultsDir('-name perf.data')
self.assertEqual(mock_runcmd.call_count, 1)
self.assertEqual(mock_runcmd.call_args_list[0][0],
- ('find /tmp/test_results -name perf.data',))
+ ('find /tmp/test_results -name perf.data', ))
self.assertEqual(res, '/tmp/test_results/perf.data')
mock_runcmd.reset_mock()
@@ -827,7 +833,8 @@ class ResultTest(unittest.TestCase):
self.result.FindFilesInResultsDir = mock_findfiles
res = self.result.GetPerfDataFiles()
self.assertEqual(res, ['line1', 'line1'])
- self.assertEqual(mock_findfiles.call_args_list[0][0], ('-name perf.data',))
+ self.assertEqual(mock_findfiles.call_args_list[0][0],
+ ('-name perf.data', ))
def test_get_perf_report_files(self):
self.args = None
@@ -958,16 +965,18 @@ class ResultTest(unittest.TestCase):
"""Verify perf PID which is present in TOP_DATA."""
self.result.top_cmds = TOP_DATA
# pid is present in TOP_DATA.
- with mock.patch.object(
- Result, 'ReadPidFromPerfData', return_value=['5713']):
+ with mock.patch.object(Result,
+ 'ReadPidFromPerfData',
+ return_value=['5713']):
self.result.VerifyPerfDataPID()
def test_verify_perf_data_pid_fail(self):
"""Test perf PID missing in top raises the error."""
self.result.top_cmds = TOP_DATA
# pid is not in the list of top processes.
- with mock.patch.object(
- Result, 'ReadPidFromPerfData', return_value=['9999']):
+ with mock.patch.object(Result,
+ 'ReadPidFromPerfData',
+ return_value=['9999']):
with self.assertRaises(PidVerificationError):
self.result.VerifyPerfDataPID()
@@ -976,7 +985,9 @@ class ResultTest(unittest.TestCase):
def test_read_pid_from_perf_data_ok(self, mock_runcmd):
"""Test perf header parser, normal flow."""
self.result.ce.ChrootRunCommandWOutput = mock_runcmd
- self.result.perf_data_files = ['/tmp/chromeos/chroot/tmp/results/perf.data']
+ self.result.perf_data_files = [
+ '/tmp/chromeos/chroot/tmp/results/perf.data'
+ ]
exp_pid = '12345'
mock_runcmd.return_value = (0, PERF_DATA_HEADER.format(pid=exp_pid), '')
pids = self.result.ReadPidFromPerfData()
@@ -1007,7 +1018,9 @@ class ResultTest(unittest.TestCase):
def test_read_pid_from_perf_data_no_pid(self, mock_runcmd):
"""Test perf.data without PID."""
self.result.ce.ChrootRunCommandWOutput = mock_runcmd
- self.result.perf_data_files = ['/tmp/chromeos/chroot/tmp/results/perf.data']
+ self.result.perf_data_files = [
+ '/tmp/chromeos/chroot/tmp/results/perf.data'
+ ]
cmd_line = '# cmdline : /usr/bin/perf record -e instructions'
mock_runcmd.return_value = (0, cmd_line, '')
pids = self.result.ReadPidFromPerfData()
@@ -1019,7 +1032,9 @@ class ResultTest(unittest.TestCase):
def test_read_pid_from_perf_data_system_wide(self, mock_runcmd):
"""Test reading from system-wide profile with PID."""
self.result.ce.ChrootRunCommandWOutput = mock_runcmd
- self.result.perf_data_files = ['/tmp/chromeos/chroot/tmp/results/perf.data']
+ self.result.perf_data_files = [
+ '/tmp/chromeos/chroot/tmp/results/perf.data'
+ ]
# There is '-p <pid>' in command line but it's still system-wide: '-a'.
cmd_line = '# cmdline : /usr/bin/perf record -e instructions -a -p 1234'
mock_runcmd.return_value = (0, cmd_line, '')
@@ -1032,7 +1047,9 @@ class ResultTest(unittest.TestCase):
def test_read_pid_from_perf_data_read_fail(self, mock_runcmd):
"""Failure to read perf.data raises the error."""
self.result.ce.ChrootRunCommandWOutput = mock_runcmd
- self.result.perf_data_files = ['/tmp/chromeos/chroot/tmp/results/perf.data']
+ self.result.perf_data_files = [
+ '/tmp/chromeos/chroot/tmp/results/perf.data'
+ ]
# Error status of the profile read.
mock_runcmd.return_value = (1, '', '')
with self.assertRaises(PerfDataReadError):
@@ -1043,7 +1060,9 @@ class ResultTest(unittest.TestCase):
def test_read_pid_from_perf_data_fail(self, mock_runcmd):
"""Failure to find cmdline in perf.data header raises the error."""
self.result.ce.ChrootRunCommandWOutput = mock_runcmd
- self.result.perf_data_files = ['/tmp/chromeos/chroot/tmp/results/perf.data']
+ self.result.perf_data_files = [
+ '/tmp/chromeos/chroot/tmp/results/perf.data'
+ ]
# Empty output.
mock_runcmd.return_value = (0, '', '')
with self.assertRaises(PerfDataReadError):
@@ -1273,7 +1292,6 @@ class ResultTest(unittest.TestCase):
@mock.patch.object(misc, 'GetOutsideChrootPath')
def test_populate_from_run(self, mock_getpath):
-
def FakeGetResultsDir():
self.callGetResultsDir = True
return '/tmp/results_dir'
@@ -1361,7 +1379,6 @@ class ResultTest(unittest.TestCase):
return {'Total': 10}
def test_process_results(self):
-
def FakeGatherPerfResults():
self.callGatherPerfResults = True
@@ -1407,16 +1424,17 @@ class ResultTest(unittest.TestCase):
self.result.ProcessResults()
shutil.rmtree(os.path.dirname(self.result.results_file[0]))
# Verify the summary for the story is correct
- self.assertEqual(self.result.keyvals['timeToFirstContentfulPaint__typical'],
- [880.000, u'ms_smallerIsBetter'])
+ self.assertEqual(
+ self.result.keyvals['timeToFirstContentfulPaint__typical'],
+ [880.000, u'ms_smallerIsBetter'])
# Veirfy the summary for a certain stroy tag is correct
self.assertEqual(
- self.result
- .keyvals['timeToFirstContentfulPaint__cache_temperature:cold'],
+ self.result.
+ keyvals['timeToFirstContentfulPaint__cache_temperature:cold'],
[1000.000, u'ms_smallerIsBetter'])
self.assertEqual(
- self.result
- .keyvals['timeToFirstContentfulPaint__cache_temperature:warm'],
+ self.result.
+ keyvals['timeToFirstContentfulPaint__cache_temperature:warm'],
[800.000, u'ms_smallerIsBetter'])
@mock.patch.object(Result, 'ProcessCpustatsResults')
@@ -1572,7 +1590,8 @@ class ResultTest(unittest.TestCase):
u'telemetry_page_measurement_results__num_errored': [0, u'count'],
u'string-fasta__string-fasta': [23.2, u'ms'],
u'crypto-sha1__crypto-sha1': [11.6, u'ms'],
- u'bitops-3bit-bits-in-byte__bitops-3bit-bits-in-byte': [3.2, u'ms'],
+ u'bitops-3bit-bits-in-byte__bitops-3bit-bits-in-byte':
+ [3.2, u'ms'],
u'access-nsieve__access-nsieve': [7.9, u'ms'],
u'bitops-nsieve-bits__bitops-nsieve-bits': [9.4, u'ms'],
u'string-validate-input__string-validate-input': [19.3, u'ms'],
@@ -1610,7 +1629,8 @@ class ResultTest(unittest.TestCase):
u'telemetry_page_measurement_results__num_errored': [0, u'count'],
u'string-fasta__string-fasta': [23.2, u'ms'],
u'crypto-sha1__crypto-sha1': [11.6, u'ms'],
- u'bitops-3bit-bits-in-byte__bitops-3bit-bits-in-byte': [3.2, u'ms'],
+ u'bitops-3bit-bits-in-byte__bitops-3bit-bits-in-byte':
+ [3.2, u'ms'],
u'access-nsieve__access-nsieve': [7.9, u'ms'],
u'bitops-nsieve-bits__bitops-nsieve-bits': [9.4, u'ms'],
u'string-validate-input__string-validate-input': [19.3, u'ms'],
@@ -1657,8 +1677,9 @@ class ResultTest(unittest.TestCase):
self.assertEqual(mock_getroot.call_count, 1)
self.assertEqual(mock_runcmd.call_count, 2)
self.assertEqual(mock_runcmd.call_args_list[0][0],
- ('rm -rf test_results_dir',))
- self.assertEqual(mock_runcmd.call_args_list[1][0], ('rm -rf testtemp_dir',))
+ ('rm -rf test_results_dir', ))
+ self.assertEqual(mock_runcmd.call_args_list[1][0],
+ ('rm -rf testtemp_dir', ))
# Test 2. Same, except ath results_dir name does not contain
# 'test_that_results_'
@@ -1672,8 +1693,9 @@ class ResultTest(unittest.TestCase):
self.assertEqual(mock_getroot.call_count, 1)
self.assertEqual(mock_runcmd.call_count, 2)
self.assertEqual(mock_runcmd.call_args_list[0][0],
- ('rm -rf /tmp/tmp_AbcXyz',))
- self.assertEqual(mock_runcmd.call_args_list[1][0], ('rm -rf testtemp_dir',))
+ ('rm -rf /tmp/tmp_AbcXyz', ))
+ self.assertEqual(mock_runcmd.call_args_list[1][0],
+ ('rm -rf testtemp_dir', ))
# Test 3. mock_getroot returns nothing; 'rm_chroot_tmp' is False.
mock_getroot.reset_mock()
@@ -1681,7 +1703,8 @@ class ResultTest(unittest.TestCase):
self.result.CleanUp(False)
self.assertEqual(mock_getroot.call_count, 0)
self.assertEqual(mock_runcmd.call_count, 1)
- self.assertEqual(mock_runcmd.call_args_list[0][0], ('rm -rf testtemp_dir',))
+ self.assertEqual(mock_runcmd.call_args_list[0][0],
+ ('rm -rf testtemp_dir', ))
# Test 4. 'rm_chroot_tmp' is True, but result_dir & temp_dir are None.
mock_getroot.reset_mock()
@@ -1695,7 +1718,6 @@ class ResultTest(unittest.TestCase):
@mock.patch.object(misc, 'GetInsideChrootPath')
@mock.patch.object(command_executer.CommandExecuter, 'ChrootRunCommand')
def test_store_to_cache_dir(self, mock_chrootruncmd, mock_getpath):
-
def FakeMkdtemp(directory=''):
if directory:
pass
@@ -1730,7 +1752,7 @@ class ResultTest(unittest.TestCase):
base_dir = os.path.join(os.getcwd(), 'test_cache/compare_output')
self.assertTrue(os.path.exists(os.path.join(test_dir, 'autotest.tbz2')))
self.assertTrue(os.path.exists(os.path.join(test_dir, 'machine.txt')))
- self.assertTrue(os.path.exists(os.path.join(test_dir, 'results.txt')))
+ self.assertTrue(os.path.exists(os.path.join(test_dir, 'results.pickle')))
f1 = os.path.join(test_dir, 'machine.txt')
f2 = os.path.join(base_dir, 'machine.txt')
@@ -1738,11 +1760,13 @@ class ResultTest(unittest.TestCase):
[_, out, _] = self.result.ce.RunCommandWOutput(cmd)
self.assertEqual(len(out), 0)
- f1 = os.path.join(test_dir, 'results.txt')
- f2 = os.path.join(base_dir, 'results.txt')
- cmd = 'diff %s %s' % (f1, f2)
- [_, out, _] = self.result.ce.RunCommandWOutput(cmd)
- self.assertEqual(len(out), 0)
+ f1 = os.path.join(test_dir, 'results.pickle')
+ f2 = os.path.join(base_dir, 'results.pickle')
+ with open(f1, 'rb') as f:
+ f1_obj = pickle.load(f)
+ with open(f2, 'rb') as f:
+ f2_obj = pickle.load(f)
+ self.assertEqual(f1_obj, f2_obj)
# Clean up after test.
tempfile.mkdtemp = save_real_mkdtemp
@@ -1753,87 +1777,87 @@ class ResultTest(unittest.TestCase):
TELEMETRY_RESULT_KEYVALS = {
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'math-cordic (ms)':
- '11.4',
+ '11.4',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'access-nbody (ms)':
- '6.9',
+ '6.9',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'access-fannkuch (ms)':
- '26.3',
+ '26.3',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'math-spectral-norm (ms)':
- '6.3',
+ '6.3',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'bitops-nsieve-bits (ms)':
- '9.3',
+ '9.3',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'math-partial-sums (ms)':
- '32.8',
+ '32.8',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'regexp-dna (ms)':
- '16.1',
+ '16.1',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'3d-cube (ms)':
- '42.7',
+ '42.7',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'crypto-md5 (ms)':
- '10.8',
+ '10.8',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'crypto-sha1 (ms)':
- '12.4',
+ '12.4',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'string-tagcloud (ms)':
- '47.2',
+ '47.2',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'string-fasta (ms)':
- '36.3',
+ '36.3',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'access-binary-trees (ms)':
- '7.3',
+ '7.3',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'date-format-xparb (ms)':
- '138.1',
+ '138.1',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'crypto-aes (ms)':
- '19.2',
+ '19.2',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'Total (ms)':
- '656.5',
+ '656.5',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'string-base64 (ms)':
- '17.5',
+ '17.5',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'string-validate-input (ms)':
- '24.8',
+ '24.8',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'3d-raytrace (ms)':
- '28.7',
+ '28.7',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'controlflow-recursive (ms)':
- '5.3',
+ '5.3',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'bitops-bits-in-byte (ms)':
- '9.8',
+ '9.8',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'3d-morph (ms)':
- '50.2',
+ '50.2',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'bitops-bitwise-and (ms)':
- '8.8',
+ '8.8',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'access-nsieve (ms)':
- '8.6',
+ '8.6',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'date-format-tofte (ms)':
- '31.2',
+ '31.2',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'bitops-3bit-bits-in-byte (ms)':
- '3.5',
+ '3.5',
'retval':
- 0,
+ 0,
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'string-unpack-code (ms)':
- '45.0'
+ '45.0'
}
PURE_TELEMETRY_OUTPUT = """
@@ -1854,12 +1878,10 @@ class TelemetryResultTest(unittest.TestCase):
'autotest_dir', 'debug_dir', '/tmp', 'lumpy',
'remote', 'image_args', 'cache_dir', 'average',
'gcc', False, None)
- self.mock_machine = machine_manager.MockCrosMachine('falco.cros',
- '/tmp/chromeos',
- 'average')
+ self.mock_machine = machine_manager.MockCrosMachine(
+ 'falco.cros', '/tmp/chromeos', 'average')
def test_populate_from_run(self):
-
def FakeProcessResults():
self.callFakeProcessResults = True
@@ -1932,7 +1954,6 @@ class ResultsCacheTest(unittest.TestCase):
@mock.patch.object(image_checksummer.ImageChecksummer, 'Checksum')
def test_get_cache_dir_for_write(self, mock_checksum):
-
def FakeGetMachines(label):
if label:
pass
@@ -2041,7 +2062,8 @@ class ResultsCacheTest(unittest.TestCase):
# Test 5. Generating cache name for writing, with local image type, and
# specifying that the image path must match the cached image path.
self.results_cache.label.image_type = 'local'
- self.results_cache.cache_conditions.append(CacheConditions.IMAGE_PATH_MATCH)
+ self.results_cache.cache_conditions.append(
+ CacheConditions.IMAGE_PATH_MATCH)
key_list = self.results_cache.GetCacheKeyList(False)
self.assertEqual(key_list[0], '54524606abaae4fdf7b02f49f7ae7127')
self.assertEqual(key_list[3], 'fda29412ceccb72977516c4785d08e2c')
diff --git a/crosperf/test_cache/compare_output/results.txt b/crosperf/test_cache/compare_output/results.pickle
index 592e7161..587863c5 100644
--- a/crosperf/test_cache/compare_output/results.txt
+++ b/crosperf/test_cache/compare_output/results.pickle
Binary files differ
diff --git a/crosperf/test_cache/test_input/results.txt b/crosperf/test_cache/test_input/results.pickle
index 33ba6ab7..33ba6ab7 100644
--- a/crosperf/test_cache/test_input/results.txt
+++ b/crosperf/test_cache/test_input/results.pickle
diff --git a/crosperf/test_cache/test_puretelemetry_input/results.txt b/crosperf/test_cache/test_puretelemetry_input/results.pickle
index 497d1cf3..497d1cf3 100644
--- a/crosperf/test_cache/test_puretelemetry_input/results.txt
+++ b/crosperf/test_cache/test_puretelemetry_input/results.pickle
diff --git a/llvm_extra/create_llvm_extra.sh b/llvm_extra/create_llvm_extra.sh
index 6f34a0b2..b58e0508 100755
--- a/llvm_extra/create_llvm_extra.sh
+++ b/llvm_extra/create_llvm_extra.sh
@@ -82,7 +82,7 @@ function create_llvm_extra_ebuild() {
set -e
-# Sanity checks.
+# Confidence checks.
check_cmd "${@}"
# Create llvm-extra ebuild.
create_llvm_extra_ebuild "${@}"
diff --git a/llvm_tools/nightly_revert_checker.py b/llvm_tools/nightly_revert_checker.py
index 6941f3d6..5e878816 100755
--- a/llvm_tools/nightly_revert_checker.py
+++ b/llvm_tools/nightly_revert_checker.py
@@ -33,7 +33,7 @@ State = t.Any
def _find_interesting_android_shas(android_llvm_toolchain_dir: str
- ) -> t.List[t.Tuple[str, str]]:
+ ) -> t.List[t.Tuple[str, str]]:
llvm_project = os.path.join(android_llvm_toolchain_dir,
'toolchain/llvm-project')
@@ -44,7 +44,7 @@ def _find_interesting_android_shas(android_llvm_toolchain_dir: str
encoding='utf-8',
).strip()
- main_legacy = get_llvm_merge_base('aosp/master-legacy')
+ main_legacy = get_llvm_merge_base('aosp/master-legacy') # nocheck
testing_upstream = get_llvm_merge_base('aosp/testing-upstream')
result = [('main-legacy', main_legacy)]
@@ -55,8 +55,7 @@ def _find_interesting_android_shas(android_llvm_toolchain_dir: str
def _parse_llvm_ebuild_for_shas(ebuild_file: io.TextIOWrapper
- ) -> t.List[t.Tuple[str, str]]:
-
+ ) -> t.List[t.Tuple[str, str]]:
def parse_ebuild_assignment(line: str) -> str:
no_comments = line.split('#')[0]
no_assign = no_comments.split('=', 1)[1].strip()
@@ -84,12 +83,11 @@ def _parse_llvm_ebuild_for_shas(ebuild_file: io.TextIOWrapper
def _find_interesting_chromeos_shas(chromeos_base: str
- ) -> t.List[t.Tuple[str, str]]:
+ ) -> t.List[t.Tuple[str, str]]:
llvm_dir = os.path.join(chromeos_base,
'src/third_party/chromiumos-overlay/sys-devel/llvm')
candidate_ebuilds = [
- os.path.join(llvm_dir, x)
- for x in os.listdir(llvm_dir)
+ os.path.join(llvm_dir, x) for x in os.listdir(llvm_dir)
if '_pre' in x and not os.path.islink(os.path.join(llvm_dir, x))
]
@@ -197,8 +195,10 @@ def find_shas(llvm_dir: str, interesting_shas: t.List[t.Tuple[str, str]],
state: State, new_state: State):
for friendly_name, sha in interesting_shas:
logging.info('Finding reverts across %s (%s)', friendly_name, sha)
- all_reverts = revert_checker.find_reverts(
- llvm_dir, sha, root='origin/' + git_llvm_rev.MAIN_BRANCH)
+ all_reverts = revert_checker.find_reverts(llvm_dir,
+ sha,
+ root='origin/' +
+ git_llvm_rev.MAIN_BRANCH)
logging.info('Detected the following revert(s) across %s:\n%s',
friendly_name, pprint.pformat(all_reverts))
@@ -230,13 +230,12 @@ def do_cherrypick(chroot_path: str, llvm_dir: str,
seen.add(friendly_name)
for sha, reverted_sha in reverts:
try:
- get_upstream_patch.get_from_upstream(
- chroot_path=chroot_path,
- create_cl=True,
- start_sha=reverted_sha,
- patches=[sha],
- reviewers=reviewers,
- cc=cc)
+ get_upstream_patch.get_from_upstream(chroot_path=chroot_path,
+ create_cl=True,
+ start_sha=reverted_sha,
+ patches=[sha],
+ reviewers=reviewers,
+ cc=cc)
except get_upstream_patch.CherrypickError as e:
logging.info('%s, skipping...', str(e))
return new_state
@@ -245,7 +244,6 @@ def do_cherrypick(chroot_path: str, llvm_dir: str,
def do_email(is_dry_run: bool, llvm_dir: str, repository: str,
interesting_shas: t.List[t.Tuple[str, str]], state: State,
recipients: _EmailRecipients) -> State:
-
def prettify_sha(sha: str) -> tiny_render.Piece:
rev = get_llvm_hash.GetVersionFrom(llvm_dir, sha)
@@ -253,8 +251,8 @@ def do_email(is_dry_run: bool, llvm_dir: str, repository: str,
short_sha = sha[:12]
return tiny_render.Switch(
text=f'r{rev} ({short_sha})',
- html=tiny_render.Link(
- href='https://reviews.llvm.org/rG' + sha, inner='r' + str(rev)),
+ html=tiny_render.Link(href='https://reviews.llvm.org/rG' + sha,
+ inner='r' + str(rev)),
)
def get_sha_description(sha: str) -> tiny_render.Piece:
@@ -267,11 +265,12 @@ def do_email(is_dry_run: bool, llvm_dir: str, repository: str,
new_state: State = {}
for friendly_name, sha, new_reverts in find_shas(llvm_dir, interesting_shas,
state, new_state):
- email = _generate_revert_email(repository, friendly_name, sha, prettify_sha,
- get_sha_description, new_reverts)
+ email = _generate_revert_email(repository, friendly_name, sha,
+ prettify_sha, get_sha_description,
+ new_reverts)
if is_dry_run:
- logging.info('Would send email:\nSubject: %s\nBody:\n%s\n', email.subject,
- tiny_render.render_text_pieces(email.body))
+ logging.info('Would send email:\nSubject: %s\nBody:\n%s\n',
+ email.subject, tiny_render.render_text_pieces(email.body))
else:
logging.info('Sending email with subject %r...', email.subject)
_send_revert_email(recipients, email)
@@ -281,16 +280,19 @@ def do_email(is_dry_run: bool, llvm_dir: str, repository: str,
def parse_args(argv: t.List[str]) -> t.Any:
parser = argparse.ArgumentParser(
- description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+ description=__doc__,
+ formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument(
'action',
choices=['cherry-pick', 'email', 'dry-run'],
help='Automatically cherry-pick upstream reverts, send an email, or '
'write to stdout.')
- parser.add_argument(
- '--state_file', required=True, help='File to store persistent state in.')
- parser.add_argument(
- '--llvm_dir', required=True, help='Up-to-date LLVM directory to use.')
+ parser.add_argument('--state_file',
+ required=True,
+ help='File to store persistent state in.')
+ parser.add_argument('--llvm_dir',
+ required=True,
+ help='Up-to-date LLVM directory to use.')
parser.add_argument('--debug', action='store_true')
parser.add_argument(
'--reviewers',
@@ -309,8 +311,9 @@ def parse_args(argv: t.List[str]) -> t.Any:
subparsers.required = True
chromeos_subparser = subparsers.add_parser('chromeos')
- chromeos_subparser.add_argument(
- '--chromeos_dir', required=True, help='Up-to-date CrOS directory to use.')
+ chromeos_subparser.add_argument('--chromeos_dir',
+ required=True,
+ help='Up-to-date CrOS directory to use.')
android_subparser = subparsers.add_parser('android')
android_subparser.add_argument(
@@ -322,7 +325,7 @@ def parse_args(argv: t.List[str]) -> t.Any:
def find_chroot(opts: t.Any, reviewers: t.List[str], cc: t.List[str]
- ) -> t.Tuple[str, t.List[t.Tuple[str, str]], _EmailRecipients]:
+ ) -> t.Tuple[str, t.List[t.Tuple[str, str]], _EmailRecipients]:
recipients = reviewers + cc
if opts.repository == 'chromeos':
chroot_path = opts.chromeos_dir
@@ -335,9 +338,9 @@ def find_chroot(opts: t.Any, reviewers: t.List[str], cc: t.List[str]
chroot_path = opts.android_llvm_toolchain_dir
return (chroot_path, _find_interesting_android_shas(chroot_path),
- _EmailRecipients(
- well_known=[],
- direct=['android-llvm-dev@google.com'] + recipients))
+ _EmailRecipients(well_known=[],
+ direct=['android-llvm-dev@google.com'] +
+ recipients))
else:
raise ValueError(f'Unknown repository {opts.repository}')
@@ -366,21 +369,19 @@ def main(argv: t.List[str]) -> int:
# We want to be as free of obvious side-effects as possible in case something
# above breaks. Hence, action as late as possible.
if action == 'cherry-pick':
- new_state = do_cherrypick(
- chroot_path=chroot_path,
- llvm_dir=llvm_dir,
- interesting_shas=interesting_shas,
- state=state,
- reviewers=reviewers,
- cc=cc)
+ new_state = do_cherrypick(chroot_path=chroot_path,
+ llvm_dir=llvm_dir,
+ interesting_shas=interesting_shas,
+ state=state,
+ reviewers=reviewers,
+ cc=cc)
else:
- new_state = do_email(
- is_dry_run=action == 'dry-run',
- llvm_dir=llvm_dir,
- repository=repository,
- interesting_shas=interesting_shas,
- state=state,
- recipients=recipients)
+ new_state = do_email(is_dry_run=action == 'dry-run',
+ llvm_dir=llvm_dir,
+ repository=repository,
+ interesting_shas=interesting_shas,
+ state=state,
+ recipients=recipients)
_write_state(state_file, new_state)
return 0
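
(Illustration, not part of the patch.) The parse_args() hunks above re-wrap an argparse setup that uses a required subcommand; a minimal, self-contained sketch of that pattern follows. Only --state_file, --chromeos_dir and the `subparsers.required = True` line are taken from the diff; the dest='repository' argument and the rest of the scaffolding are assumptions made for the example.

import argparse
import sys


def parse_args(argv):
    """Sketch of the required-subparser argparse pattern from the hunks above."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('--state_file',
                        required=True,
                        help='File to store persistent state in.')
    # dest='repository' is assumed here; the diff only shows opts.repository
    # being read later in find_chroot().
    subparsers = parser.add_subparsers(dest='repository')
    # Subparsers are optional by default on Python 3; this line makes the
    # subcommand mandatory, as in the hunk above.
    subparsers.required = True
    chromeos_subparser = subparsers.add_parser('chromeos')
    chromeos_subparser.add_argument('--chromeos_dir',
                                    required=True,
                                    help='Up-to-date CrOS directory to use.')
    return parser.parse_args(argv)


if __name__ == '__main__':
    print(parse_args(sys.argv[1:]))
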
diff --git a/rust_tools/rust_watch.py b/rust_tools/rust_watch.py
index 66df7c8a..b2c94acd 100755
--- a/rust_tools/rust_watch.py
+++ b/rust_tools/rust_watch.py
@@ -162,7 +162,7 @@ def get_new_gentoo_commits(git_dir: pathlib.Path,
'git',
'log',
'--format=%H %s',
- f'{most_recent_sha}..origin/master',
+ f'{most_recent_sha}..origin/master', # nocheck
'--',
'dev-lang/rust',
],
@@ -225,7 +225,7 @@ def atomically_write_state(state_file: pathlib.Path, state: State) -> None:
def maybe_compose_email(old_state: State, newest_release: RustReleaseVersion,
new_gentoo_commits: List[GitCommit]
- ) -> Optional[Tuple[str, List[tiny_render.Piece]]]:
+ ) -> Optional[Tuple[str, List[tiny_render.Piece]]]:
"""Creates an email given our new state, if doing so is appropriate."""
subject_pieces = []
body_pieces = []
@@ -269,11 +269,14 @@ def main(argv: List[str]) -> None:
logging.basicConfig(level=logging.INFO)
parser = argparse.ArgumentParser(
- description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
- parser.add_argument(
- '--state_dir', required=True, help='Directory to store state in.')
- parser.add_argument(
- '--skip_email', action='store_true', help="Don't send an email.")
+ description=__doc__,
+ formatter_class=argparse.RawDescriptionHelpFormatter)
+ parser.add_argument('--state_dir',
+ required=True,
+ help='Directory to store state in.')
+ parser.add_argument('--skip_email',
+ action='store_true',
+ help="Don't send an email.")
parser.add_argument(
'--skip_state_update',
action='store_true',
@@ -334,8 +337,8 @@ def main(argv: List[str]) -> None:
logging.info('Skipping state update, as requested')
return
- newest_sha = (
- new_commits[-1].sha if new_commits else prior_state.last_gentoo_sha)
+ newest_sha = (new_commits[-1].sha
+ if new_commits else prior_state.last_gentoo_sha)
atomically_write_state(
state_file,
State(
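
(Illustration, not part of the patch.) The get_new_gentoo_commits() hunk above shells out to `git log --format=%H %s <last-seen-sha>..<branch>`, the revision range annotated with # nocheck; a minimal standalone sketch of that call follows. The function name and the omission of the `-- dev-lang/rust` path filter are choices made for the example, not taken from rust_watch.py.

import subprocess
from typing import List, Tuple


def list_commits_after(git_dir: str, since_sha: str,
                       branch: str) -> List[Tuple[str, str]]:
    """Return (sha, subject) pairs for commits on branch after since_sha.

    Sketch of the `git log --format=%H %s {since_sha}..{branch}` call made
    by get_new_gentoo_commits(); all arguments are caller-supplied.
    """
    stdout = subprocess.run(
        ['git', 'log', '--format=%H %s', f'{since_sha}..{branch}'],
        cwd=git_dir,
        check=True,
        encoding='utf-8',
        capture_output=True,
    ).stdout
    commits = []
    for line in stdout.splitlines():
        sha, _, subject = line.partition(' ')
        commits.append((sha, subject))
    return commits
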
diff --git a/setup_chromeos.py b/setup_chromeos.py
deleted file mode 100755
index 07c7530d..00000000
--- a/setup_chromeos.py
+++ /dev/null
@@ -1,252 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Script to checkout the ChromeOS source.
-
-This script sets up the ChromeOS source in the given directory, matching a
-particular release of ChromeOS.
-"""
-
-from __future__ import print_function
-
-__author__ = 'raymes@google.com (Raymes Khoury)'
-
-from datetime import datetime
-
-import argparse
-import os
-import pickle
-import sys
-import tempfile
-import time
-from cros_utils import command_executer
-from cros_utils import logger
-from cros_utils import manifest_versions
-
-GCLIENT_FILE = """solutions = [
- { "name" : "CHROME_DEPS",
- "url" :
- "svn://svn.chromium.org/chrome-internal/trunk/tools/buildspec/releases/%s",
- "custom_deps" : {
- "src/third_party/WebKit/LayoutTests": None,
- "src-pdf": None,
- "src/pdf": None,
- },
- "safesync_url": "",
- },
-]
-"""
-
-# List of stable versions used for common team image
-# Sheriff must update this list when a new common version becomes available
-COMMON_VERSIONS = '/home/mobiletc-prebuild/common_images/common_list.txt'
-
-
-def Usage(parser):
- parser.print_help()
- sys.exit(0)
-
-
-# Get version spec file, either from "paladin" or "buildspec" directory.
-def GetVersionSpecFile(version, versions_git):
- temp = tempfile.mkdtemp()
- commands = ['cd {0}'.format(temp), \
- 'git clone {0} versions'.format(versions_git)]
- cmd_executer = command_executer.GetCommandExecuter()
- ret = cmd_executer.RunCommands(commands)
- err_msg = None
- if ret:
- err_msg = 'Failed to checkout versions_git - {0}'.format(versions_git)
- ret = None
- else:
- v, m = version.split('.', 1)
- paladin_spec = 'paladin/buildspecs/{0}/{1}.xml'.format(v, m)
- generic_spec = 'buildspecs/{0}/{1}.xml'.format(v, m)
- paladin_path = '{0}/versions/{1}'.format(temp, paladin_spec)
- generic_path = '{0}/versions/{1}'.format(temp, generic_spec)
- if os.path.exists(paladin_path):
- ret = paladin_spec
- elif os.path.exists(generic_path):
- ret = generic_spec
- else:
- err_msg = 'No spec found for version {0}'.format(version)
- ret = None
- # Fall through to clean up.
-
- commands = ['rm -rf {0}'.format(temp)]
- cmd_executer.RunCommands(commands)
- if err_msg:
- logger.GetLogger().LogFatal(err_msg)
- return ret
-
-
-def TimeToCommonVersion(timestamp):
- """Convert timestamp to common image version."""
- tdt = datetime.fromtimestamp(float(timestamp))
- with open(COMMON_VERSIONS, 'r', encoding='utf-8') as f:
- common_list = pickle.load(f)
- for sv in common_list:
- sdt = datetime.strptime(sv['date'], '%Y-%m-%d %H:%M:%S.%f')
- if tdt >= sdt:
- return '%s.%s' % (sv['chrome_major_version'], sv['chromeos_version'])
- # should never reach here
- logger.GetLogger().LogFatal('No common version for timestamp')
- return None
-
-
-def Main(argv):
- """Checkout the ChromeOS source."""
- parser = argparse.ArgumentParser()
- parser.add_argument(
- '--dir',
- dest='directory',
- help='Target directory for ChromeOS installation.')
- parser.add_argument(
- '--version',
- dest='version',
- default='latest_lkgm',
- help="""ChromeOS version. Can be:
-(1) A release version in the format: 'X.X.X.X'
-(2) 'top' for top of trunk
-(3) 'latest_lkgm' for the latest lkgm version
-(4) 'lkgm' for the lkgm release before timestamp
-(5) 'latest_common' for the latest team common stable version
-(6) 'common' for the team common stable version before timestamp
-Default is 'latest_lkgm'.""")
- parser.add_argument(
- '--timestamp',
- dest='timestamp',
- default=None,
- help='Timestamps in epoch format. It will check out the'
- 'latest LKGM or the latest COMMON version of ChromeOS'
- ' before the timestamp. Use in combination with'
- ' --version=latest or --version=common. Use '
- '"date -d <date string> +%s" to find epoch time')
- parser.add_argument(
- '--minilayout',
- dest='minilayout',
- default=False,
- action='store_true',
- help='Whether to checkout the minilayout (smaller '
- 'checkout).')
- parser.add_argument(
- '--jobs', '-j', dest='jobs', help='Number of repo sync threads to use.')
- parser.add_argument(
- '--public',
- '-p',
- dest='public',
- default=False,
- action='store_true',
- help='Use the public checkout instead of the private '
- 'one.')
-
- options = parser.parse_args(argv)
-
- if not options.version:
- parser.print_help()
- logger.GetLogger().LogFatal('No version specified.')
- else:
- version = options.version.strip()
-
- if not options.timestamp:
- timestamp = ''
- else:
- timestamp = options.timestamp.strip()
- if version not in ('lkgm', 'common'):
- parser.print_help()
- logger.GetLogger().LogFatal('timestamp option only applies for '
- 'versions "lkgm" or "common"')
-
- if not options.directory:
- parser.print_help()
- logger.GetLogger().LogFatal('No directory specified.')
-
- directory = options.directory.strip()
-
- if options.public:
- manifest_repo = 'https://chromium.googlesource.com/chromiumos/manifest.git'
- versions_repo = ('https://chromium.googlesource.com/'
- 'chromiumos/manifest-versions.git')
- else:
- manifest_repo = ('https://chrome-internal.googlesource.com/chromeos/'
- 'manifest-internal.git')
- versions_repo = ('https://chrome-internal.googlesource.com/chromeos/'
- 'manifest-versions.git')
-
- if version == 'top':
- init = 'repo init -u %s' % manifest_repo
- elif version == 'latest_lkgm':
- manifests = manifest_versions.ManifestVersions()
- version = manifests.TimeToVersionChromeOS(time.mktime(time.gmtime()))
- version, manifest = version.split('.', 1)
- logger.GetLogger().LogOutput(
- 'found version %s.%s for latest LKGM' % (version, manifest))
- init = ('repo init -u %s -m paladin/buildspecs/%s/%s.xml' %
- (versions_repo, version, manifest))
- del manifests
- elif version == 'lkgm':
- if not timestamp:
- parser.print_help()
- logger.GetLogger().LogFatal('No timestamp specified for version=lkgm')
- manifests = manifest_versions.ManifestVersions()
- version = manifests.TimeToVersion(timestamp)
- version, manifest = version.split('.', 1)
- logger.GetLogger().LogOutput('found version %s.%s for LKGM at timestamp %s'
- % (version, manifest, timestamp))
- init = ('repo init -u %s -m paladin/buildspecs/%s/%s.xml' %
- (versions_repo, version, manifest))
- del manifests
- elif version == 'latest_common':
- version = TimeToCommonVersion(time.mktime(time.gmtime()))
- version, manifest = version.split('.', 1)
- logger.GetLogger().LogOutput(
- 'found version %s.%s for latest Common image' % (version, manifest))
- init = ('repo init -u %s -m buildspecs/%s/%s.xml' % (versions_repo, version,
- manifest))
- elif version == 'common':
- if not timestamp:
- parser.print_help()
- logger.GetLogger().LogFatal('No timestamp specified for version=lkgm')
- version = TimeToCommonVersion(timestamp)
- version, manifest = version.split('.', 1)
- logger.GetLogger().LogOutput(
- 'found version %s.%s for latest common image '
- 'at timestamp %s' % (version, manifest, timestamp))
- init = ('repo init -u %s -m buildspecs/%s/%s.xml' % (versions_repo, version,
- manifest))
- else:
- # user specified a specific version number
- version_spec_file = GetVersionSpecFile(version, versions_repo)
- if not version_spec_file:
- return 1
- init = 'repo init -u %s -m %s' % (versions_repo, version_spec_file)
-
- if options.minilayout:
- init += ' -g minilayout'
-
- init += ' --repo-url=https://chromium.googlesource.com/external/repo.git'
-
- # crosbug#31837 - "Sources need to be world-readable to properly
- # function inside the chroot"
- sync = 'umask 022 && repo sync'
- if options.jobs:
- sync += ' -j %s' % options.jobs
-
- commands = ['mkdir -p %s' % directory, 'cd %s' % directory, init, sync]
- cmd_executer = command_executer.GetCommandExecuter()
- ret = cmd_executer.RunCommands(commands)
- if ret:
- return ret
-
- return cmd_executer.RunCommand(
- 'git ls-remote '
- 'https://chrome-internal.googlesource.com/chrome/src-internal.git '
- 'refs/HEAD > /dev/null')
-
-
-if __name__ == '__main__':
- retval = Main(sys.argv[1:])
- sys.exit(retval)
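
(Illustration, not part of the patch.) The deleted TimeToCommonVersion() above mapped an epoch timestamp to the newest common image version dated at or before it by scanning a pickled list of dicts. A minimal sketch of that lookup follows; it assumes, as the deleted code implies, that the file holds a list sorted newest-first with 'date', 'chrome_major_version' and 'chromeos_version' keys, and it opens the file in binary mode, which pickle.load() requires (the deleted code opened it in text mode).

import pickle
from datetime import datetime


def time_to_common_version(timestamp, versions_path):
    """Return the newest common image version dated at or before timestamp.

    Sketch of the lookup in the deleted TimeToCommonVersion(); versions_path
    is assumed to hold a pickled list of dicts, newest first, with 'date',
    'chrome_major_version' and 'chromeos_version' keys.
    """
    target = datetime.fromtimestamp(float(timestamp))
    with open(versions_path, 'rb') as f:  # pickle requires binary mode
        common_list = pickle.load(f)
    for entry in common_list:
        entry_date = datetime.strptime(entry['date'], '%Y-%m-%d %H:%M:%S.%f')
        if target >= entry_date:
            return '%s.%s' % (entry['chrome_major_version'],
                              entry['chromeos_version'])
    return None
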
diff --git a/unblocked_terms.txt b/unblocked_terms.txt
deleted file mode 100644
index c961e667..00000000
--- a/unblocked_terms.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-# Code uses this in many cases to refer to git branches.
-master
-
-# A few paths in AOSP contain this term.
-native