Diffstat (limited to 'llvm_tools')
27 files changed, 1064 insertions, 1577 deletions
diff --git a/llvm_tools/atomic_write_file.py b/llvm_tools/atomic_write_file.py new file mode 100644 index 00000000..aa6f112e --- /dev/null +++ b/llvm_tools/atomic_write_file.py @@ -0,0 +1,67 @@ +# Copyright 2023 The ChromiumOS Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Atomic file writing utilities. + +Provides atomic_write(...), which allows atomically replacing the contents +of a file. +""" + +import contextlib +import logging +import os +from pathlib import Path +import tempfile +from typing import Union + + +@contextlib.contextmanager +def atomic_write(fp: Union[Path, str], mode="w", *args, **kwargs): + """Write to a filepath atomically. + + This works by a temp file swap, created with a .tmp suffix in + the same directory briefly until being renamed to the desired + filepath. + + In the event an exception is raised during the write, the + temporary file is deleted and the original filepath is untouched. + + Examples: + >>> with atomic_write("my_file.txt", encoding="utf-8") as f: + >>> f.write("Hello world!") + >>> # my_file.txt is still unmodified + >>> # "f" is closed here, and my_file.txt is written to. + + Args: + fp: Filepath to open. + mode: File mode; can be 'w', 'wb'. Default 'w'. + *args: Passed to Path.open as nargs. + **kwargs: Passed to Path.open as kwargs. + + Raises: + ValueError when the mode is invalid. + """ + if isinstance(fp, str): + fp = Path(fp) + if mode not in ("w", "wb"): + raise ValueError(f"mode {mode} not accepted") + + # We use mkstemp here because we want to handle the closing and + # replacement ourselves. + (fd, tmp_path) = tempfile.mkstemp( + prefix=fp.name, + suffix=".tmp", + dir=fp.parent, + ) + tmp_path = Path(tmp_path) + try: + with os.fdopen(fd, mode=mode, *args, **kwargs) as f: + yield f + except: + try: + tmp_path.unlink() + except Exception as e: + logging.exception("unexpected error removing temporary file %s", e) + raise + tmp_path.replace(fp) diff --git a/llvm_tools/atomic_write_file_unittest.py b/llvm_tools/atomic_write_file_unittest.py new file mode 100644 index 00000000..78115569 --- /dev/null +++ b/llvm_tools/atomic_write_file_unittest.py @@ -0,0 +1,48 @@ +# Copyright 2023 The ChromiumOS Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
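For context, a hedged usage sketch of the new atomic_write_file.atomic_write() helper above (the file name and payload are hypothetical, not from this commit). The pattern is a temp-file swap: the body writes into a tempfile.mkstemp() file created beside the destination, and Path.replace() promotes it with a single atomic rename, so readers never observe a half-written file:

    import atomic_write_file

    # Hypothetical destination file and contents.
    with atomic_write_file.atomic_write("settings.json", encoding="utf-8") as f:
        f.write('{"status": "ok"}\n')
    # The rename happens only after the block exits cleanly; had the body
    # raised, settings.json would be untouched and the .tmp file removed.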
+ +from pathlib import Path +import tempfile +import unittest + +import atomic_write_file + + +class TestAtomicWrite(unittest.TestCase): + """Test atomic_write.""" + + def test_atomic_write(self): + """Test that atomic write safely writes.""" + prior_contents = "This is a test written by patch_utils_unittest.py\n" + new_contents = "I am a test written by patch_utils_unittest.py\n" + with tempfile.TemporaryDirectory( + prefix="patch_utils_unittest" + ) as dirname: + dirpath = Path(dirname) + filepath = dirpath / "test_atomic_write.txt" + with filepath.open("w", encoding="utf-8") as f: + f.write(prior_contents) + + def _t(): + with atomic_write_file.atomic_write( + filepath, encoding="utf-8" + ) as f: + f.write(new_contents) + raise Exception("Expected failure") + + self.assertRaises(Exception, _t) + with filepath.open(encoding="utf-8") as f: + lines = f.readlines() + self.assertEqual(lines[0], prior_contents) + with atomic_write_file.atomic_write( + filepath, encoding="utf-8" + ) as f: + f.write(new_contents) + with filepath.open(encoding="utf-8") as f: + lines = f.readlines() + self.assertEqual(lines[0], new_contents) + + +if __name__ == "__main__": + unittest.main() diff --git a/llvm_tools/auto_llvm_bisection.py b/llvm_tools/auto_llvm_bisection.py index 3640abae..9b4b8559 100755 --- a/llvm_tools/auto_llvm_bisection.py +++ b/llvm_tools/auto_llvm_bisection.py @@ -1,5 +1,4 @@ #!/usr/bin/env python3 -# -*- coding: utf-8 -*- # Copyright 2019 The ChromiumOS Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -49,10 +48,15 @@ class BuilderStatus(enum.Enum): RUNNING = "running" +# Writing a dict with `.value`s spelled out makes `black`'s style conflict with +# `cros lint`'s diagnostics. builder_status_mapping = { - BuilderStatus.PASS.value: update_tryjob_status.TryjobStatus.GOOD.value, - BuilderStatus.FAIL.value: update_tryjob_status.TryjobStatus.BAD.value, - BuilderStatus.RUNNING.value: update_tryjob_status.TryjobStatus.PENDING.value, + a.value: b.value + for a, b in ( + (BuilderStatus.PASS, update_tryjob_status.TryjobStatus.GOOD), + (BuilderStatus.FAIL, update_tryjob_status.TryjobStatus.BAD), + (BuilderStatus.RUNNING, update_tryjob_status.TryjobStatus.PENDING), + ) } @@ -63,8 +67,6 @@ def GetBuildResult(chroot_path, buildbucket_id): try: tryjob_json = subprocess.check_output( [ - "cros_sdk", - "--", "cros", "buildresult", "--buildbucket-id", @@ -98,13 +100,15 @@ def main(): """Bisects LLVM using the result of `cros buildresult` of each tryjob. Raises: - AssertionError: The script was run inside the chroot. + AssertionError: The script was run inside the chroot. """ chroot.VerifyOutsideChroot() args_output = llvm_bisection.GetCommandLineArgs() + chroot.VerifyChromeOSRoot(args_output.chroot_path) + if os.path.isfile(args_output.last_tested): print("Resuming bisection for %s" % args_output.last_tested) else: @@ -114,7 +118,7 @@ def main(): # Update the status of existing tryjobs if os.path.isfile(args_output.last_tested): update_start_time = time.time() - with open(args_output.last_tested) as json_file: + with open(args_output.last_tested, encoding="utf-8") as json_file: json_dict = json.load(json_file) while True: print( @@ -139,15 +143,16 @@ def main(): print("-" * 40) - # Proceed to the next step if all the existing tryjobs have completed. + # Proceed to the next step if all the existing tryjobs have + # completed. 
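As an aside on the builder_status_mapping rewrite earlier in this file's diff: the comprehension builds the same flat dict the spelled-out literal did, just within `black`'s line-length limits. A self-contained sketch; the TryjobStatus string values are assumed here, since only the BuilderStatus values appear in this diff:

    import enum

    class BuilderStatus(enum.Enum):
        PASS = "pass"
        FAIL = "fail"
        RUNNING = "running"

    class TryjobStatus(enum.Enum):
        # Assumed values, not shown in this commit.
        GOOD = "good"
        BAD = "bad"
        PENDING = "pending"

    builder_status_mapping = {
        a.value: b.value
        for a, b in (
            (BuilderStatus.PASS, TryjobStatus.GOOD),
            (BuilderStatus.FAIL, TryjobStatus.BAD),
            (BuilderStatus.RUNNING, TryjobStatus.PENDING),
        )
    }
    assert builder_status_mapping == {
        "pass": "good",
        "fail": "bad",
        "running": "pending",
    }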
if completed: break delta_time = time.time() - update_start_time if delta_time > POLLING_LIMIT_SECS: - # Something is wrong with updating the tryjobs' 'status' via - # `cros buildresult` (e.g. network issue, etc.). + # Something is wrong with updating the tryjobs' 'status' + # via `cros buildresult` (e.g. network issue, etc.). sys.exit("Failed to update pending tryjobs.") print("-" * 40) @@ -157,7 +162,7 @@ def main(): # There should always be an update from the tryjobs launched in the # last iteration. temp_filename = "%s.new" % args_output.last_tested - with open(temp_filename, "w") as temp_file: + with open(temp_filename, "w", encoding="utf-8") as temp_file: json.dump( json_dict, temp_file, indent=4, separators=(",", ": ") ) diff --git a/llvm_tools/auto_llvm_bisection_unittest.py b/llvm_tools/auto_llvm_bisection_unittest.py index c70ddee5..30e7dfb3 100755 --- a/llvm_tools/auto_llvm_bisection_unittest.py +++ b/llvm_tools/auto_llvm_bisection_unittest.py @@ -1,5 +1,4 @@ #!/usr/bin/env python3 -# -*- coding: utf-8 -*- # Copyright 2019 The ChromiumOS Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -13,7 +12,7 @@ import subprocess import time import traceback import unittest -import unittest.mock as mock +from unittest import mock import auto_llvm_bisection import chroot @@ -25,6 +24,7 @@ import update_tryjob_status class AutoLLVMBisectionTest(unittest.TestCase): """Unittests for auto bisection of LLVM.""" + @mock.patch.object(chroot, "VerifyChromeOSRoot") @mock.patch.object(chroot, "VerifyOutsideChroot", return_value=True) @mock.patch.object( llvm_bisection, @@ -53,6 +53,7 @@ class AutoLLVMBisectionTest(unittest.TestCase): mock_sleep, mock_get_args, mock_outside_chroot, + mock_chromeos_root, ): mock_isfile.side_effect = [False, False, True, True] @@ -76,8 +77,8 @@ class AutoLLVMBisectionTest(unittest.TestCase): update_tryjob_status.TryjobStatus.GOOD.value ) - # Verify the exception is raised when successfully found the bad revision. - # Uses `sys.exit(0)` to indicate success. + # Verify the exception is raised when successfully found the bad + # revision. Uses `sys.exit(0)` to indicate success. 
with self.assertRaises(SystemExit) as err: auto_llvm_bisection.main() @@ -90,6 +91,7 @@ class AutoLLVMBisectionTest(unittest.TestCase): mock_traceback.assert_called_once() mock_sleep.assert_called_once() + @mock.patch.object(chroot, "VerifyChromeOSRoot") @mock.patch.object(chroot, "VerifyOutsideChroot", return_value=True) @mock.patch.object(time, "sleep") @mock.patch.object(traceback, "print_exc") @@ -108,6 +110,7 @@ class AutoLLVMBisectionTest(unittest.TestCase): mock_traceback, mock_sleep, mock_outside_chroot, + mock_chromeos_root, ): mock_isfile.return_value = False @@ -123,6 +126,7 @@ class AutoLLVMBisectionTest(unittest.TestCase): self.assertEqual(err.exception.code, "Unable to continue bisection.") + mock_chromeos_root.assert_called_once() mock_outside_chroot.assert_called_once() mock_get_args.assert_called_once() self.assertEqual(mock_isfile.call_count, 2) @@ -130,6 +134,7 @@ class AutoLLVMBisectionTest(unittest.TestCase): self.assertEqual(mock_traceback.call_count, 3) self.assertEqual(mock_sleep.call_count, 2) + @mock.patch.object(chroot, "VerifyChromeOSRoot") @mock.patch.object(chroot, "VerifyOutsideChroot", return_value=True) @mock.patch.object( llvm_bisection, @@ -153,6 +158,7 @@ class AutoLLVMBisectionTest(unittest.TestCase): mock_time, mock_get_args, mock_outside_chroot, + mock_chromeos_root, ): # Simulate behavior of `time.time()` for time passed. @@ -210,8 +216,6 @@ class AutoLLVMBisectionTest(unittest.TestCase): mock_chroot_command.assert_called_once_with( [ - "cros_sdk", - "--", "cros", "buildresult", "--buildbucket-id", @@ -234,8 +238,6 @@ class AutoLLVMBisectionTest(unittest.TestCase): auto_llvm_bisection.GetBuildResult(chroot_path, buildbucket_id) mock_chroot_command.assert_called_once_with( [ - "cros_sdk", - "--", "cros", "buildresult", "--buildbucket-id", @@ -258,8 +260,8 @@ class AutoLLVMBisectionTest(unittest.TestCase): tryjob_contents = {buildbucket_id: {"status": invalid_build_status}} mock_chroot_command.return_value = json.dumps(tryjob_contents) - # Verify the exception is raised when the return value of `cros buildresult` - # is not in the `builder_status_mapping`. + # Verify an exception is raised when the return value of `cros + # buildresult` is not in the `builder_status_mapping`. with self.assertRaises(ValueError) as err: auto_llvm_bisection.GetBuildResult(chroot_path, buildbucket_id) @@ -271,8 +273,6 @@ class AutoLLVMBisectionTest(unittest.TestCase): mock_chroot_command.assert_called_once_with( [ - "cros_sdk", - "--", "cros", "buildresult", "--buildbucket-id", diff --git a/llvm_tools/bisect_clang_crashes.py b/llvm_tools/bisect_clang_crashes.py deleted file mode 100755 index b2759051..00000000 --- a/llvm_tools/bisect_clang_crashes.py +++ /dev/null @@ -1,157 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2020 The ChromiumOS Authors -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Fetches and submits the artifacts from ChromeOS toolchain's crash bucket. 
-""" - -import argparse -import glob -import json -import logging -import os -import os.path -import shutil -import subprocess -import sys - -import chroot - - -def get_artifacts(pattern): - results = subprocess.check_output( - ["gsutil.py", "ls", pattern], stderr=subprocess.STDOUT, encoding="utf-8" - ) - return sorted(l.strip() for l in results.splitlines()) - - -def get_crash_reproducers(working_dir): - results = [] - for src in [ - f - for f in glob.glob("%s/*.c*" % working_dir) - if f.split(".")[-1] in ["c", "cc", "cpp"] - ]: - script = ".".join(src.split(".")[:-1]) + ".sh" - if not os.path.exists(script): - logging.warning("could not find the matching script of %s", src) - else: - results.append((src, script)) - return results - - -def submit_crash_to_forcey( - forcey: str, temporary_directory: str, buildbucket_id: str, url: str -) -> None: - dest_dir = os.path.join(temporary_directory, buildbucket_id) - dest_file = os.path.join(dest_dir, os.path.basename(url)) - logging.info("Downloading and submitting %r...", url) - subprocess.check_output( - ["gsutil.py", "cp", url, dest_file], stderr=subprocess.STDOUT - ) - subprocess.check_output(["tar", "-xJf", dest_file], cwd=dest_dir) - for src, script in get_crash_reproducers(dest_dir): - subprocess.check_output( - [ - forcey, - "reduce", - "-wait=false", - "-note", - "%s:%s" % (url, src), - "-sh_file", - script, - "-src_file", - src, - ] - ) - - -def main(argv): - chroot.VerifyOutsideChroot() - logging.basicConfig( - format="%(asctime)s: %(levelname)s: %(filename)s:%(lineno)d: %(message)s", - level=logging.INFO, - ) - cur_dir = os.path.dirname(os.path.abspath(__file__)) - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--4c", dest="forcey", required=True, help="Path to a 4c client binary" - ) - parser.add_argument( - "--state_file", - default=os.path.join(cur_dir, "chromeos-state.json"), - help="The path to the state file.", - ) - parser.add_argument( - "--nocleanup", - action="store_false", - dest="cleanup", - help="Keep temporary files created after the script finishes.", - ) - opts = parser.parse_args(argv) - - state_file = os.path.abspath(opts.state_file) - os.makedirs(os.path.dirname(state_file), exist_ok=True) - temporary_directory = "/tmp/bisect_clang_crashes" - os.makedirs(temporary_directory, exist_ok=True) - urls = get_artifacts( - "gs://chromeos-toolchain-artifacts/clang-crash-diagnoses" - "/**/*clang_crash_diagnoses.tar.xz" - ) - logging.info("%d crash URLs found", len(urls)) - - visited = {} - if os.path.exists(state_file): - buildbucket_ids = {url.split("/")[-2] for url in urls} - with open(state_file, encoding="utf-8") as f: - data = json.load(f) - visited = {k: v for k, v in data.items() if k in buildbucket_ids} - logging.info( - "Successfully loaded %d previously-submitted crashes", len(visited) - ) - - try: - for url in urls: - splits = url.split("/") - buildbucket_id = splits[-2] - # Skip the builds that has been processed - if buildbucket_id in visited: - continue - submit_crash_to_forcey( - forcey=opts.forcey, - temporary_directory=temporary_directory, - buildbucket_id=buildbucket_id, - url=url, - ) - visited[buildbucket_id] = url - - exception_in_flight = False - except: - exception_in_flight = True - raise - finally: - if exception_in_flight: - # This is best-effort. If the machine powers off or similar, we'll just - # resubmit the same crashes, which is suboptimal, but otherwise - # acceptable. - logging.error( - "Something went wrong; attempting to save our work..." 
- ) - else: - logging.info("Persisting state...") - - tmp_state_file = state_file + ".tmp" - with open(tmp_state_file, "w", encoding="utf-8") as f: - json.dump(visited, f, indent=2) - os.rename(tmp_state_file, state_file) - - logging.info("State successfully persisted") - - if opts.cleanup: - shutil.rmtree(temporary_directory) - - -if __name__ == "__main__": - sys.exit(main(sys.argv[1:])) diff --git a/llvm_tools/bisect_clang_crashes_unittest.py b/llvm_tools/bisect_clang_crashes_unittest.py deleted file mode 100755 index 22c9be19..00000000 --- a/llvm_tools/bisect_clang_crashes_unittest.py +++ /dev/null @@ -1,101 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2020 The ChromiumOS Authors -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Tests for bisect_clang_crashes.""" - -import glob -import logging -import os.path -import subprocess -import unittest -import unittest.mock as mock - -import bisect_clang_crashes - - -class Test(unittest.TestCase): - """Tests for bisect_clang_crashes.""" - - class _SilencingFilter(object): - """Silences all log messages. - - Also collects info about log messages that would've been emitted. - """ - - def __init__(self): - self.messages = [] - - def filter(self, record): - self.messages.append(record.getMessage()) - return 0 - - @mock.patch.object(subprocess, "check_output") - def test_get_artifacts(self, mock_gsutil_ls): - pattern = ( - "gs://chromeos-toolchain-artifacts/clang-crash-diagnoses/" - "**/*clang_crash_diagnoses.tar.xz" - ) - mock_gsutil_ls.return_value = "artifact1\nartifact2\nartifact3" - results = bisect_clang_crashes.get_artifacts(pattern) - self.assertEqual(results, ["artifact1", "artifact2", "artifact3"]) - mock_gsutil_ls.assert_called_once_with( - ["gsutil.py", "ls", pattern], - stderr=subprocess.STDOUT, - encoding="utf-8", - ) - - @mock.patch.object(os.path, "exists") - @mock.patch.object(glob, "glob") - def test_get_crash_reproducers_succeed( - self, mock_file_search, mock_file_check - ): - working_dir = "SomeDirectory" - mock_file_search.return_value = ["a.c", "b.cpp", "c.cc"] - mock_file_check.side_effect = [True, True, True] - results = bisect_clang_crashes.get_crash_reproducers(working_dir) - mock_file_search.assert_called_once_with("%s/*.c*" % working_dir) - self.assertEqual(mock_file_check.call_count, 3) - self.assertEqual(mock_file_check.call_args_list[0], mock.call("a.sh")) - self.assertEqual(mock_file_check.call_args_list[1], mock.call("b.sh")) - self.assertEqual(mock_file_check.call_args_list[2], mock.call("c.sh")) - self.assertEqual( - results, [("a.c", "a.sh"), ("b.cpp", "b.sh"), ("c.cc", "c.sh")] - ) - - @mock.patch.object(os.path, "exists") - @mock.patch.object(glob, "glob") - def test_get_crash_reproducers_no_matching_script( - self, mock_file_search, mock_file_check - ): - def silence_logging(): - root = logging.getLogger() - filt = self._SilencingFilter() - root.addFilter(filt) - self.addCleanup(root.removeFilter, filt) - return filt - - log_filter = silence_logging() - working_dir = "SomeDirectory" - mock_file_search.return_value = ["a.c", "b.cpp", "c.cc"] - mock_file_check.side_effect = [True, False, True] - results = bisect_clang_crashes.get_crash_reproducers(working_dir) - mock_file_search.assert_called_once_with("%s/*.c*" % working_dir) - self.assertEqual(mock_file_check.call_count, 3) - self.assertEqual(mock_file_check.call_args_list[0], mock.call("a.sh")) - self.assertEqual(mock_file_check.call_args_list[1], mock.call("b.sh")) - 
self.assertEqual(mock_file_check.call_args_list[2], mock.call("c.sh")) - self.assertEqual(results, [("a.c", "a.sh"), ("c.cc", "c.sh")]) - self.assertTrue( - any( - "could not find the matching script of b.cpp" in x - for x in log_filter.messages - ), - log_filter.messages, - ) - - -if __name__ == "__main__": - unittest.main() diff --git a/llvm_tools/chroot.py b/llvm_tools/chroot.py index 46464feb..cb0ebc87 100755 --- a/llvm_tools/chroot.py +++ b/llvm_tools/chroot.py @@ -9,7 +9,9 @@ import collections import os +from pathlib import Path import subprocess +from typing import List CommitContents = collections.namedtuple("CommitContents", ["url", "cl_number"]) @@ -30,6 +32,19 @@ def VerifyOutsideChroot(): assert not InChroot(), "Script should be run outside the chroot." +def VerifyChromeOSRoot(chromeos_root): + """Checks whether the path actually points to ChromiumOS checkout root. + + Raises: + AssertionError: The path is not ChromiumOS checkout root. + """ + + subdir = "src/third_party/chromiumos-overlay" + path = Path(chromeos_root).expanduser() / subdir + msg = f"Wrong ChromeOS path. No {subdir} directory in {chromeos_root} ." + assert path.is_dir(), msg + + def GetChrootEbuildPaths(chromeos_root, packages): """Gets the chroot path(s) of the package(s). @@ -60,7 +75,10 @@ def GetChrootEbuildPaths(chromeos_root, packages): return chroot_paths -def ConvertChrootPathsToAbsolutePaths(chromeos_root, chroot_paths): +def ConvertChrootPathsToAbsolutePaths( + chromeos_root: str, + chroot_paths: List[str], +) -> List[str]: """Converts the chroot path(s) to absolute symlink path(s). Args: @@ -76,11 +94,8 @@ def ConvertChrootPathsToAbsolutePaths(chromeos_root, chroot_paths): """ abs_paths = [] - chroot_prefix = "/mnt/host/source/" - # Iterate through the chroot paths. - # # For each chroot file path, remove '/mnt/host/source/' prefix # and combine the chroot path with the result and add it to the list. for chroot_path in chroot_paths: @@ -88,12 +103,8 @@ def ConvertChrootPathsToAbsolutePaths(chromeos_root, chroot_paths): raise ValueError( "Invalid prefix for the chroot path: %s" % chroot_path ) - rel_path = chroot_path[len(chroot_prefix) :] - # combine the chromeos root path + '/src/...' 
abs_path = os.path.join(chromeos_root, rel_path) - abs_paths.append(abs_path) - return abs_paths diff --git a/llvm_tools/get_upstream_patch.py b/llvm_tools/get_upstream_patch.py index 72aa16b6..415faaa7 100755 --- a/llvm_tools/get_upstream_patch.py +++ b/llvm_tools/get_upstream_patch.py @@ -59,7 +59,7 @@ def validate_patch_application( if predecessor_apply_results.failed_patches: logging.error("Failed to apply patches from PATCHES.json:") for p in predecessor_apply_results.failed_patches: - logging.error(f"Patch title: {p.title()}") + logging.error("Patch title: %s", p.title()) raise PatchApplicationError("Failed to apply patch from PATCHES.json") patch_entry = patch_utils.PatchEntry.from_dict( @@ -126,7 +126,9 @@ def add_patch( ) with open(patches_json_path, encoding="utf-8") as f: - patches_json = json.load(f) + contents = f.read() + indent_len = patch_utils.predict_indent(contents.splitlines()) + patches_json = json.loads(contents) for p in patches_json: rel_path = p["rel_patch_path"] @@ -182,7 +184,11 @@ def add_patch( temp_file = patches_json_path + ".tmp" with open(temp_file, "w", encoding="utf-8") as f: json.dump( - patches_json, f, indent=4, separators=(",", ": "), sort_keys=True + patches_json, + f, + indent=indent_len, + separators=(",", ": "), + sort_keys=True, ) f.write("\n") os.rename(temp_file, patches_json_path) @@ -327,6 +333,7 @@ def find_patches_and_make_cl( start_rev: git_llvm_rev.Rev, llvm_config: git_llvm_rev.LLVMConfig, llvm_symlink_dir: str, + allow_failures: bool, create_cl: bool, skip_dependencies: bool, reviewers: t.Optional[t.List[str]], @@ -354,6 +361,8 @@ def find_patches_and_make_cl( if create_cl: git.CreateBranch(llvm_symlink_dir, branch) + successes = [] + failures = [] for parsed_patch in converted_patches: # Find out the llvm projects changed in this commit packages = get_package_names(parsed_patch.sha, llvm_config.dir) @@ -369,15 +378,25 @@ def find_patches_and_make_cl( chroot_path, symlinks ) # Create a local patch for all the affected llvm projects - create_patch_for_packages( - packages, - symlinks, - start_rev, - parsed_patch.rev, - parsed_patch.sha, - llvm_config.dir, - platforms=platforms, - ) + try: + create_patch_for_packages( + packages, + symlinks, + start_rev, + parsed_patch.rev, + parsed_patch.sha, + llvm_config.dir, + platforms=platforms, + ) + except PatchApplicationError as e: + if allow_failures: + logging.warning(e) + failures.append(parsed_patch.sha) + continue + else: + raise e + successes.append(parsed_patch.sha) + if create_cl: symlinks_to_uprev.extend(symlinks) @@ -397,7 +416,17 @@ def find_patches_and_make_cl( ["git", "reset", "--hard", "HEAD^"], cwd=llvm_config.dir ) - if create_cl: + if allow_failures: + success_list = (":\n\t" + "\n\t".join(successes)) if successes else "." + logging.info( + "Successfully applied %d patches%s", len(successes), success_list + ) + failure_list = (":\n\t" + "\n\t".join(failures)) if failures else "." 
+ logging.info( + "Failed to apply %d patches%s", len(failures), failure_list + ) + + if successes and create_cl: make_cl( symlinks_to_uprev, llvm_symlink_dir, @@ -478,6 +507,7 @@ def get_from_upstream( start_sha: str, patches: t.List[str], platforms: t.List[str], + allow_failures: bool, skip_dependencies: bool = False, reviewers: t.List[str] = None, cc: t.List[str] = None, @@ -515,7 +545,9 @@ def get_from_upstream( skip_dependencies=skip_dependencies, reviewers=reviewers, cc=cc, + allow_failures=allow_failures, ) + logging.info("Complete.") @@ -565,6 +597,11 @@ def main(): "apply to multiple platforms", ) parser.add_argument( + "--allow_failures", + action="store_true", + help="Skip patches that fail to apply and continue.", + ) + parser.add_argument( "--create_cl", action="store_true", help="Automatically create a CL if specified", @@ -576,6 +613,7 @@ def main(): "when --differential appears exactly once.", ) args = parser.parse_args() + chroot.VerifyChromeOSRoot(args.chroot_path) if not (args.sha or args.differential): parser.error("--sha or --differential required") @@ -588,6 +626,7 @@ def main(): get_from_upstream( chroot_path=args.chroot_path, + allow_failures=args.allow_failures, create_cl=args.create_cl, start_sha=args.start_sha, patches=args.sha + args.differential, diff --git a/llvm_tools/git.py b/llvm_tools/git.py index 0f56aa0d..3bb702c9 100755 --- a/llvm_tools/git.py +++ b/llvm_tools/git.py @@ -65,14 +65,17 @@ def DeleteBranch(repo, branch): if not os.path.isdir(repo): raise ValueError("Invalid directory path provided: %s" % repo) - subprocess.check_output(["git", "-C", repo, "checkout", "cros/main"]) + def run_checked(cmd): + subprocess.run(["git", "-C", repo] + cmd, check=True) - subprocess.check_output(["git", "-C", repo, "reset", "HEAD", "--hard"]) - - subprocess.check_output(["git", "-C", repo, "branch", "-D", branch]) + run_checked(["checkout", "-q", "m/main"]) + run_checked(["reset", "-q", "HEAD", "--hard"]) + run_checked(["branch", "-q", "-D", branch]) -def UploadChanges(repo, branch, commit_messages, reviewers=None, cc=None): +def UploadChanges( + repo, branch, commit_messages, reviewers=None, cc=None, wip=False +): """Uploads the changes in the specifed branch of the given repo for review. Args: @@ -114,6 +117,8 @@ def UploadChanges(repo, branch, commit_messages, reviewers=None, cc=None): if cc: git_args.append(f'--cc={",".join(cc)}') + if wip: + git_args.append("--wip") out = subprocess.check_output( git_args, @@ -123,13 +128,11 @@ def UploadChanges(repo, branch, commit_messages, reviewers=None, cc=None): ) print(out) - + # Matches both internal and external CLs. found_url = re.search( - r"https://chromium-review.googlesource.com/c/" - r"chromiumos/overlays/chromiumos-overlay/\+/([0-9]+)", + r"https?://[\w-]*-review.googlesource.com/c/.*/([0-9]+)", out.rstrip(), ) - if not found_url: raise ValueError("Failed to find change list URL.") diff --git a/llvm_tools/git_llvm_rev.py b/llvm_tools/git_llvm_rev.py index 3dc34fce..1db94461 100755 --- a/llvm_tools/git_llvm_rev.py +++ b/llvm_tools/git_llvm_rev.py @@ -104,7 +104,7 @@ def translate_prebase_sha_to_rev_number( This function assumes that the given SHA is an ancestor of |base_llvm_sha|. 
""" commit_message = check_output( - ["git", "log", "-n1", "--format=%B", sha], + ["git", "log", "-n1", "--format=%B", sha, "--"], cwd=llvm_config.dir, ) last_line = commit_message.strip().splitlines()[-1] @@ -125,13 +125,13 @@ def translate_sha_to_rev(llvm_config: LLVMConfig, sha_or_ref: str) -> Rev: sha = sha_or_ref else: sha = check_output( - ["git", "rev-parse", sha_or_ref], + ["git", "rev-parse", "--revs-only", sha_or_ref, "--"], cwd=llvm_config.dir, ) sha = sha.strip() merge_base = check_output( - ["git", "merge-base", base_llvm_sha, sha], + ["git", "merge-base", base_llvm_sha, sha, "--"], cwd=llvm_config.dir, ) merge_base = merge_base.strip() @@ -144,6 +144,7 @@ def translate_sha_to_rev(llvm_config: LLVMConfig, sha_or_ref: str) -> Rev: "--count", "--first-parent", f"{base_llvm_sha}..{sha}", + "--", ], cwd=llvm_config.dir, ) @@ -167,6 +168,7 @@ def translate_sha_to_rev(llvm_config: LLVMConfig, sha_or_ref: str) -> Rev: "--count", "--first-parent", f"{merge_base}..{sha}", + "--", ], cwd=llvm_config.dir, ) @@ -263,23 +265,21 @@ def translate_prebase_rev_to_sha(llvm_config: LLVMConfig, rev: Rev) -> str: base_llvm_sha, ] - subp = subprocess.Popen( + with subprocess.Popen( git_command, cwd=llvm_config.dir, stdin=subprocess.DEVNULL, stdout=subprocess.PIPE, encoding="utf-8", - ) - - with subp: + ) as subp: for sha, message in parse_git_commit_messages(subp.stdout, separator): last_line = message.splitlines()[-1] if last_line.strip() == looking_for: subp.terminate() return sha + if subp.wait() != 0: + raise subprocess.CalledProcessError(subp.returncode, git_command) - if subp.returncode: - raise subprocess.CalledProcessError(subp.returncode, git_command) raise ValueError(f"No commit with revision {rev} found") @@ -317,7 +317,13 @@ def translate_rev_to_sha(llvm_config: LLVMConfig, rev: Rev) -> str: # about rev walking/counting locally compared to long |log|s, so we walk back # twice. 
head = check_output( - ["git", "rev-parse", f"{llvm_config.remote}/{branch}"], + [ + "git", + "rev-parse", + "--revs-only", + f"{llvm_config.remote}/{branch}", + "--", + ], cwd=llvm_config.dir, ) branch_head_sha = head.strip() @@ -330,6 +336,7 @@ def translate_rev_to_sha(llvm_config: LLVMConfig, rev: Rev) -> str: "--count", "--first-parent", f"{base_sha}..{branch_head_sha}", + "--", ], cwd=llvm_config.dir, ) diff --git a/llvm_tools/git_unittest.py b/llvm_tools/git_unittest.py index ce21e6c9..35ba2430 100755 --- a/llvm_tools/git_unittest.py +++ b/llvm_tools/git_unittest.py @@ -68,7 +68,7 @@ class HelperFunctionsTest(unittest.TestCase): mock_isdir.assert_called_once() @mock.patch.object(os.path, "isdir", return_value=True) - @mock.patch.object(subprocess, "check_output", return_value=None) + @mock.patch.object(subprocess, "run", return_value=None) def testSuccessfullyDeletedBranch(self, mock_command_output, mock_isdir): path_to_repo = "/valid/path/to/repo" branch = "branch-name" diff --git a/llvm_tools/llvm_bisection.py b/llvm_tools/llvm_bisection.py index 0b851ebe..6cc93aec 100755 --- a/llvm_tools/llvm_bisection.py +++ b/llvm_tools/llvm_bisection.py @@ -289,7 +289,6 @@ def Bisect( last_tested, update_packages, chroot_path, - patch_metadata_file, extra_change_lists, options, builder, @@ -304,7 +303,6 @@ def Bisect( git_hash, svn_revision, chroot_path, - patch_metadata_file, extra_change_lists, options, builder, @@ -346,7 +344,7 @@ def main(args_output): """ chroot.VerifyOutsideChroot() - patch_metadata_file = "PATCHES.json" + chroot.VerifyChromeOSRoot(args_output.chroot_path) start = args_output.start_rev end = args_output.end_rev @@ -453,7 +451,6 @@ def main(args_output): args_output.last_tested, update_chromeos_llvm_hash.DEFAULT_PACKAGES, args_output.chroot_path, - patch_metadata_file, args_output.extra_change_lists, args_output.options, args_output.builder, diff --git a/llvm_tools/llvm_bisection_unittest.py b/llvm_tools/llvm_bisection_unittest.py index 1e86a678..a2bcc90b 100755 --- a/llvm_tools/llvm_bisection_unittest.py +++ b/llvm_tools/llvm_bisection_unittest.py @@ -209,7 +209,6 @@ class LLVMBisectionTest(unittest.TestCase): _git_hash, _revision, _chroot_path, - _patch_file, _extra_cls, _options, _builder, @@ -238,7 +237,6 @@ class LLVMBisectionTest(unittest.TestCase): args_output = test_helpers.ArgsOutputTest() packages = ["sys-devel/llvm"] - patch_file = "/abs/path/to/PATCHES.json" # Create a temporary .JSON file to simulate a status file for bisection. 
with test_helpers.CreateTemporaryJsonFile() as temp_json_file: @@ -255,7 +253,6 @@ class LLVMBisectionTest(unittest.TestCase): temp_json_file, packages, args_output.chroot_path, - patch_file, args_output.extra_change_lists, args_output.options, args_output.builders, @@ -289,10 +286,12 @@ class LLVMBisectionTest(unittest.TestCase): @mock.patch.object(llvm_bisection, "GetCommitsBetween") @mock.patch.object(llvm_bisection, "GetRemainingRange") @mock.patch.object(llvm_bisection, "LoadStatusFile") + @mock.patch.object(chroot, "VerifyChromeOSRoot") @mock.patch.object(chroot, "VerifyOutsideChroot", return_value=True) def testMainPassed( self, mock_outside_chroot, + mock_chromeos_root, mock_load_status_file, mock_get_range, mock_get_revision_and_hash_list, @@ -337,6 +336,8 @@ class LLVMBisectionTest(unittest.TestCase): llvm_bisection.BisectionExitStatus.BISECTION_COMPLETE.value, ) + mock_chromeos_root.assert_called_once() + mock_outside_chroot.assert_called_once() mock_load_status_file.assert_called_once() @@ -362,9 +363,10 @@ class LLVMBisectionTest(unittest.TestCase): ) @mock.patch.object(llvm_bisection, "LoadStatusFile") + @mock.patch.object(chroot, "VerifyChromeOSRoot") @mock.patch.object(chroot, "VerifyOutsideChroot", return_value=True) def testMainFailedWithInvalidRange( - self, mock_outside_chroot, mock_load_status_file + self, mock_chromeos_root, mock_outside_chroot, mock_load_status_file ): start = 500 @@ -394,6 +396,8 @@ class LLVMBisectionTest(unittest.TestCase): self.assertEqual(str(err.exception), error_message) + mock_chromeos_root.assert_called_once() + mock_outside_chroot.assert_called_once() mock_load_status_file.assert_called_once() @@ -401,9 +405,11 @@ class LLVMBisectionTest(unittest.TestCase): @mock.patch.object(llvm_bisection, "GetCommitsBetween") @mock.patch.object(llvm_bisection, "GetRemainingRange") @mock.patch.object(llvm_bisection, "LoadStatusFile") + @mock.patch.object(chroot, "VerifyChromeOSRoot") @mock.patch.object(chroot, "VerifyOutsideChroot", return_value=True) def testMainFailedWithPendingBuilds( self, + mock_chromeos_root, mock_outside_chroot, mock_load_status_file, mock_get_range, @@ -451,6 +457,8 @@ class LLVMBisectionTest(unittest.TestCase): self.assertEqual(str(err.exception), error_message) + mock_chromeos_root.assert_called_once() + mock_outside_chroot.assert_called_once() mock_load_status_file.assert_called_once() @@ -462,10 +470,12 @@ class LLVMBisectionTest(unittest.TestCase): @mock.patch.object(llvm_bisection, "GetCommitsBetween") @mock.patch.object(llvm_bisection, "GetRemainingRange") @mock.patch.object(llvm_bisection, "LoadStatusFile") + @mock.patch.object(chroot, "VerifyChromeOSRoot") @mock.patch.object(chroot, "VerifyOutsideChroot", return_value=True) def testMainFailedWithDuplicateBuilds( self, mock_outside_chroot, + mock_chromeos_root, mock_load_status_file, mock_get_range, mock_get_revision_and_hash_list, @@ -508,6 +518,8 @@ class LLVMBisectionTest(unittest.TestCase): error_message = 'Revision %d exists already in "jobs"' % rev self.assertEqual(str(err.exception), error_message) + mock_chromeos_root.assert_called_once() + mock_outside_chroot.assert_called_once() mock_load_status_file.assert_called_once() @@ -523,10 +535,12 @@ class LLVMBisectionTest(unittest.TestCase): @mock.patch.object(llvm_bisection, "GetCommitsBetween") @mock.patch.object(llvm_bisection, "GetRemainingRange") @mock.patch.object(llvm_bisection, "LoadStatusFile") + @mock.patch.object(chroot, "VerifyChromeOSRoot") @mock.patch.object(chroot, "VerifyOutsideChroot", 
return_value=True) def testMainFailedToAbandonCL( self, mock_outside_chroot, + mock_chromeos_root, mock_load_status_file, mock_get_range, mock_get_revision_and_hash_list, @@ -574,6 +588,8 @@ class LLVMBisectionTest(unittest.TestCase): self.assertEqual(err.exception.output, error_message) + mock_chromeos_root.assert_called_once() + mock_outside_chroot.assert_called_once() mock_load_status_file.assert_called_once() diff --git a/llvm_tools/llvm_local_bisection.sh b/llvm_tools/llvm_local_bisection.sh index e319080c..0dde49d7 100755 --- a/llvm_tools/llvm_local_bisection.sh +++ b/llvm_tools/llvm_local_bisection.sh @@ -92,7 +92,7 @@ build_llvm () { build_pkg () { local pkg="${1}" - local logfile="/tmp/build-${pkg}.${CURRENT}.out" + local logfile="/tmp/build-${pkg//\//_}.${CURRENT}.out" log "Writing logs to ${logfile}" log "sudo emerge ${pkg}" logdo sudo emerge "${pkg}" \ diff --git a/llvm_tools/manifest_utils.py b/llvm_tools/manifest_utils.py new file mode 100644 index 00000000..67eae4f3 --- /dev/null +++ b/llvm_tools/manifest_utils.py @@ -0,0 +1,103 @@ +# Copyright 2023 The ChromiumOS Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Provides utilities to read and edit the ChromiumOS Manifest entries. + +While this code reads and edits the internal manifest, it should only operate +on toolchain projects (llvm-project, etc.) which are public. +""" + +from pathlib import Path +import shutil +import subprocess +from xml.etree import ElementTree + +import atomic_write_file + + +LLVM_PROJECT_PATH = "src/third_party/llvm-project" + + +class FormattingError(Exception): + """Error occurred when formatting the manifest.""" + + pass + + +class UpdateManifestError(Exception): + """Error occurred when updating the manifest.""" + + pass + + +def make_xmlparser(): + """Return a new xmlparser with custom TreeBuilder.""" + return ElementTree.XMLParser( + target=ElementTree.TreeBuilder(insert_comments=True) + ) + + +def update_chromeos_manifest(revision: str, src_tree: Path) -> Path: + """Replaces the manifest project revision with 'revision'. + + Notably, this function reformats the manifest file to preserve + the formatting as specified by 'cros format'. + + Args: + revision: Revision (git sha) to use in the manifest. + src_tree: Path to the root of the source tree checkout. + + Returns: + The manifest path. + + Post: + The llvm-project revision info in the chromeos repo manifest + is updated with 'revision'. + + Raises: + UpdateManifestError: The manifest could not be changed. + FormattingError: The manifest could not be reformatted. + """ + manifest_path = get_chromeos_manifest_path(src_tree) + parser = make_xmlparser() + xmltree = ElementTree.parse(manifest_path, parser) + update_chromeos_manifest_tree(revision, xmltree.getroot()) + with atomic_write_file.atomic_write(manifest_path, mode="wb") as f: + xmltree.write(f, encoding="UTF-8") + format_manifest(manifest_path) + return manifest_path + + +def get_chromeos_manifest_path(src_tree: Path) -> Path: + """Return the path to the toolchain manifest.""" + return src_tree / "manifest-internal" / "_toolchain.xml" + + +def update_chromeos_manifest_tree(revision: str, xmlroot: ElementTree.Element): + """Update the revision info for LLVM for a manifest XML root.""" + + # This exists mostly for testing. 
+ def is_llvm_project(child): + return ( + child.tag == "project" and child.attrib["path"] == LLVM_PROJECT_PATH + ) + + finder = (child for child in xmlroot if is_llvm_project(child)) + llvm_project_elem = next(finder, None) + # Element objects can be falsy, so we need to explicitly check None. + if llvm_project_elem is not None: + # Update the llvm revision git sha + llvm_project_elem.attrib["revision"] = revision + else: + raise UpdateManifestError("xmltree did not have llvm-project") + + +def format_manifest(repo_manifest: Path): + """Use cros format to format the given manifest.""" + if not shutil.which("cros"): + raise FormattingError( + "unable to format manifest, 'cros'" " executable not in PATH" + ) + cmd = ["cros", "format", repo_manifest] + subprocess.run(cmd, check=True) diff --git a/llvm_tools/manifest_utils_unittest.py b/llvm_tools/manifest_utils_unittest.py new file mode 100644 index 00000000..9a21d990 --- /dev/null +++ b/llvm_tools/manifest_utils_unittest.py @@ -0,0 +1,84 @@ +# Copyright 2023 The ChromiumOS Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import io +from pathlib import Path +import re +import unittest +from xml.etree import ElementTree + +import manifest_utils + + +"""Provides utilities to read and edit the ChromiumOS Manifest entries. + +While this code reads and edits the internal manifest, it should only operate +on toolchain projects (llvm-project, etc.) which are public. +""" + +MANIFEST_FIXTURE = """<?xml version="1.0" encoding="UTF-8"?> +<manifest> + <!-- Comment that should not be removed. + Multiple lines. --> + <include name="_remotes.xml" /> + <default revision="refs/heads/main" + remote="cros" + sync-j="8" /> + + <include name="_kernel_upstream.xml" /> + + <!-- Common projects for developing CrOS. --> + <project path="src/repohooks" + name="chromiumos/repohooks" + groups="minilayout,paygen,firmware,buildtools,labtools,crosvm" /> + <repo-hooks in-project="chromiumos/repohooks" + enabled-list="pre-upload" /> + <project path="chromite" + name="chromiumos/chromite" + groups="minilayout,paygen,firmware,buildtools,chromeos-admin"> + <copyfile src="AUTHORS" dest="AUTHORS" /> + <copyfile src="LICENSE" dest="LICENSE" /> + </project> + <project path="src/third_party/llvm-project" + name="external/github.com/llvm/llvm-project" + groups="notdefault,bazel" + revision="abcd" /> + <project path="chromite/third_party/pyelftools" + name="chromiumos/third_party/pyelftools" + revision="refs/heads/chromeos-0.22" + groups="minilayout,paygen,firmware,buildtools" /> +</manifest> +""" + + +class TestManifestUtils(unittest.TestCase): + """Test manifest_utils.""" + + def test_update_chromeos_manifest(self): + root = ElementTree.fromstring( + MANIFEST_FIXTURE, + parser=manifest_utils.make_xmlparser(), + ) + manifest_utils.update_chromeos_manifest_tree("wxyz", root) + string_root1 = ElementTree.tostring(root) + self.assertRegex( + str(string_root1, encoding="utf-8"), + r'revision="wxyz"', + ) + self.assertRegex( + str(string_root1, encoding="utf-8"), + r"<!-- Comment that should not be removed.", + ) + self.assertNotRegex( + str(string_root1, encoding="utf-8"), + r'revision="abcd"', + ) + # Check idempotence. 
+ manifest_utils.update_chromeos_manifest_tree("wxyz", root) + string_root2 = ElementTree.tostring(root) + self.assertEqual(string_root1, string_root2) + + +if __name__ == "__main__": + unittest.main() diff --git a/llvm_tools/modify_a_tryjob.py b/llvm_tools/modify_a_tryjob.py index 03de606d..2ecca800 100755 --- a/llvm_tools/modify_a_tryjob.py +++ b/llvm_tools/modify_a_tryjob.py @@ -1,5 +1,4 @@ #!/usr/bin/env python3 -# -*- coding: utf-8 -*- # Copyright 2019 The ChromiumOS Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -11,6 +10,7 @@ import argparse import enum import json import os +from pathlib import Path import sys import chroot @@ -45,11 +45,12 @@ def GetCommandLineArgs(): parser.add_argument( "--status_file", required=True, - help="The absolute path to the JSON file that contains the tryjobs used " - "for bisecting LLVM.", + help="The absolute path to the JSON file that contains the tryjobs " + "used for bisecting LLVM.", ) - # Add argument that determines what action to take on the revision specified. + # Add argument that determines what action to take on the revision + # specified. parser.add_argument( "--modify_tryjob", required=True, @@ -66,7 +67,8 @@ def GetCommandLineArgs(): help="The revision to either remove or relaunch.", ) - # Add argument for other change lists that want to run alongside the tryjob. + # Add argument for other change lists that want to run alongside the + # tryjob. parser.add_argument( "--extra_change_lists", type=int, @@ -134,7 +136,6 @@ def GetCLAfterUpdatingPackages( git_hash, svn_version, chroot_path, - patch_metadata_file, svn_option, ): """Updates the packages' LLVM_NEXT.""" @@ -145,7 +146,7 @@ def GetCLAfterUpdatingPackages( llvm_variant=update_chromeos_llvm_hash.LLVMVariant.next, git_hash=git_hash, svn_version=svn_version, - chroot_path=chroot_path, + chroot_path=Path(chroot_path), mode=failure_modes.FailureModes.DISABLE_PATCHES, git_hash_source=svn_option, extra_commit_msg=None, @@ -196,7 +197,6 @@ def AddTryjob( git_hash, revision, chroot_path, - patch_metadata_file, extra_cls, options, builder, @@ -212,7 +212,6 @@ def AddTryjob( git_hash, revision, chroot_path, - patch_metadata_file, svn_option, ) @@ -242,16 +241,17 @@ def PerformTryjobModification( """Removes, relaunches, or adds a tryjob. Args: - revision: The revision associated with the tryjob. - modify_tryjob: What action to take on the tryjob. - Ex: ModifyTryjob.REMOVE, ModifyTryjob.RELAUNCH, ModifyTryjob.ADD - status_file: The .JSON file that contains the tryjobs. - extra_cls: Extra change lists to be run alongside tryjob - options: Extra options to pass into 'cros tryjob'. - builder: The builder to use for 'cros tryjob'. - chroot_path: The absolute path to the chroot (used by 'cros tryjob' when - relaunching a tryjob). - verbose: Determines whether to print the contents of a command to `stdout`. + revision: The revision associated with the tryjob. + modify_tryjob: What action to take on the tryjob. + Ex: ModifyTryjob.REMOVE, ModifyTryjob.RELAUNCH, ModifyTryjob.ADD + status_file: The .JSON file that contains the tryjobs. + extra_cls: Extra change lists to be run alongside tryjob + options: Extra options to pass into 'cros tryjob'. + builder: The builder to use for 'cros tryjob'. + chroot_path: The absolute path to the chroot (used by 'cros tryjob' + when relaunching a tryjob). + verbose: Determines whether to print the contents of a command to + `stdout`. 
""" # Format of 'bisect_contents': @@ -265,7 +265,7 @@ def PerformTryjobModification( # {[TRYJOB_INFORMATION]} # ] # } - with open(status_file) as tryjobs: + with open(status_file, encoding="utf-8") as tryjobs: bisect_contents = json.load(tryjobs) if not bisect_contents["jobs"] and modify_tryjob != ModifyTryjob.ADD: @@ -318,12 +318,10 @@ def PerformTryjobModification( % (tryjob_index, status_file) ) - # Make sure the revision is within the bounds of the start and end of the - # bisection. + # Make sure the revision is within the bounds of the start and end of + # the bisection. elif bisect_contents["start"] < revision < bisect_contents["end"]: - patch_metadata_file = "PATCHES.json" - ( git_hash, revision, @@ -334,7 +332,6 @@ def PerformTryjobModification( git_hash, revision, chroot_path, - patch_metadata_file, extra_cls, options, builder, @@ -352,7 +349,7 @@ def PerformTryjobModification( 'Invalid "modify_tryjob" option provided: %s' % modify_tryjob ) - with open(status_file, "w") as update_tryjobs: + with open(status_file, "w", encoding="utf-8") as update_tryjobs: json.dump( bisect_contents, update_tryjobs, indent=4, separators=(",", ": ") ) @@ -365,6 +362,8 @@ def main(): args_output = GetCommandLineArgs() + chroot.VerifyChromeOSRoot(args_output.chroot_path) + PerformTryjobModification( args_output.revision, ModifyTryjob(args_output.modify_tryjob), diff --git a/llvm_tools/patch_manager_unittest.py b/llvm_tools/patch_manager_unittest.py index 42697d91..91573a82 100755 --- a/llvm_tools/patch_manager_unittest.py +++ b/llvm_tools/patch_manager_unittest.py @@ -12,6 +12,7 @@ from typing import Callable import unittest from unittest import mock +import atomic_write_file import patch_manager import patch_utils @@ -103,7 +104,9 @@ class PatchManagerTest(unittest.TestCase): ), ] patches_path = dirpath / "PATCHES.json" - with patch_utils.atomic_write(patches_path, encoding="utf-8") as f: + with atomic_write_file.atomic_write( + patches_path, encoding="utf-8" + ) as f: json.dump([pe.to_dict() for pe in patch_entries], f) def _harness1( diff --git a/llvm_tools/patch_sync/src/patch_parsing.rs b/llvm_tools/patch_sync/src/patch_parsing.rs index 00153834..7f545e5b 100644 --- a/llvm_tools/patch_sync/src/patch_parsing.rs +++ b/llvm_tools/patch_sync/src/patch_parsing.rs @@ -3,11 +3,11 @@ // found in the LICENSE file. use std::collections::{BTreeMap, BTreeSet}; -use std::fs::{copy, File}; +use std::fs::{copy, read_to_string, File}; use std::io::{BufRead, BufReader, Read, Write}; use std::path::{Path, PathBuf}; -use anyhow::{anyhow, Context, Result}; +use anyhow::{anyhow, bail, Context, Result}; use serde::{Deserialize, Serialize}; use sha2::{Digest, Sha256}; @@ -45,18 +45,22 @@ impl PatchDictSchema { pub struct PatchCollection { pub patches: Vec<PatchDictSchema>, pub workdir: PathBuf, + pub indent_len: usize, } impl PatchCollection { /// Create a `PatchCollection` from a PATCHES. pub fn parse_from_file(json_file: &Path) -> Result<Self> { - Ok(Self { - patches: serde_json::from_reader(File::open(json_file)?)?, - workdir: json_file + // We can't just use a file reader because we + // need to know what the original indent_len is. + let contents = read_to_string(json_file)?; + Self::parse_from_str( + json_file .parent() .ok_or_else(|| anyhow!("failed to get json_file parent"))? .to_path_buf(), - }) + &contents, + ) } /// Create a `PatchCollection` from a string literal and a workdir. 
@@ -64,6 +68,7 @@ impl PatchCollection { Ok(Self { patches: serde_json::from_str(contents).context("parsing from str")?, workdir, + indent_len: predict_indent(contents), }) } @@ -72,6 +77,7 @@ impl PatchCollection { Self { patches: self.patches.iter().cloned().filter(f).collect(), workdir: self.workdir.clone(), + indent_len: self.indent_len, } } @@ -80,6 +86,7 @@ impl PatchCollection { Self { patches: self.patches.iter().map(f).collect(), workdir: self.workdir.clone(), + indent_len: self.indent_len, } } @@ -89,7 +96,7 @@ impl PatchCollection { } /// Compute the set-set subtraction, returning a new `PatchCollection` which - /// keeps the minuend's workdir. + /// keeps the minuend's workdir and indent level. pub fn subtract(&self, subtrahend: &Self) -> Result<Self> { let mut new_patches = Vec::new(); // This is O(n^2) when it could be much faster, but n is always going to be less @@ -111,6 +118,7 @@ impl PatchCollection { Ok(Self { patches: new_patches, workdir: self.workdir.clone(), + indent_len: self.indent_len, }) } @@ -173,6 +181,7 @@ impl PatchCollection { .collect(); Self { workdir: self.workdir.clone(), + indent_len: self.indent_len, patches: cloned_patches, } } @@ -236,6 +245,7 @@ impl PatchCollection { Ok(Self { workdir: self.workdir.clone(), + indent_len: self.indent_len, patches: combined_patches, }) } @@ -262,11 +272,12 @@ impl PatchCollection { } pub fn serialize_patches(&self) -> Result<String> { + let indent_str = " ".repeat(self.indent_len); let mut serialization_buffer = Vec::<u8>::new(); // Four spaces to indent json serialization. let mut serializer = serde_json::Serializer::with_formatter( &mut serialization_buffer, - serde_json::ser::PrettyFormatter::with_indent(b" "), + serde_json::ser::PrettyFormatter::with_indent(indent_str.as_bytes()), ); self.patches .serialize(&mut serializer) @@ -329,8 +340,7 @@ pub fn new_patches( // Set up the current patch collection. let cur_collection = PatchCollection::parse_from_file(patches_path) .with_context(|| format!("parsing {} PATCHES.json", platform))?; - let cur_collection = filter_patches_by_platform(&cur_collection, platform); - let cur_collection = cur_collection.filter_patches(|p| cur_collection.patch_exists(p)); + validate_patches(&cur_collection, platform)?; // Set up the old patch collection. let old_collection = PatchCollection::parse_from_str( @@ -367,6 +377,25 @@ pub fn filter_patches_by_platform(collection: &PatchCollection, platform: &str) }) } +/// Verify the patches all exist and apply to the given platform. +/// +/// If all good, return Unit. Otherwise, return an Err. +pub fn validate_patches(collection: &PatchCollection, platform: &str) -> Result<()> { + for p in &collection.patches { + if !collection.patch_exists(p) { + bail!("Patch {} does not exist", p.rel_patch_path); + } + if !p.platforms.is_empty() && !p.platforms.contains(platform) { + bail!( + "Patch {} did not apply to platform {}", + p.rel_patch_path, + platform + ); + } + } + Ok(()) +} + /// Get the hash from the patch file contents. /// /// Not every patch file actually contains its own hash, @@ -413,6 +442,24 @@ fn copy_create_parents(from: &Path, to: &Path) -> Result<()> { Ok(()) } +/// Given a json string, predict and return the maximum indentation unit the json string uses. 
+pub fn predict_indent(json: &str) -> usize { + let indents = json + .split('\n') + .map(|line| line.len() - line.trim_start_matches(' ').len()) + .collect::<Vec<usize>>(); + if indents.iter().all(|x| x % 4 == 0) { + return 4; + } + if indents.iter().all(|x| x % 2 == 0) { + return 2; + } + if indents.iter().all(|x| *x == 0) { + return 0; + } + 1 +} + #[cfg(test)] mod test { @@ -444,6 +491,40 @@ mod test { } #[test] + fn test_keep_indent2() { + let example_json = "\ +[ + { + \"rel_patch_path\": \"some_patch.\", + \"metadata\": null, + \"platforms\": [] + } +] +"; + let collection1 = PatchCollection::parse_from_str(PathBuf::new(), example_json) + .expect("could not parse example_json"); + assert_eq!(collection1.indent_len, 2); + let collection2 = PatchCollection::parse_from_str( + PathBuf::new(), + &collection1 + .serialize_patches() + .expect("expected to serialize patches"), + ) + .expect("could not parse from serialization"); + assert_eq!(collection2.indent_len, 2); + let mut collection3 = collection1; + collection3.indent_len = 4; + let collection4 = PatchCollection::parse_from_str( + PathBuf::new(), + &collection3 + .serialize_patches() + .expect("expected to serialize patches"), + ) + .expect("could not parse from serialization"); + assert_eq!(collection4.indent_len, 4) + } + + #[test] fn test_union() { let patch1 = PatchDictSchema { rel_patch_path: "a".into(), @@ -466,10 +547,12 @@ mod test { let collection1 = PatchCollection { workdir: PathBuf::new(), patches: vec![patch1, patch2], + indent_len: 0, }; let collection2 = PatchCollection { workdir: PathBuf::new(), patches: vec![patch3], + indent_len: 0, }; let union = collection1 .union_helper( @@ -503,10 +586,12 @@ mod test { let collection1 = PatchCollection { workdir: PathBuf::new(), patches: vec![patch1.clone()], + indent_len: 4, }; let collection2 = PatchCollection { workdir: PathBuf::new(), patches: vec![patch1], + indent_len: 4, }; let union = collection1 .union_helper( @@ -577,14 +662,17 @@ mod test { let collection1 = PatchCollection { workdir: PathBuf::new(), patches: vec![patch1, patch2.clone()], + indent_len: 0, }; let collection2 = PatchCollection { workdir: PathBuf::new(), patches: vec![patch1_updated, patch2.clone()], + indent_len: 0, }; let collection3 = PatchCollection { workdir: PathBuf::new(), patches: vec![patch2], + indent_len: 0, }; vec![collection1, collection2, collection3] } diff --git a/llvm_tools/patch_utils.py b/llvm_tools/patch_utils.py index affb3d0d..0c8ad19c 100644 --- a/llvm_tools/patch_utils.py +++ b/llvm_tools/patch_utils.py @@ -12,7 +12,9 @@ from pathlib import Path import re import subprocess import sys -from typing import Any, Dict, IO, Iterable, List, Optional, Tuple, Union +from typing import Any, Dict, IO, Iterable, List, Optional, Tuple + +import atomic_write_file CHECKED_FILE_RE = re.compile(r"^checking file\s+(.*)$") @@ -22,38 +24,6 @@ HUNK_END_RE = re.compile(r"^--\s*$") PATCH_SUBFILE_HEADER_RE = re.compile(r"^\+\+\+ [ab]/(.*)$") -@contextlib.contextmanager -def atomic_write(fp: Union[Path, str], mode="w", *args, **kwargs): - """Write to a filepath atomically. - - This works by a temp file swap, created with a .tmp suffix in - the same directory briefly until being renamed to the desired - filepath. - - Args: - fp: Filepath to open. - mode: File mode; can be 'w', 'wb'. Default 'w'. - *args: Passed to Path.open as nargs. - **kwargs: Passed to Path.open as kwargs. - - Raises: - ValueError when the mode is invalid. 
-    """
-    if isinstance(fp, str):
-        fp = Path(fp)
-    if mode not in ("w", "wb"):
-        raise ValueError(f"mode {mode} not accepted")
-    temp_fp = fp.with_suffix(fp.suffix + ".tmp")
-    try:
-        with temp_fp.open(mode, *args, **kwargs) as f:
-            yield f
-    except:
-        if temp_fp.is_file():
-            temp_fp.unlink()
-        raise
-    temp_fp.rename(fp)
-
-
 @dataclasses.dataclass
 class Hunk:
     """Represents a patch Hunk."""
@@ -260,6 +230,7 @@ class PatchEntry:
             "-d",
             root_dir.absolute(),
             "-f",
+            "-E",
             "-p1",
             "--no-backup-if-mismatch",
             "-i",
@@ -324,6 +295,16 @@ def json_to_patch_entries(workdir: Path, json_fd: IO[str]) -> List[PatchEntry]:
     return [PatchEntry.from_dict(workdir, d) for d in json.load(json_fd)]
 
 
+def json_str_to_patch_entries(workdir: Path, json_str: str) -> List[PatchEntry]:
+    """Convert a JSON string to a List[PatchEntry].
+
+    Examples:
+        >>> s = open('PATCHES.json').read()
+        >>> patch_entries = json_str_to_patch_entries(Path(), s)
+    """
+    return [PatchEntry.from_dict(workdir, d) for d in json.loads(json_str)]
+
+
 def _print_failed_patch(pe: PatchEntry, failed_hunks: Dict[str, List[Hunk]]):
     """Print information about a single failing PatchEntry.
 
@@ -462,13 +443,27 @@ def git_clean_context(git_root_dir: Path):
         clean_src_tree(git_root_dir)
 
 
-def _write_json_changes(patches: List[Dict[str, Any]], file_io: IO[str]):
+def _write_json_changes(
+    patches: List[Dict[str, Any]], file_io: IO[str], indent_len: int = 2
+):
     """Write JSON changes to file, does not acquire new file lock."""
-    json.dump(patches, file_io, indent=4, separators=(",", ": "))
+    json.dump(patches, file_io, indent=indent_len, separators=(",", ": "))
     # Need to add a newline as json.dump omits it.
     file_io.write("\n")
 
 
+def predict_indent(patches_lines: List[str]) -> int:
+    """Given file lines, predict and return the largest consistent indentation unit."""
+    indents = [len(x) - len(x.lstrip(" ")) for x in patches_lines]
+    if all(x % 4 == 0 for x in indents):
+        return 4
+    if all(x % 2 == 0 for x in indents):
+        return 2
+    if all(x == 0 for x in indents):
+        return 0
+    return 1
+
+
 def update_version_ranges(
     svn_version: int, llvm_src_dir: Path, patches_json_fp: Path
 ) -> PatchInfo:
@@ -488,15 +483,19 @@
         PatchInfo for applied and disabled patches.
     """
     with patches_json_fp.open(encoding="utf-8") as f:
-        patch_entries = json_to_patch_entries(
-            patches_json_fp.parent,
-            f,
-        )
+        contents = f.read()
+    indent_len = predict_indent(contents.splitlines())
+    patch_entries = json_str_to_patch_entries(
+        patches_json_fp.parent,
+        contents,
+    )
     modified_entries, applied_patches = update_version_ranges_with_entries(
         svn_version, llvm_src_dir, patch_entries
     )
-    with atomic_write(patches_json_fp, encoding="utf-8") as f:
-        _write_json_changes([p.to_dict() for p in patch_entries], f)
+    with atomic_write_file.atomic_write(patches_json_fp, encoding="utf-8") as f:
+        _write_json_changes(
+            [p.to_dict() for p in patch_entries], f, indent_len=indent_len
+        )
     for entry in modified_entries:
         print(
             f"Stopped applying {entry.rel_patch_path} ({entry.title()}) "
@@ -532,7 +531,9 @@
     """
     modified_entries: List[PatchEntry] = []
     applied_patches: List[PatchEntry] = []
-    active_patches = (pe for pe in patch_entries if not pe.is_old(svn_version))
+    active_patches = (
+        pe for pe in patch_entries if pe.can_patch_version(svn_version)
+    )
     with git_clean_context(llvm_src_dir):
         for pe in active_patches:
             test_result = pe.test_apply(llvm_src_dir)
@@ -570,14 +571,16 @@ def remove_old_patches(
         PatchInfo for modified patches.
     """
     with patches_json_fp.open(encoding="utf-8") as f:
-        patches_list = json.load(f)
-    patch_entries = (
-        PatchEntry.from_dict(llvm_src_dir, elem) for elem in patches_list
+        contents = f.read()
+    indent_len = predict_indent(contents.splitlines())
+    patch_entries = json_str_to_patch_entries(
+        llvm_src_dir,
+        contents,
     )
     oldness = [(entry, entry.is_old(svn_version)) for entry in patch_entries]
     filtered_entries = [entry.to_dict() for entry, old in oldness if not old]
-    with atomic_write(patches_json_fp, encoding="utf-8") as f:
-        _write_json_changes(filtered_entries, f)
+    with atomic_write_file.atomic_write(patches_json_fp, encoding="utf-8") as f:
+        _write_json_changes(filtered_entries, f, indent_len=indent_len)
     removed_entries = [entry for entry, old in oldness if old]
     plural_patches = "patch" if len(removed_entries) == 1 else "patches"
     print(f"Removed {len(removed_entries)} old {plural_patches}:")
diff --git a/llvm_tools/patch_utils_unittest.py b/llvm_tools/patch_utils_unittest.py
index b8c21390..dfee55e3 100755
--- a/llvm_tools/patch_utils_unittest.py
+++ b/llvm_tools/patch_utils_unittest.py
@@ -20,32 +20,23 @@ import patch_utils as pu
 class TestPatchUtils(unittest.TestCase):
     """Test the patch_utils."""
 
-    def test_atomic_write(self):
-        """Test that atomic write safely writes."""
-        prior_contents = "This is a test written by patch_utils_unittest.py\n"
-        new_contents = "I am a test written by patch_utils_unittest.py\n"
-        with tempfile.TemporaryDirectory(
-            prefix="patch_utils_unittest"
-        ) as dirname:
-            dirpath = Path(dirname)
-            filepath = dirpath / "test_atomic_write.txt"
-            with filepath.open("w", encoding="utf-8") as f:
-                f.write(prior_contents)
-
-            def _t():
-                with pu.atomic_write(filepath, encoding="utf-8") as f:
-                    f.write(new_contents)
-                    raise Exception("Expected failure")
-
-            self.assertRaises(Exception, _t)
-            with filepath.open(encoding="utf-8") as f:
-                lines = f.readlines()
-            self.assertEqual(lines[0], prior_contents)
-            with pu.atomic_write(filepath, encoding="utf-8") as f:
-                f.write(new_contents)
-            with filepath.open(encoding="utf-8") as f:
-                lines = f.readlines()
-            self.assertEqual(lines[0], new_contents)
+    def test_predict_indent(self):
+        test_str1 = """
+a
+  a
+    a
+  a
+a
+"""
+        self.assertEqual(pu.predict_indent(test_str1.splitlines()), 2)
+        test_str2 = """
+a
+    a
+        a
+    a
+a
+"""
+        self.assertEqual(pu.predict_indent(test_str2.splitlines()), 4)
 
     def test_from_to_dict(self):
         """Test to and from dict conversion."""
@@ -123,6 +114,9 @@ class TestPatchUtils(unittest.TestCase):
         }
     ]
     """
+        result = pu.json_str_to_patch_entries(Path(), patches_json)
+        self.assertEqual(len(result), 4)
+
         result = pu.json_to_patch_entries(Path(), io.StringIO(patches_json))
         self.assertEqual(len(result), 4)
 
@@ -245,6 +239,16 @@ Hunk #1 SUCCEEDED at 96 with fuzz 1.
                     "until": 2,
                 },
             ),
+            pu.PatchEntry(
+                workdir=dirpath,
+                rel_patch_path="z.patch",
+                metadata=None,
+                platforms=None,
+                version_range={
+                    "from": 4,
+                    "until": 5,
+                },
+            ),
         ]
         patches[0].apply = mock.MagicMock(
             return_value=pu.PatchResult(
@@ -254,6 +258,9 @@ Hunk #1 SUCCEEDED at 96 with fuzz 1.
         patches[1].apply = mock.MagicMock(
             return_value=pu.PatchResult(succeeded=True)
         )
+        patches[2].apply = mock.MagicMock(
+            return_value=pu.PatchResult(succeeded=False)
+        )
         results, _ = pu.update_version_ranges_with_entries(
             1, dirpath, patches
         )
@@ -263,6 +270,7 @@
self.assertEqual(results[0].version_range, {"from": 0, "until": 1}) self.assertEqual(patches[0].version_range, {"from": 0, "until": 1}) self.assertEqual(patches[1].version_range, {"from": 0, "until": 2}) + self.assertEqual(patches[2].version_range, {"from": 4, "until": 5}) @mock.patch("builtins.print") def test_remove_old_patches(self, _): diff --git a/llvm_tools/update_chromeos_llvm_hash.py b/llvm_tools/update_chromeos_llvm_hash.py index 75c6ce6c..9a8754d4 100755 --- a/llvm_tools/update_chromeos_llvm_hash.py +++ b/llvm_tools/update_chromeos_llvm_hash.py @@ -1,5 +1,4 @@ #!/usr/bin/env python3 -# -*- coding: utf-8 -*- # Copyright 2019 The ChromiumOS Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -17,12 +16,15 @@ import os from pathlib import Path import re import subprocess -from typing import Dict, Iterable +import textwrap +from typing import Dict, Iterable, List, Optional +import atomic_write_file import chroot import failure_modes import get_llvm_hash import git +import manifest_utils import patch_utils import subprocess_helpers @@ -33,6 +35,7 @@ DEFAULT_PACKAGES = [ "sys-libs/compiler-rt", "sys-libs/libcxx", "sys-libs/llvm-libunwind", + "sys-libs/scudo", ] DEFAULT_MANIFEST_PACKAGES = ["sys-devel/llvm"] @@ -46,9 +49,87 @@ class LLVMVariant(enum.Enum): next = "LLVM_NEXT_HASH" -# If set to `True`, then the contents of `stdout` after executing a command will -# be displayed to the terminal. -verbose = False +class PortagePackage: + """Represents a portage package with location info.""" + + def __init__(self, chromeos_root: Path, package: str): + """Create a new PortagePackage. + + Args: + chromeos_root: Path to the root of the code checkout. + package: "category/package" string. + """ + self.package = package + potential_ebuild_path = PortagePackage.find_package_ebuild( + chromeos_root, package + ) + if potential_ebuild_path.is_symlink(): + self.uprev_target = potential_ebuild_path.absolute() + self.ebuild_path = potential_ebuild_path.resolve() + else: + # Should have a 9999 ebuild, no uprevs needed. + self.uprev_target = None + self.ebuild_path = potential_ebuild_path.absolute() + + @staticmethod + def find_package_ebuild(chromeos_root: Path, package: str) -> Path: + """Look up the package's ebuild location.""" + chromeos_root_str = str(chromeos_root) + ebuild_paths = chroot.GetChrootEbuildPaths( + chromeos_root_str, + [package], + ) + converted = chroot.ConvertChrootPathsToAbsolutePaths( + chromeos_root_str, ebuild_paths + )[0] + return Path(converted) + + def package_dir(self) -> Path: + """Return the package directory.""" + return self.ebuild_path.parent + + def update( + self, llvm_variant: LLVMVariant, git_hash: str, svn_version: int + ): + """Update the package with the new LLVM git sha and revision. + + Args: + llvm_variant: Which LLVM hash to update. + Either LLVM_HASH or LLVM_NEXT_HASH. + git_hash: Upstream LLVM git hash to update to. + svn_version: Matching LLVM revision string for the git_hash. + """ + live_ebuild = self.live_ebuild() + if live_ebuild: + # Working with a -9999 ebuild package here, no + # upreving. + UpdateEbuildLLVMHash( + live_ebuild, llvm_variant, git_hash, svn_version + ) + return + if not self.uprev_target: + # We can exit early if we're not working with a live ebuild, + # and we don't have something to uprev. 
+            raise RuntimeError(
+                f"Cannot update: no live ebuild or symlink found for {self.package}"
+            )
+
+        UpdateEbuildLLVMHash(
+            self.ebuild_path, llvm_variant, git_hash, svn_version
+        )
+        if llvm_variant == LLVMVariant.current:
+            UprevEbuildToVersion(str(self.uprev_target), svn_version, git_hash)
+        else:
+            UprevEbuildSymlink(str(self.uprev_target))
+
+    def live_ebuild(self) -> Optional[Path]:
+        """Path to the live ebuild if it exists.
+
+        Returns:
+            The path to the live ebuild if it exists. None otherwise.
+        """
+        matches = self.package_dir().glob("*-9999.ebuild")
+        return next(matches, None)
 
 
 def defaultCrosRoot() -> Path:
@@ -104,14 +185,6 @@ def GetCommandLineArgs():
         "(default: %(default)s)",
     )
 
-    # Add argument for whether to display command contents to `stdout`.
-    parser.add_argument(
-        "--verbose",
-        action="store_true",
-        help="display contents of a command to the terminal "
-        "(default: %(default)s)",
-    )
-
     # Add argument for the LLVM hash to update
     parser.add_argument(
         "--is_llvm_next",
@@ -150,59 +223,23 @@ def GetCommandLineArgs():
         help="the .json file that has all the patches and their "
         "metadata if applicable (default: PATCHES.json inside $FILESDIR)",
     )
+    parser.add_argument(
+        "--repo_manifest",
+        action="store_true",
+        help="Updates the llvm-project revision attribute"
+        " in the internal manifest.",
+    )
 
     # Parse the command line.
-    args_output = parser.parse_args()
-
-    # FIXME: We shouldn't be using globals here, but until we fix it, make pylint
-    # stop complaining about it.
-    # pylint: disable=global-statement
-    global verbose
+    return parser.parse_args()
 
-    verbose = args_output.verbose
-
-    return args_output
-
-
-def GetEbuildPathsFromSymLinkPaths(symlinks):
-    """Reads the symlink(s) to get the ebuild path(s) to the package(s).
-
-    Args:
-        symlinks: A list of absolute path symlink/symlinks that point
-        to the package's ebuild.
-
-    Returns:
-        A dictionary where the key is the absolute path of the symlink and the value
-        is the absolute path to the ebuild that was read from the symlink.
-
-    Raises:
-        ValueError: Invalid symlink(s) were provided.
-    """
-
-    # A dictionary that holds:
-    #   key: absolute symlink path
-    #   value: absolute ebuild path
-    resolved_paths = {}
-
-    # Iterate through each symlink.
-    #
-    # For each symlink, check that it is a valid symlink,
-    # and then construct the ebuild path, and
-    # then add the ebuild path to the dict.
-    for cur_symlink in symlinks:
-        if not os.path.islink(cur_symlink):
-            raise ValueError(f"Invalid symlink provided: {cur_symlink}")
-
-        # Construct the absolute path to the ebuild.
-        ebuild_path = os.path.realpath(cur_symlink)
-
-        if cur_symlink not in resolved_paths:
-            resolved_paths[cur_symlink] = ebuild_path
-
-    return resolved_paths
-
-
-def UpdateEbuildLLVMHash(ebuild_path, llvm_variant, git_hash, svn_version):
+def UpdateEbuildLLVMHash(
+    ebuild_path: Path,
+    llvm_variant: LLVMVariant,
+    git_hash: str,
+    svn_version: int,
+):
     """Updates the LLVM hash in the ebuild.
 
     The build changes are staged for commit in the temporary repo.
@@ -218,33 +255,26 @@
     of the changes or failed to update the LLVM hash.
     """
 
-    # Iterate through each ebuild.
-    #
     # For each ebuild, read the file in
     # advance and then create a temporary file
     # that gets updated with the new LLVM hash
     # and revision number and then the ebuild file
     # gets updated to the temporary file.
- if not os.path.isfile(ebuild_path): raise ValueError(f"Invalid ebuild path provided: {ebuild_path}") - temp_ebuild_file = f"{ebuild_path}.temp" - - with open(ebuild_path) as ebuild_file: - # write updates to a temporary file in case of interrupts - with open(temp_ebuild_file, "w") as temp_file: - for cur_line in ReplaceLLVMHash( - ebuild_file, llvm_variant, git_hash, svn_version - ): - temp_file.write(cur_line) - os.rename(temp_ebuild_file, ebuild_path) - - # Get the path to the parent directory. - parent_dir = os.path.dirname(ebuild_path) - + with open(ebuild_path, encoding="utf-8") as ebuild_file: + new_lines = list( + ReplaceLLVMHash(ebuild_file, llvm_variant, git_hash, svn_version) + ) + with atomic_write_file.atomic_write( + ebuild_path, "w", encoding="utf-8" + ) as ebuild_file: + ebuild_file.writelines(new_lines) # Stage the changes. - subprocess.check_output(["git", "-C", parent_dir, "add", ebuild_path]) + subprocess.check_output( + ["git", "-C", ebuild_path.parent, "add", ebuild_path] + ) def ReplaceLLVMHash(ebuild_lines, llvm_variant, git_hash, svn_version): @@ -367,46 +397,11 @@ def UprevEbuildToVersion(symlink, svn_version, git_hash): # Create a symlink of the renamed ebuild new_symlink = new_ebuild[: -len(".ebuild")] + "-r1.ebuild" subprocess.check_output(["ln", "-s", "-r", new_ebuild, new_symlink]) - - if not os.path.islink(new_symlink): - raise ValueError( - f'Invalid symlink name: {new_ebuild[:-len(".ebuild")]}' - ) - subprocess.check_output(["git", "-C", symlink_dir, "add", new_symlink]) - # Remove the old symlink subprocess.check_output(["git", "-C", symlink_dir, "rm", symlink]) -def CreatePathDictionaryFromPackages(chroot_path, update_packages): - """Creates a symlink and ebuild path pair dictionary from the packages. - - Args: - chroot_path: The absolute path to the chroot. - update_packages: The filtered packages to be updated. - - Returns: - A dictionary where the key is the absolute path to the symlink - of the package and the value is the absolute path to the ebuild of - the package. - """ - - # Construct a list containing the chroot file paths of the package(s). - chroot_file_paths = chroot.GetChrootEbuildPaths( - chroot_path, update_packages - ) - - # Construct a list containing the symlink(s) of the package(s). - symlink_file_paths = chroot.ConvertChrootPathsToAbsolutePaths( - chroot_path, chroot_file_paths - ) - - # Create a dictionary where the key is the absolute path of the symlink to - # the package and the value is the absolute path to the ebuild of the package. - return GetEbuildPathsFromSymLinkPaths(symlink_file_paths) - - def RemovePatchesFromFilesDir(patches): """Removes the patches from $FILESDIR of a package. @@ -506,8 +501,8 @@ def StagePackagesPatchResultsForCommit(package_info_dict, commit_messages): return commit_messages -def UpdateManifests(packages: Iterable[str], chroot_path: Path): - """Updates manifest files for packages. +def UpdatePortageManifests(packages: Iterable[str], chroot_path: Path): + """Updates portage manifest files for packages. Args: packages: A list of packages to update manifests for. 
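
The UpdateEbuildLLVMHash rewrite above replaces the manual temp-file-and-rename dance with a read-everything-then-swap pattern. A minimal sketch of that pattern, assuming only the atomic_write_file module introduced earlier in this change (rewrite_file and transform are illustrative names, not part of the commit):

from pathlib import Path
from typing import Callable, Iterable, List

import atomic_write_file


def rewrite_file(
    path: Path, transform: Callable[[Iterable[str]], List[str]]
) -> None:
    # Read and transform every line before touching the target, so a
    # failure during transformation leaves the original file intact.
    with open(path, encoding="utf-8") as f:
        new_lines = list(transform(f))
    # atomic_write stages output in a sibling .tmp file and renames it
    # over the target only if the block exits without raising.
    with atomic_write_file.atomic_write(path, "w", encoding="utf-8") as f:
        f.writelines(new_lines)
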
@@ -518,17 +513,21 @@ def UpdateManifests(packages: Iterable[str], chroot_path: Path): """ manifest_ebuilds = chroot.GetChrootEbuildPaths(chroot_path, packages) for ebuild_path in manifest_ebuilds: + ebuild_dir = os.path.dirname(ebuild_path) subprocess_helpers.ChrootRunCommand( chroot_path, ["ebuild", ebuild_path, "manifest"] ) + subprocess_helpers.ChrootRunCommand( + chroot_path, ["git", "-C", ebuild_dir, "add", "Manifest"] + ) def UpdatePackages( packages: Iterable[str], manifest_packages: Iterable[str], - llvm_variant, - git_hash, - svn_version, + llvm_variant: LLVMVariant, + git_hash: str, + svn_version: int, chroot_path: Path, mode, git_hash_source, @@ -558,86 +557,55 @@ def UpdatePackages( Gerrit commit URL and the second pair is the change list number. """ - # Construct a dictionary where the key is the absolute path of the symlink to - # the package and the value is the absolute path to the ebuild of the package. - paths_dict = CreatePathDictionaryFromPackages(chroot_path, packages) - - repo_path = os.path.dirname(next(iter(paths_dict.values()))) - - branch = "update-" + llvm_variant.value + "-" + git_hash + portage_packages = (PortagePackage(chroot_path, pkg) for pkg in packages) + chromiumos_overlay_path = ( + chroot_path / "src" / "third_party" / "chromiumos-overlay" + ) + branch_name = "update-" + llvm_variant.value + "-" + git_hash + + commit_message_header = "llvm" + if llvm_variant == LLVMVariant.next: + commit_message_header = "llvm-next" + if git_hash_source in get_llvm_hash.KNOWN_HASH_SOURCES: + commit_message_header += ( + f"/{git_hash_source}: upgrade to {git_hash} (r{svn_version})" + ) + else: + commit_message_header += f": upgrade to {git_hash} (r{svn_version})" - git.CreateBranch(repo_path, branch) + commit_lines = [ + commit_message_header + "\n", + "The following packages have been updated:", + ] + # Holds the list of packages that are updating. + updated_packages: List[str] = [] + git.CreateBranch(chromiumos_overlay_path, branch_name) try: - commit_message_header = "llvm" - if llvm_variant == LLVMVariant.next: - commit_message_header = "llvm-next" - if git_hash_source in get_llvm_hash.KNOWN_HASH_SOURCES: - commit_message_header += ( - f"/{git_hash_source}: upgrade to {git_hash} (r{svn_version})" - ) - else: - commit_message_header += f": upgrade to {git_hash} (r{svn_version})" - - commit_lines = [ - commit_message_header + "\n", - "The following packages have been updated:", - ] - - # Holds the list of packages that are updating. - packages = [] - - # Iterate through the dictionary. - # - # For each iteration: - # 1) Update the ebuild's LLVM hash. - # 2) Uprev the ebuild (symlink). - # 3) Add the modified package to the commit message. 
-        for symlink_path, ebuild_path in paths_dict.items():
-            path_to_ebuild_dir = os.path.dirname(ebuild_path)
-
-            UpdateEbuildLLVMHash(
-                ebuild_path, llvm_variant, git_hash, svn_version
-            )
-
-            if llvm_variant == LLVMVariant.current:
-                UprevEbuildToVersion(symlink_path, svn_version, git_hash)
-            else:
-                UprevEbuildSymlink(symlink_path)
-
-            cur_dir_name = os.path.basename(path_to_ebuild_dir)
-            parent_dir_name = os.path.basename(
-                os.path.dirname(path_to_ebuild_dir)
-            )
-
-            packages.append(f"{parent_dir_name}/{cur_dir_name}")
-            commit_lines.append(f"{parent_dir_name}/{cur_dir_name}")
-
+        for pkg in portage_packages:
+            pkg.update(llvm_variant, git_hash, svn_version)
+            updated_packages.append(pkg.package)
+            commit_lines.append(pkg.package)
         if manifest_packages:
-            UpdateManifests(manifest_packages, chroot_path)
+            UpdatePortageManifests(manifest_packages, chroot_path)
             commit_lines.append("Updated manifest for:")
             commit_lines.extend(manifest_packages)
-
         EnsurePackageMaskContains(chroot_path, git_hash)
-
         # Handle the patches for each package.
         package_info_dict = UpdatePackagesPatchMetadataFile(
-            chroot_path, svn_version, packages, mode
+            chroot_path, svn_version, updated_packages, mode
         )
-
         # Update the commit message if changes were made to a package's patches.
         commit_lines = StagePackagesPatchResultsForCommit(
             package_info_dict, commit_lines
         )
-
         if extra_commit_msg:
             commit_lines.append(extra_commit_msg)
-
-        change_list = git.UploadChanges(repo_path, branch, commit_lines)
-
+        change_list = git.UploadChanges(
+            chromiumos_overlay_path, branch_name, commit_lines
+        )
     finally:
-        git.DeleteBranch(repo_path, branch)
-
+        git.DeleteBranch(chromiumos_overlay_path, branch_name)
     return change_list
 
 
@@ -660,7 +628,7 @@ def EnsurePackageMaskContains(chroot_path, git_hash):
     mask_path = os.path.join(
         overlay_dir, "profiles/targets/chromeos/package.mask"
     )
-    with open(mask_path, "r+") as mask_file:
+    with open(mask_path, "r+", encoding="utf-8") as mask_file:
         mask_contents = mask_file.read()
         expected_line = f"=sys-devel/llvm-{llvm_major_version}.0_pre*\n"
         if expected_line not in mask_contents:
@@ -715,7 +683,7 @@ def UpdatePackagesPatchMetadataFile(
             )
             chroot_ebuild_path = Path(
                 chroot.ConvertChrootPathsToAbsolutePaths(
-                    chroot_path, [chroot_ebuild_str]
+                    str(chroot_path), [chroot_ebuild_str]
                 )[0]
             )
             patches_json_fp = (
@@ -747,12 +715,61 @@
             patches_info = patch_utils.update_version_ranges(
                 svn_version, src_path, patches_json_fp
             )
+        else:
+            raise RuntimeError(f"unsupported failure mode: {mode}")
 
         package_info[cur_package] = patches_info._asdict()
 
     return package_info
 
 
+def ChangeRepoManifest(git_hash: str, src_tree: Path):
+    """Change the repo internal manifest for llvm-project.
+
+    Args:
+        git_hash: The LLVM git hash to change to.
+        src_tree: ChromiumOS source tree checkout.
+
+    Returns:
+        The uploaded changelist CommitContents.
+    """
+    manifest_dir = manifest_utils.get_chromeos_manifest_path(src_tree).parent
+    branch_name = "update-llvm-project-" + git_hash
+    commit_lines = (
+        textwrap.dedent(
+            f"""
+            manifest: Update llvm-project to {git_hash}
+
+            Upgrade the local LLVM revision to match the new llvm ebuild
+            hash. This must be merged along with any chromiumos-overlay
+            changes to LLVM. Automatic uprevs rely on the manifest hash
+            to match what is specified by LLVM_HASH.
+
+            This CL is generated by the update_chromeos_llvm_hash.py script.
+ + BUG=None + TEST=CQ + """ + ) + .lstrip() + .splitlines() + ) + + git.CreateBranch(manifest_dir, branch_name) + try: + manifest_path = manifest_utils.update_chromeos_manifest( + git_hash, + src_tree, + ) + subprocess.run( + ["git", "-C", manifest_dir, "add", manifest_path.name], check=True + ) + change_list = git.UploadChanges(manifest_dir, branch_name, commit_lines) + finally: + git.DeleteBranch(manifest_dir, branch_name) + return change_list + + def main(): """Updates the LLVM next hash for each package. @@ -764,6 +781,8 @@ def main(): args_output = GetCommandLineArgs() + chroot.VerifyChromeOSRoot(args_output.chroot_path) + llvm_variant = LLVMVariant.current if args_output.is_llvm_next: llvm_variant = LLVMVariant.next @@ -773,7 +792,6 @@ def main(): git_hash, svn_version = get_llvm_hash.GetLLVMHashAndVersionFromSVNOption( git_hash_source ) - # Filter out empty strings. For example "".split{",") returns [""]. packages = set(p for p in args_output.update_packages.split(",") if p) manifest_packages = set( @@ -798,6 +816,17 @@ def main(): print(f"Gerrit URL: {change_list.url}") print(f"Change list number: {change_list.cl_number}") + if args_output.repo_manifest: + print( + f"Updating internal manifest to {git_hash} ({svn_version})...", + end="", + ) + change_list = ChangeRepoManifest(git_hash, args_output.chroot_path) + print(" Done!") + print("New repo manifest CL:") + print(f" URL: {change_list.url}") + print(f" CL Number: {change_list.cl_number}") + if __name__ == "__main__": main() diff --git a/llvm_tools/update_chromeos_llvm_hash_unittest.py b/llvm_tools/update_chromeos_llvm_hash_unittest.py index b758538c..568242aa 100755 --- a/llvm_tools/update_chromeos_llvm_hash_unittest.py +++ b/llvm_tools/update_chromeos_llvm_hash_unittest.py @@ -1,5 +1,4 @@ #!/usr/bin/env python3 -# -*- coding: utf-8 -*- # Copyright 2019 The ChromiumOS Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -13,6 +12,7 @@ import os from pathlib import Path import subprocess import sys +import tempfile import unittest import unittest.mock as mock @@ -53,7 +53,7 @@ class UpdateLLVMHashTest(unittest.TestCase): # does not exist. @mock.patch.object(os.path, "isfile", return_value=False) def testFailedToUpdateLLVMHashForInvalidEbuildPath(self, mock_isfile): - ebuild_path = "/some/path/to/package.ebuild" + ebuild_path = Path("/some/path/to/package.ebuild") llvm_variant = update_chromeos_llvm_hash.LLVMVariant.current git_hash = "a123testhash1" svn_version = 1000 @@ -94,7 +94,7 @@ class UpdateLLVMHashTest(unittest.TestCase): # 'LLVM_HASH'. with self.assertRaises(ValueError) as err: update_chromeos_llvm_hash.UpdateEbuildLLVMHash( - ebuild_file, llvm_variant, git_hash, svn_version + Path(ebuild_file), llvm_variant, git_hash, svn_version ) self.assertEqual(str(err.exception), "Failed to update LLVM_HASH") @@ -127,7 +127,7 @@ class UpdateLLVMHashTest(unittest.TestCase): # 'LLVM_NEXT_HASH'. with self.assertRaises(ValueError) as err: update_chromeos_llvm_hash.UpdateEbuildLLVMHash( - ebuild_file, llvm_variant, git_hash, svn_version + Path(ebuild_file), llvm_variant, git_hash, svn_version ) self.assertEqual( @@ -141,7 +141,6 @@ class UpdateLLVMHashTest(unittest.TestCase): def testSuccessfullyStageTheEbuildForCommitForLLVMHashUpdate( self, mock_stage_commit_command, mock_isfile ): - # Create a temporary file to simulate an ebuild file of a package. 
with test_helpers.CreateTemporaryJsonFile() as ebuild_file: # Updates LLVM_HASH to 'git_hash' and revision to @@ -163,7 +162,7 @@ class UpdateLLVMHashTest(unittest.TestCase): ) update_chromeos_llvm_hash.UpdateEbuildLLVMHash( - ebuild_file, llvm_variant, git_hash, svn_version + Path(ebuild_file), llvm_variant, git_hash, svn_version ) expected_file_contents = [ @@ -190,7 +189,6 @@ class UpdateLLVMHashTest(unittest.TestCase): def testSuccessfullyStageTheEbuildForCommitForLLVMNextHashUpdate( self, mock_stage_commit_command, mock_isfile ): - # Create a temporary file to simulate an ebuild file of a package. with test_helpers.CreateTemporaryJsonFile() as ebuild_file: # Updates LLVM_NEXT_HASH to 'git_hash' and revision to @@ -212,7 +210,7 @@ class UpdateLLVMHashTest(unittest.TestCase): ) update_chromeos_llvm_hash.UpdateEbuildLLVMHash( - ebuild_file, llvm_variant, git_hash, svn_version + Path(ebuild_file), llvm_variant, git_hash, svn_version ) expected_file_contents = [ @@ -374,19 +372,35 @@ class UpdateLLVMHashTest(unittest.TestCase): def testManifestUpdate(self, mock_subprocess, mock_ebuild_paths): manifest_packages = ["sys-devel/llvm"] chroot_path = "/path/to/chroot" - update_chromeos_llvm_hash.UpdateManifests( - manifest_packages, chroot_path - ) - - args = mock_subprocess.call_args[0][-1] - manifest_cmd = [ - "cros_sdk", - "--", - "ebuild", - "/chroot/path/test.ebuild", - "manifest", - ] - self.assertEqual(args, manifest_cmd) + update_chromeos_llvm_hash.UpdatePortageManifests( + manifest_packages, Path(chroot_path) + ) + + args = mock_subprocess.call_args_list[0] + manifest_cmd = ( + [ + "cros_sdk", + "--", + "ebuild", + "/chroot/path/test.ebuild", + "manifest", + ], + ) + self.assertEqual(args[0], manifest_cmd) + + args = mock_subprocess.call_args_list[1] + git_add_cmd = ( + [ + "cros_sdk", + "--", + "git", + "-C", + "/chroot/path", + "add", + "Manifest", + ], + ) + self.assertEqual(args[0], git_add_cmd) mock_ebuild_paths.assert_called_once() @mock.patch.object(get_llvm_hash, "GetLLVMMajorVersion") @@ -457,122 +471,6 @@ class UpdateLLVMHashTest(unittest.TestCase): mock_command_output.assert_called_once() - # Simulate behavior of 'os.path.isdir()' when the path to the repo is not a - - # directory. - - @mock.patch.object(chroot, "GetChrootEbuildPaths") - @mock.patch.object(chroot, "ConvertChrootPathsToAbsolutePaths") - def testExceptionRaisedWhenCreatingPathDictionaryFromPackages( - self, mock_chroot_paths_to_symlinks, mock_get_chroot_paths - ): - - chroot_path = "/some/path/to/chroot" - - package_name = "test-pckg/package" - package_chroot_path = "/some/chroot/path/to/package-r1.ebuild" - - # Test function to simulate 'ConvertChrootPathsToAbsolutePaths' when a - # symlink does not start with the prefix '/mnt/host/source'. - def BadPrefixChrootPath(*args): - assert len(args) == 2 - raise ValueError( - "Invalid prefix for the chroot path: " - "%s" % package_chroot_path - ) - - # Simulate 'GetChrootEbuildPaths' when valid packages are passed in. - # - # Returns a list of chroot paths. - mock_get_chroot_paths.return_value = [package_chroot_path] - - # Use test function to simulate 'ConvertChrootPathsToAbsolutePaths' - # behavior. - mock_chroot_paths_to_symlinks.side_effect = BadPrefixChrootPath - - # Verify exception is raised when for an invalid prefix in the symlink. 
- with self.assertRaises(ValueError) as err: - update_chromeos_llvm_hash.CreatePathDictionaryFromPackages( - chroot_path, [package_name] - ) - - self.assertEqual( - str(err.exception), - "Invalid prefix for the chroot path: " "%s" % package_chroot_path, - ) - - mock_get_chroot_paths.assert_called_once_with( - chroot_path, [package_name] - ) - - mock_chroot_paths_to_symlinks.assert_called_once_with( - chroot_path, [package_chroot_path] - ) - - @mock.patch.object(chroot, "GetChrootEbuildPaths") - @mock.patch.object(chroot, "ConvertChrootPathsToAbsolutePaths") - @mock.patch.object( - update_chromeos_llvm_hash, "GetEbuildPathsFromSymLinkPaths" - ) - def testSuccessfullyCreatedPathDictionaryFromPackages( - self, - mock_ebuild_paths_from_symlink_paths, - mock_chroot_paths_to_symlinks, - mock_get_chroot_paths, - ): - - package_chroot_path = "/mnt/host/source/src/path/to/package-r1.ebuild" - - # Simulate 'GetChrootEbuildPaths' when returning a chroot path for a valid - # package. - # - # Returns a list of chroot paths. - mock_get_chroot_paths.return_value = [package_chroot_path] - - package_symlink_path = ( - "/some/path/to/chroot/src/path/to/package-r1.ebuild" - ) - - # Simulate 'ConvertChrootPathsToAbsolutePaths' when returning a symlink to - # a chroot path that points to a package. - # - # Returns a list of symlink file paths. - mock_chroot_paths_to_symlinks.return_value = [package_symlink_path] - - chroot_package_path = "/some/path/to/chroot/src/path/to/package.ebuild" - - # Simulate 'GetEbuildPathsFromSymlinkPaths' when returning a dictionary of - # a symlink that points to an ebuild. - # - # Returns a dictionary of a symlink and ebuild file path pair - # where the key is the absolute path to the symlink of the ebuild file - # and the value is the absolute path to the ebuild file of the package. 
- mock_ebuild_paths_from_symlink_paths.return_value = { - package_symlink_path: chroot_package_path - } - - chroot_path = "/some/path/to/chroot" - package_name = "test-pckg/package" - - self.assertEqual( - update_chromeos_llvm_hash.CreatePathDictionaryFromPackages( - chroot_path, [package_name] - ), - {package_symlink_path: chroot_package_path}, - ) - - mock_get_chroot_paths.assert_called_once_with( - chroot_path, [package_name] - ) - - mock_chroot_paths_to_symlinks.assert_called_once_with( - chroot_path, [package_chroot_path] - ) - - mock_ebuild_paths_from_symlink_paths.assert_called_once_with( - [package_symlink_path] - ) - @mock.patch.object(subprocess, "check_output", return_value=None) def testSuccessfullyRemovedPatchesFromFilesDir(self, mock_run_cmd): patches_to_remove_list = [ @@ -607,7 +505,6 @@ class UpdateLLVMHashTest(unittest.TestCase): @mock.patch.object(os.path, "isfile", return_value=True) @mock.patch.object(subprocess, "check_output", return_value=None) def testSuccessfullyStagedPatchMetadataFileForCommit(self, mock_run_cmd, _): - patch_metadata_path = "/abs/path/to/filesdir/PATCHES.json" update_chromeos_llvm_hash.StagePatchMetadataFileForCommit( @@ -656,7 +553,6 @@ class UpdateLLVMHashTest(unittest.TestCase): def testAddedPatchResultsForCommit( self, mock_remove_patches, mock_stage_patches_for_commit ): - package_1_patch_info_dict = { "applied_patches": [], "failed_patches": [], @@ -707,321 +603,101 @@ class UpdateLLVMHashTest(unittest.TestCase): self.assertEqual(mock_stage_patches_for_commit.call_count, 2) - @mock.patch.object(get_llvm_hash, "GetLLVMMajorVersion") - @mock.patch.object( - update_chromeos_llvm_hash, "CreatePathDictionaryFromPackages" - ) - @mock.patch.object(git, "CreateBranch") - @mock.patch.object(update_chromeos_llvm_hash, "UpdateEbuildLLVMHash") - @mock.patch.object(update_chromeos_llvm_hash, "UprevEbuildSymlink") - @mock.patch.object(git, "UploadChanges") - @mock.patch.object(git, "DeleteBranch") - @mock.patch.object(os.path, "realpath") - def testExceptionRaisedWhenUpdatingPackages( - self, - mock_realpath, - mock_delete_repo, - mock_upload_changes, - mock_uprev_symlink, - mock_update_llvm_next, - mock_create_repo, - mock_create_path_dict, - mock_llvm_major_version, - ): - - path_to_package_dir = "/some/path/to/chroot/src/path/to" - abs_path_to_package = os.path.join( - path_to_package_dir, "package.ebuild" - ) - symlink_path_to_package = os.path.join( - path_to_package_dir, "package-r1.ebuild" + def setup_mock_src_tree(self, src_tree: Path): + package_dir = ( + src_tree / "src/third_party/chromiumos-overlay/sys-devel/llvm" ) - - mock_llvm_major_version.return_value = "1234" - - # Test function to simulate 'CreateBranch' when successfully created the - # branch on a valid repo path. - def SuccessfullyCreateBranchForChanges(_, branch): - self.assertEqual(branch, "update-LLVM_NEXT_HASH-a123testhash4") - - # Test function to simulate 'UpdateEbuildLLVMHash' when successfully - # updated the ebuild's 'LLVM_NEXT_HASH'. - def SuccessfullyUpdatedLLVMHash(ebuild_path, _, git_hash, svn_version): - self.assertEqual(ebuild_path, abs_path_to_package) - self.assertEqual(git_hash, "a123testhash4") - self.assertEqual(svn_version, 1000) - - # Test function to simulate 'UprevEbuildSymlink' when the symlink to the - # ebuild does not have a revision number. - def FailedToUprevEbuildSymlink(_): - # Raises a 'ValueError' exception because the symlink did not have have a - # revision number. 
- raise ValueError("Failed to uprev the ebuild.") - - # Test function to fail on 'UploadChanges' if the function gets called - # when an exception is raised. - def ShouldNotExecuteUploadChanges(*args): - # Test function should not be called (i.e. execution should resume in the - # 'finally' block) because 'UprevEbuildSymlink' raised an - # exception. - assert len(args) == 3 - assert False, ( - 'Failed to go to "finally" block ' - "after the exception was raised." + package_dir.mkdir(parents=True) + ebuild_path = package_dir / "llvm-00.00_pre0_p0.ebuild" + with ebuild_path.open("w", encoding="utf-8") as f: + f.writelines( + [ + 'LLVM_HASH="abcdef123456" # r123456', + 'LLVM_NEXT_HASH="987654321fedcba" # r99453', + ] ) - - test_package_path_dict = {symlink_path_to_package: abs_path_to_package} - - # Simulate behavior of 'CreatePathDictionaryFromPackages()' when - # successfully created a dictionary where the key is the absolute path to - # the symlink of the package and value is the absolute path to the ebuild of - # the package. - mock_create_path_dict.return_value = test_package_path_dict - - # Use test function to simulate behavior. - mock_create_repo.side_effect = SuccessfullyCreateBranchForChanges - mock_update_llvm_next.side_effect = SuccessfullyUpdatedLLVMHash - mock_uprev_symlink.side_effect = FailedToUprevEbuildSymlink - mock_upload_changes.side_effect = ShouldNotExecuteUploadChanges - mock_realpath.return_value = ( - "/abs/path/to/test-packages/package1.ebuild" - ) - - packages_to_update = ["test-packages/package1"] - llvm_variant = update_chromeos_llvm_hash.LLVMVariant.next - git_hash = "a123testhash4" - svn_version = 1000 - chroot_path = Path("/some/path/to/chroot") - git_hash_source = "google3" - branch = "update-LLVM_NEXT_HASH-a123testhash4" - extra_commit_msg = None - - # Verify exception is raised when an exception is thrown within - # the 'try' block by UprevEbuildSymlink function. - with self.assertRaises(ValueError) as err: - update_chromeos_llvm_hash.UpdatePackages( - packages=packages_to_update, - manifest_packages=[], - llvm_variant=llvm_variant, - git_hash=git_hash, - svn_version=svn_version, - chroot_path=chroot_path, - mode=failure_modes.FailureModes.FAIL, - git_hash_source=git_hash_source, - extra_commit_msg=extra_commit_msg, + symlink_path = package_dir / "llvm-00.00_pre0_p0-r1234.ebuild" + symlink_path.symlink_to(ebuild_path) + return package_dir, ebuild_path, symlink_path + + def testPortagePackageConstruction(self): + with tempfile.TemporaryDirectory( + "update_chromeos_llvm_hash.tmp" + ) as workdir_str: + src_tree = Path(workdir_str) + package_dir, ebuild_path, symlink_path = self.setup_mock_src_tree( + src_tree ) - self.assertEqual(str(err.exception), "Failed to uprev the ebuild.") - - mock_create_path_dict.assert_called_once_with( - chroot_path, packages_to_update - ) - - mock_create_repo.assert_called_once_with(path_to_package_dir, branch) - - mock_update_llvm_next.assert_called_once_with( - abs_path_to_package, llvm_variant, git_hash, svn_version - ) - - mock_uprev_symlink.assert_called_once_with(symlink_path_to_package) + # Test that we're upreving if there's a symlink. 
+ def mock_find_package_ebuild(_, package_name): + self.assertEqual( + package_name, + f"{package_dir.parent.name}/{package_dir.name}", + ) + return symlink_path + + with mock.patch( + "update_chromeos_llvm_hash.PortagePackage.find_package_ebuild", + mock_find_package_ebuild, + ): + pkg = update_chromeos_llvm_hash.PortagePackage( + src_tree, "sys-devel/llvm" + ) + self.assertEqual(pkg.uprev_target, symlink_path.absolute()) + self.assertEqual(pkg.ebuild_path, ebuild_path.absolute()) + self.assertEqual(pkg.live_ebuild(), None) - mock_upload_changes.assert_not_called() + # Make sure if the live ebuild is there, we find it. + live_ebuild_path = package_dir / "llvm-9999.ebuild" + live_ebuild_path.touch() - mock_delete_repo.assert_called_once_with(path_to_package_dir, branch) + pkg = update_chromeos_llvm_hash.PortagePackage( + src_tree, "sys-devel/llvm" + ) + self.assertEqual(pkg.live_ebuild(), live_ebuild_path) - @mock.patch.object(update_chromeos_llvm_hash, "EnsurePackageMaskContains") + @mock.patch("subprocess.run") + @mock.patch("subprocess.check_output") @mock.patch.object(get_llvm_hash, "GetLLVMMajorVersion") - @mock.patch.object( - update_chromeos_llvm_hash, "CreatePathDictionaryFromPackages" - ) - @mock.patch.object(git, "CreateBranch") - @mock.patch.object(update_chromeos_llvm_hash, "UpdateEbuildLLVMHash") - @mock.patch.object(update_chromeos_llvm_hash, "UprevEbuildSymlink") - @mock.patch.object(git, "UploadChanges") - @mock.patch.object(git, "DeleteBranch") - @mock.patch.object( - update_chromeos_llvm_hash, "UpdatePackagesPatchMetadataFile" - ) - @mock.patch.object( - update_chromeos_llvm_hash, "StagePatchMetadataFileForCommit" - ) - def testSuccessfullyUpdatedPackages( - self, - mock_stage_patch_file, - mock_update_package_metadata_file, - mock_delete_repo, - mock_upload_changes, - mock_uprev_symlink, - mock_update_llvm_next, - mock_create_repo, - mock_create_path_dict, - mock_llvm_version, - mock_mask_contains, + def testUpdatePackages( + self, mock_llvm_major_version, mock_check_output, mock_run ): - - path_to_package_dir = "/some/path/to/chroot/src/path/to" - abs_path_to_package = os.path.join( - path_to_package_dir, "package.ebuild" - ) - symlink_path_to_package = os.path.join( - path_to_package_dir, "package-r1.ebuild" - ) - - # Test function to simulate 'CreateBranch' when successfully created the - # branch for the changes to be made to the ebuild files. - def SuccessfullyCreateBranchForChanges(_, branch): - self.assertEqual(branch, "update-LLVM_NEXT_HASH-a123testhash5") - - # Test function to simulate 'UploadChanges' after a successfull update of - # 'LLVM_NEXT_HASH" of the ebuild file. - def SuccessfullyUpdatedLLVMHash(ebuild_path, _, git_hash, svn_version): - self.assertEqual( - ebuild_path, "/some/path/to/chroot/src/path/to/package.ebuild" - ) - self.assertEqual(git_hash, "a123testhash5") - self.assertEqual(svn_version, 1000) - - # Test function to simulate 'UprevEbuildSymlink' when successfully - # incremented the revision number by 1. - def SuccessfullyUprevedEbuildSymlink(symlink_path): - self.assertEqual( - symlink_path, - "/some/path/to/chroot/src/path/to/package-r1.ebuild", + mock_llvm_major_version.return_value = "17" + with tempfile.TemporaryDirectory( + "update_chromeos_llvm_hash.tmp" + ) as workdir_str: + src_tree = Path(workdir_str) + package_dir, ebuild_path, symlink_path = self.setup_mock_src_tree( + src_tree ) - # Test function to simulate 'UpdatePackagesPatchMetadataFile()' when the - # patch results contains a disabled patch in 'disable_patches' mode. 
- def RetrievedPatchResults(chroot_path, svn_version, packages, mode): - - self.assertEqual(chroot_path, Path("/some/path/to/chroot")) - self.assertEqual(svn_version, 1000) - self.assertListEqual(packages, ["path/to"]) - self.assertEqual(mode, failure_modes.FailureModes.DISABLE_PATCHES) - - patch_metadata_file = "PATCHES.json" - PatchInfo = collections.namedtuple( - "PatchInfo", - [ - "applied_patches", - "failed_patches", - "non_applicable_patches", - "disabled_patches", - "removed_patches", - "modified_metadata", - ], - ) - - package_patch_info = PatchInfo( - applied_patches=["fix_display.patch"], - failed_patches=["fix_stdout.patch"], - non_applicable_patches=[], - disabled_patches=["fix_stdout.patch"], - removed_patches=[], - modified_metadata="/abs/path/to/filesdir/%s" - % patch_metadata_file, - ) - - package_info_dict = {"path/to": package_patch_info._asdict()} - - # Returns a dictionary where the key is the package and the value is a - # dictionary that contains information about the package's patch results - # produced by the patch manager. - return package_info_dict - - # Test function to simulate 'UploadChanges()' when successfully created a - # commit for the changes made to the packages and their patches and - # retrieved the change list of the commit. - def SuccessfullyUploadedChanges(*args): - assert len(args) == 3 - commit_url = "https://some_name/path/to/commit/+/12345" - return git.CommitContents(url=commit_url, cl_number=12345) - - test_package_path_dict = {symlink_path_to_package: abs_path_to_package} - - # Simulate behavior of 'CreatePathDictionaryFromPackages()' when - # successfully created a dictionary where the key is the absolute path to - # the symlink of the package and value is the absolute path to the ebuild of - # the package. - mock_create_path_dict.return_value = test_package_path_dict - - # Use test function to simulate behavior. 
- mock_create_repo.side_effect = SuccessfullyCreateBranchForChanges - mock_update_llvm_next.side_effect = SuccessfullyUpdatedLLVMHash - mock_uprev_symlink.side_effect = SuccessfullyUprevedEbuildSymlink - mock_update_package_metadata_file.side_effect = RetrievedPatchResults - mock_upload_changes.side_effect = SuccessfullyUploadedChanges - mock_llvm_version.return_value = "1234" - mock_mask_contains.reurn_value = None + def mock_find_package_ebuild(*_): + return symlink_path - packages_to_update = ["test-packages/package1"] - llvm_variant = update_chromeos_llvm_hash.LLVMVariant.next - git_hash = "a123testhash5" - svn_version = 1000 - chroot_path = Path("/some/path/to/chroot") - git_hash_source = "tot" - branch = "update-LLVM_NEXT_HASH-a123testhash5" - extra_commit_msg = "\ncommit-message-end" - - change_list = update_chromeos_llvm_hash.UpdatePackages( - packages=packages_to_update, - manifest_packages=[], - llvm_variant=llvm_variant, - git_hash=git_hash, - svn_version=svn_version, - chroot_path=chroot_path, - mode=failure_modes.FailureModes.DISABLE_PATCHES, - git_hash_source=git_hash_source, - extra_commit_msg=extra_commit_msg, - ) - - self.assertEqual( - change_list.url, "https://some_name/path/to/commit/+/12345" - ) - - self.assertEqual(change_list.cl_number, 12345) - - mock_create_path_dict.assert_called_once_with( - chroot_path, packages_to_update - ) - - mock_create_repo.assert_called_once_with(path_to_package_dir, branch) - - mock_update_llvm_next.assert_called_once_with( - abs_path_to_package, llvm_variant, git_hash, svn_version - ) - - mock_uprev_symlink.assert_called_once_with(symlink_path_to_package) - - mock_mask_contains.assert_called_once_with(chroot_path, git_hash) - - expected_commit_messages = [ - "llvm-next/tot: upgrade to a123testhash5 (r1000)\n", - "The following packages have been updated:", - "path/to", - "\nFor the package path/to:", - "The patch metadata file PATCHES.json was modified", - "The following patches were disabled:", - "fix_stdout.patch", - "\ncommit-message-end", - ] - - mock_update_package_metadata_file.assert_called_once() - - mock_stage_patch_file.assert_called_once_with( - "/abs/path/to/filesdir/PATCHES.json" - ) - - mock_upload_changes.assert_called_once_with( - path_to_package_dir, branch, expected_commit_messages - ) - - mock_delete_repo.assert_called_once_with(path_to_package_dir, branch) + with mock.patch( + "update_chromeos_llvm_hash.PortagePackage.find_package_ebuild", + mock_find_package_ebuild, + ): + pkg = update_chromeos_llvm_hash.PortagePackage( + src_tree, "sys-devel/llvm" + ) + pkg.update( + update_chromeos_llvm_hash.LLVMVariant.current, + "beef3333", + 3333, + ) + @mock.patch.object(chroot, "VerifyChromeOSRoot") @mock.patch.object(chroot, "VerifyOutsideChroot") @mock.patch.object(get_llvm_hash, "GetLLVMHashAndVersionFromSVNOption") @mock.patch.object(update_chromeos_llvm_hash, "UpdatePackages") def testMainDefaults( - self, mock_update_packages, mock_gethash, mock_outside_chroot + self, + mock_update_packages, + mock_gethash, + mock_outside_chroot, + mock_chromeos_root, ): git_hash = "1234abcd" svn_version = 5678 @@ -1053,12 +729,18 @@ class UpdateLLVMHashTest(unittest.TestCase): extra_commit_msg=None, ) mock_outside_chroot.assert_called() + mock_chromeos_root.assert_called() + @mock.patch.object(chroot, "VerifyChromeOSRoot") @mock.patch.object(chroot, "VerifyOutsideChroot") @mock.patch.object(get_llvm_hash, "GetLLVMHashAndVersionFromSVNOption") @mock.patch.object(update_chromeos_llvm_hash, "UpdatePackages") def testMainLlvmNext( - self, 
mock_update_packages, mock_gethash, mock_outside_chroot + self, + mock_update_packages, + mock_gethash, + mock_outside_chroot, + mock_chromeos_root, ): git_hash = "1234abcd" svn_version = 5678 @@ -1089,12 +771,18 @@ class UpdateLLVMHashTest(unittest.TestCase): extra_commit_msg=None, ) mock_outside_chroot.assert_called() + mock_chromeos_root.assert_called() + @mock.patch.object(chroot, "VerifyChromeOSRoot") @mock.patch.object(chroot, "VerifyOutsideChroot") @mock.patch.object(get_llvm_hash, "GetLLVMHashAndVersionFromSVNOption") @mock.patch.object(update_chromeos_llvm_hash, "UpdatePackages") def testMainAllArgs( - self, mock_update_packages, mock_gethash, mock_outside_chroot + self, + mock_update_packages, + mock_gethash, + mock_outside_chroot, + mock_chromeos_root, ): packages_to_update = "test-packages/package1,test-libs/lib1" manifest_packages = "test-libs/lib1,test-libs/lib2" @@ -1140,6 +828,7 @@ class UpdateLLVMHashTest(unittest.TestCase): extra_commit_msg=None, ) mock_outside_chroot.assert_called() + mock_chromeos_root.assert_called() @mock.patch.object(subprocess, "check_output", return_value=None) @mock.patch.object(get_llvm_hash, "GetLLVMMajorVersion") diff --git a/llvm_tools/update_packages_and_run_tests.py b/llvm_tools/update_packages_and_run_tests.py index dc14b6de..598d9099 100755 --- a/llvm_tools/update_packages_and_run_tests.py +++ b/llvm_tools/update_packages_and_run_tests.py @@ -1,5 +1,4 @@ #!/usr/bin/env python3 -# -*- coding: utf-8 -*- # Copyright 2019 The ChromiumOS Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -11,6 +10,7 @@ import argparse import datetime import json import os +from pathlib import Path import subprocess import chroot @@ -26,10 +26,10 @@ def GetCommandLineArgs(): """Parses the command line for the command line arguments. Returns: - The log level to use when retrieving the LLVM hash or google3 LLVM version, - the chroot path to use for executing chroot commands, - a list of a package or packages to update their LLVM next hash, - and the LLVM version to use when retrieving the LLVM hash. + The log level to use when retrieving the LLVM hash or google3 LLVM + version, the chroot path to use for executing chroot commands, + a list of a package or packages to update their LLVM next hash, + and the LLVM version to use when retrieving the LLVM hash. """ # Default path to the chroot if a path is not specified. @@ -67,8 +67,8 @@ def GetCommandLineArgs(): "Otherwise, update LLVM_HASH", ) - # Add argument for the absolute path to the file that contains information on - # the previous tested svn version. + # Add argument for the absolute path to the file that contains information + # on the previous tested svn version. parser.add_argument( "--last_tested", help="the absolute path to the file that contains the last tested " @@ -94,14 +94,6 @@ def GetCommandLineArgs(): help="The reviewers for the package update changelist", ) - # Add argument for whether to display command contents to `stdout`. - parser.add_argument( - "--verbose", - action="store_true", - help="display contents of a command to the terminal " - "(default: %(default)s)", - ) - subparsers = parser.add_subparsers(dest="subparser_name") subparser_names = [] # Testing with the tryjobs. @@ -168,13 +160,13 @@ def UnchangedSinceLastRun(last_tested_file, arg_dict): """Gets the arguments used for last run Args: - last_tested_file: The absolute path to the file that contains the - arguments for the last run. 
- arg_dict: The arguments used for this run. + last_tested_file: The absolute path to the file that contains the + arguments for the last run. + arg_dict: The arguments used for this run. Returns: - Return true if the arguments used for last run exist and are the - same as the arguments used for this run. Otherwise return false. + Return true if the arguments used for last run exist and are the + same as the arguments used for this run. Otherwise return false. """ if not last_tested_file: @@ -183,7 +175,7 @@ def UnchangedSinceLastRun(last_tested_file, arg_dict): # Get the last tested svn version if the file exists. last_arg_dict = None try: - with open(last_tested_file) as f: + with open(last_tested_file, encoding="utf-8") as f: last_arg_dict = json.load(f) except (IOError, ValueError): @@ -205,12 +197,12 @@ def AddReviewers(cl, reviewers, chroot_path): def AddLinksToCL(tests, cl, chroot_path): """Adds the test link(s) to the CL as a comment.""" - # NOTE: Invoking `cros_sdk` does not make each tryjob link appear on its own - # line, so invoking the `gerrit` command directly instead of using `cros_sdk` - # to do it for us. + # NOTE: Invoking `cros_sdk` does not make each tryjob link appear on its + # own line, so invoking the `gerrit` command directly instead of using + # `cros_sdk` to do it for us. # - # FIXME: Need to figure out why `cros_sdk` does not add each tryjob link as a - # newline. + # FIXME: Need to figure out why `cros_sdk` does not add each tryjob link as + # a newline. gerrit_abs_path = os.path.join(chroot_path, "chromite/bin/gerrit") links = ["Started the following tests:"] @@ -231,15 +223,15 @@ def GetTryJobCommand(change_list, extra_change_lists, options, builder): """Constructs the 'tryjob' command. Args: - change_list: The CL obtained from updating the packages. - extra_change_lists: Extra change lists that would like to be run alongside - the change list of updating the packages. - options: Options to be passed into the tryjob command. - builder: The builder to be passed into the tryjob command. + change_list: The CL obtained from updating the packages. + extra_change_lists: Extra change lists that would like to be run + alongside the change list of updating the packages. + options: Options to be passed into the tryjob command. + builder: The builder to be passed into the tryjob command. Returns: - The 'tryjob' command with the change list of updating the packages and - any extra information that was passed into the command line. + The 'tryjob' command with the change list of updating the packages and + any extra information that was passed into the command line. """ tryjob_cmd = ["cros", "tryjob", "--yes", "--json", "-g", "%d" % change_list] @@ -260,20 +252,20 @@ def RunTryJobs(cl_number, extra_change_lists, options, builders, chroot_path): """Runs a tryjob/tryjobs. Args: - cl_number: The CL created by updating the packages. - extra_change_lists: Any extra change lists that would run alongside the CL - that was created by updating the packages ('cl_number'). - options: Any options to be passed into the 'tryjob' command. - builders: All the builders to run the 'tryjob' with. - chroot_path: The absolute path to the chroot. + cl_number: The CL created by updating the packages. + extra_change_lists: Any extra change lists that would run alongside the + CL that was created by updating the packages ('cl_number'). + options: Any options to be passed into the 'tryjob' command. + builders: All the builders to run the 'tryjob' with. 
+ chroot_path: The absolute path to the chroot. Returns: - A list that contains stdout contents of each tryjob, where stdout is - information (a hashmap) about the tryjob. The hashmap also contains stderr - if there was an error when running a tryjob. + A list that contains stdout contents of each tryjob, where stdout is + information (a hashmap) about the tryjob. The hashmap also contains + stderr if there was an error when running a tryjob. Raises: - ValueError: Failed to submit a tryjob. + ValueError: Failed to submit a tryjob. """ # Contains the results of each builder. @@ -313,20 +305,20 @@ def StartRecipeBuilders( """Launch recipe builders. Args: - cl_number: The CL created by updating the packages. - extra_change_lists: Any extra change lists that would run alongside the CL - that was created by updating the packages ('cl_number'). - options: Any options to be passed into the 'tryjob' command. - builders: All the builders to run the 'tryjob' with. - chroot_path: The absolute path to the chroot. + cl_number: The CL created by updating the packages. + extra_change_lists: Any extra change lists that would run alongside the + CL that was created by updating the packages ('cl_number'). + options: Any options to be passed into the 'tryjob' command. + builders: All the builders to run the 'tryjob' with. + chroot_path: The absolute path to the chroot. Returns: - A list that contains stdout contents of each builder, where stdout is - information (a hashmap) about the tryjob. The hashmap also contains stderr - if there was an error when running a tryjob. + A list that contains stdout contents of each builder, where stdout is + information (a hashmap) about the tryjob. The hashmap also contains + stderr if there was an error when running a tryjob. Raises: - ValueError: Failed to start a builder. + ValueError: Failed to start a builder. """ # Contains the results of each builder. @@ -415,21 +407,23 @@ def main(): """Updates the packages' LLVM hash and run tests. Raises: - AssertionError: The script was run inside the chroot. + AssertionError: The script was run inside the chroot. """ chroot.VerifyOutsideChroot() args_output = GetCommandLineArgs() + chroot.VerifyChromeOSRoot(args_output.chroot_path) + svn_option = args_output.llvm_version git_hash, svn_version = get_llvm_hash.GetLLVMHashAndVersionFromSVNOption( svn_option ) - # There is no need to run tryjobs when all the key parameters remain unchanged - # from last time. + # There is no need to run tryjobs when all the key parameters remain + # unchanged from last time. # If --last_tested is specified, check if the current run has the same # arguments last time --last_tested is used. 
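
The --last_tested flow described above is a plain JSON round-trip of the run's arguments. A minimal sketch of the comparison, assuming the same file format main() writes below (the helper name here is illustrative):

import json


def unchanged_since_last_run(last_tested_file: str, arg_dict: dict) -> bool:
    # A falsy path means the caller never asked for this bookkeeping.
    if not last_tested_file:
        return False
    try:
        with open(last_tested_file, encoding="utf-8") as f:
            # Identical argument dicts mean the tryjobs would be a rerun.
            return json.load(f) == arg_dict
    except (IOError, ValueError):
        # A missing or corrupt state file means we cannot skip the run.
        return False
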
@@ -455,7 +449,6 @@ def main(): llvm_variant = update_chromeos_llvm_hash.LLVMVariant.current if args_output.is_llvm_next: llvm_variant = update_chromeos_llvm_hash.LLVMVariant.next - update_chromeos_llvm_hash.verbose = args_output.verbose extra_commit_msg = None if args_output.subparser_name == "cq": cq_depend_msg = GetCQDependString(args_output.extra_change_lists) @@ -471,7 +464,7 @@ def main(): llvm_variant=llvm_variant, git_hash=git_hash, svn_version=svn_version, - chroot_path=args_output.chroot_path, + chroot_path=Path(args_output.chroot_path), mode=failure_modes.FailureModes.DISABLE_PATCHES, git_hash_source=svn_option, extra_commit_msg=extra_commit_msg, @@ -517,7 +510,7 @@ def main(): # If --last_tested is specified, record the arguments used if args_output.last_tested: - with open(args_output.last_tested, "w") as f: + with open(args_output.last_tested, "w", encoding="utf-8") as f: json.dump(arg_dict, f, indent=2) diff --git a/llvm_tools/update_packages_and_run_tests_unittest.py b/llvm_tools/update_packages_and_run_tests_unittest.py index fc65749f..e62ed04f 100755 --- a/llvm_tools/update_packages_and_run_tests_unittest.py +++ b/llvm_tools/update_packages_and_run_tests_unittest.py @@ -263,12 +263,14 @@ class UpdatePackagesAndRunTryjobsTest(unittest.TestCase): @mock.patch.object(update_chromeos_llvm_hash, "UpdatePackages") @mock.patch.object(update_packages_and_run_tests, "GetCommandLineArgs") @mock.patch.object(get_llvm_hash, "GetLLVMHashAndVersionFromSVNOption") + @mock.patch.object(chroot, "VerifyChromeOSRoot") @mock.patch.object(chroot, "VerifyOutsideChroot", return_value=True) @mock.patch.object(chroot, "GetChrootEbuildPaths") def testUpdatedLastTestedFileWithNewTestedRevision( self, mock_get_chroot_build_paths, mock_outside_chroot, + mock_chromeos_root, mock_get_hash_and_version, mock_get_commandline_args, mock_update_packages, @@ -336,6 +338,8 @@ class UpdatePackagesAndRunTryjobsTest(unittest.TestCase): mock_outside_chroot.assert_called_once() + mock_chromeos_root.assert_called_once() + mock_get_commandline_args.assert_called_once() mock_get_hash_and_version.assert_called_once() diff --git a/llvm_tools/upload_lexan_crashes_to_forcey.py b/llvm_tools/upload_lexan_crashes_to_forcey.py deleted file mode 100755 index 885a88f6..00000000 --- a/llvm_tools/upload_lexan_crashes_to_forcey.py +++ /dev/null @@ -1,285 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2020 The ChromiumOS Authors -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -"""Fetches and submits the latest test-cases from Lexan's crash bucket.""" - -import argparse -import contextlib -import datetime -import json -import logging -import os -import shutil -import subprocess -import sys -import tempfile -from typing import Generator, Iterable, List - - -gsurl_base = "gs://chrome-clang-crash-reports/v1" - - -def gsutil_ls(loc: str) -> List[str]: - results = subprocess.run( - ["gsutil.py", "ls", loc], - stdout=subprocess.PIPE, - check=True, - encoding="utf-8", - ) - return [l.strip() for l in results.stdout.splitlines()] - - -def gsurl_ls_last_numbers(url: str) -> List[int]: - return sorted(int(x.rstrip("/").split("/")[-1]) for x in gsutil_ls(url)) - - -def get_available_year_numbers() -> List[int]: - return gsurl_ls_last_numbers(gsurl_base) - - -def get_available_month_numbers(year: int) -> List[int]: - return gsurl_ls_last_numbers(f"{gsurl_base}/{year}") - - -def get_available_day_numbers(year: int, month: int) -> List[int]: - return gsurl_ls_last_numbers(f"{gsurl_base}/{year}/{month:02d}") - - -def get_available_test_case_urls(year: int, month: int, day: int) -> List[str]: - return gsutil_ls(f"{gsurl_base}/{year}/{month:02d}/{day:02d}") - - -def test_cases_on_or_after( - date: datetime.datetime, -) -> Generator[str, None, None]: - """Yields all test-cases submitted on or after the given date.""" - for year in get_available_year_numbers(): - if year < date.year: - continue - - for month in get_available_month_numbers(year): - if year == date.year and month < date.month: - continue - - for day in get_available_day_numbers(year, month): - when = datetime.date(year, month, day) - if when < date: - continue - - yield when, get_available_test_case_urls(year, month, day) - - -def to_ymd(date: datetime.date) -> str: - return date.strftime("%Y-%m-%d") - - -def from_ymd(date_str: str) -> datetime.date: - return datetime.datetime.strptime(date_str, "%Y-%m-%d").date() - - -def persist_state( - seen_urls: Iterable[str], state_file: str, current_date: datetime.date -): - tmp_state_file = state_file + ".tmp" - with open(tmp_state_file, "w", encoding="utf-8") as f: - json.dump( - { - "already_seen": sorted(seen_urls), - "most_recent_date": to_ymd(current_date), - }, - f, - ) - os.rename(tmp_state_file, state_file) - - -@contextlib.contextmanager -def temp_dir() -> Generator[str, None, None]: - loc = tempfile.mkdtemp("lexan-autosubmit") - try: - yield loc - finally: - shutil.rmtree(loc) - - -def download_and_unpack_test_case(gs_url: str, tempdir: str) -> None: - suffix = os.path.splitext(gs_url)[1] - target_name = "test_case" + suffix - target = os.path.join(tempdir, target_name) - subprocess.run(["gsutil.py", "cp", gs_url, target], check=True) - subprocess.run(["tar", "xaf", target_name], check=True, cwd=tempdir) - os.unlink(target) - - -def submit_test_case(gs_url: str, cr_tool: str) -> None: - logging.info("Submitting %s", gs_url) - with temp_dir() as tempdir: - download_and_unpack_test_case(gs_url, tempdir) - - # Sometimes (e.g., in - # gs://chrome-clang-crash-reports/v1/2020/03/27/ - # chromium.clang-ToTiOS-12754-GTXToolKit-2bfcde.tgz) - # we'll get `.crash` files. Unclear why, but let's filter them out anyway. 
- repro_files = [ - os.path.join(tempdir, x) - for x in os.listdir(tempdir) - if not x.endswith(".crash") - ] - assert len(repro_files) == 2, repro_files - if repro_files[0].endswith(".sh"): - sh_file, src_file = repro_files - assert not src_file.endswith(".sh"), repro_files - else: - src_file, sh_file = repro_files - assert sh_file.endswith(".sh"), repro_files - - # Peephole: lexan got a crash upload with a way old clang. Ignore it. - with open(sh_file, encoding="utf-8") as f: - if "Crash reproducer for clang version 9.0.0" in f.read(): - logging.warning( - "Skipping upload for %s; seems to be with an old clang", - gs_url, - ) - return - - subprocess.run( - [ - cr_tool, - "reduce", - "-stream=false", - "-wait=false", - "-note", - gs_url, - "-sh_file", - os.path.join(tempdir, sh_file), - "-src_file", - os.path.join(tempdir, src_file), - ], - check=True, - ) - - -def submit_new_test_cases( - last_seen_test_cases: Iterable[str], - earliest_date_to_check: datetime.date, - forcey: str, - state_file_path: str, -) -> None: - """Submits new test-cases to forcey. - - This will persist state after each test-case is submitted. - - Args: - last_seen_test_cases: test-cases which have been submitted already, and - should be skipped if seen again. - earliest_date_to_check: the earliest date we should consider test-cases - from. - forcey: path to the forcey binary. - state_file_path: path to our state file. - """ - # `all_test_cases_seen` is the union of all test-cases seen on this and prior - # invocations. It guarantees, in all cases we care about, that we won't - # submit the same test-case twice. `test_cases_seen_this_invocation` is - # persisted as "all of the test-cases we've seen on this and prior - # invocations" if we successfully submit _all_ test-cases. - # - # Since you can visualize the test-cases this script considers as a sliding - # window that only moves forward, if we saw a test-case on a prior iteration - # but no longer see it, we'll never see it again (since it fell out of our - # sliding window by being too old). Hence, keeping it around is - # pointless. - # - # We only persist this minimized set of test-cases if _everything_ succeeds, - # since if something fails below, there's a chance that we haven't revisited - # test-cases that we've already seen. - all_test_cases_seen = set(last_seen_test_cases) - test_cases_seen_this_invocation = [] - most_recent_date = earliest_date_to_check - for date, candidates in test_cases_on_or_after(earliest_date_to_check): - most_recent_date = max(most_recent_date, date) - - for url in candidates: - test_cases_seen_this_invocation.append(url) - if url in all_test_cases_seen: - continue - - all_test_cases_seen.add(url) - submit_test_case(url, forcey) - - # Persisting on each iteration of this loop isn't free, but it's the - # easiest way to not resubmit test-cases, and it's good to keep in mind - # that: - # - the state file will be small (<12KB, since it only keeps a few days - # worth of test-cases after the first run) - # - in addition to this, we're downloading+unzipping+reuploading multiple - # MB of test-case bytes. - # - # So comparatively, the overhead here probably isn't an issue. 
- persist_state( - all_test_cases_seen, state_file_path, most_recent_date - ) - - persist_state( - test_cases_seen_this_invocation, state_file_path, most_recent_date - ) - - -def main(argv: List[str]): - logging.basicConfig( - format=">> %(asctime)s: %(levelname)s: %(filename)s:%(lineno)d: " - "%(message)s", - level=logging.INFO, - ) - - my_dir = os.path.dirname(os.path.abspath(__file__)) - - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--state_file", default=os.path.join(my_dir, "lexan-state.json") - ) - parser.add_argument( - "--last_date", - help="The earliest date that we care about. All test cases from here " - "on will be picked up. Format is YYYY-MM-DD.", - ) - parser.add_argument( - "--4c", dest="forcey", required=True, help="Path to a 4c client binary" - ) - opts = parser.parse_args(argv) - - forcey = opts.forcey - state_file = opts.state_file - last_date_str = opts.last_date - - os.makedirs(os.path.dirname(state_file), 0o755, exist_ok=True) - - if last_date_str is None: - with open(state_file, encoding="utf-8") as f: - data = json.load(f) - most_recent_date = from_ymd(data["most_recent_date"]) - submit_new_test_cases( - last_seen_test_cases=data["already_seen"], - # Note that we always subtract one day from this to avoid a race: - # uploads may appear slightly out-of-order (or builders may lag, or - # ...), so the last test-case uploaded for 2020/01/01 might appear - # _after_ the first test-case for 2020/01/02. Assuming that builders - # won't lag behind for over a day, the easiest way to handle this is to - # always check the previous and current days. - earliest_date_to_check=most_recent_date - - datetime.timedelta(days=1), - forcey=forcey, - state_file_path=state_file, - ) - else: - submit_new_test_cases( - last_seen_test_cases=(), - earliest_date_to_check=from_ymd(last_date_str), - forcey=forcey, - state_file_path=state_file, - ) - - -if __name__ == "__main__": - sys.exit(main(sys.argv[1:])) diff --git a/llvm_tools/upload_lexan_crashes_to_forcey_test.py b/llvm_tools/upload_lexan_crashes_to_forcey_test.py deleted file mode 100755 index 7238281a..00000000 --- a/llvm_tools/upload_lexan_crashes_to_forcey_test.py +++ /dev/null @@ -1,166 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2020 The ChromiumOS Authors -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -"""Tests for upload_lexan_crashes_to_forcey.""" - -import datetime -import os -import unittest -import unittest.mock - -import upload_lexan_crashes_to_forcey - - -class Test(unittest.TestCase): - """Tests for upload_lexan_crashes_to_forcey.""" - - def test_date_parsing_functions(self): - self.assertEqual( - datetime.date(2020, 2, 1), - upload_lexan_crashes_to_forcey.from_ymd("2020-02-01"), - ) - - @unittest.mock.patch( - "upload_lexan_crashes_to_forcey.test_cases_on_or_after", - return_value=( - ( - datetime.date(2020, 1, 1), - ("gs://test-case-1", "gs://test-case-1.1"), - ), - (datetime.date(2020, 1, 2), ("gs://test-case-2",)), - (datetime.date(2020, 1, 1), ("gs://test-case-3",)), - (datetime.date(2020, 1, 4), ("gs://test-case-4",)), - ), - ) - @unittest.mock.patch("upload_lexan_crashes_to_forcey.submit_test_case") - @unittest.mock.patch("upload_lexan_crashes_to_forcey.persist_state") - def test_new_test_case_submission_functions( - self, - persist_state_mock, - submit_test_case_mock, - test_cases_on_or_after_mock, - ): - forcey_path = "/path/to/4c" - real_state_file_path = "/path/to/state/file" - earliest_date = datetime.date(2020, 1, 1) - - persist_state_calls = [] - - # Since the set this gets is mutated, we need to copy it somehow. - def persist_state_side_effect( - test_cases_to_persist, state_file_path, most_recent_date - ): - self.assertEqual(state_file_path, real_state_file_path) - persist_state_calls.append( - (sorted(test_cases_to_persist), most_recent_date) - ) - - persist_state_mock.side_effect = persist_state_side_effect - - upload_lexan_crashes_to_forcey.submit_new_test_cases( - last_seen_test_cases=( - "gs://test-case-0", - "gs://test-case-1", - ), - earliest_date_to_check=earliest_date, - forcey=forcey_path, - state_file_path=real_state_file_path, - ) - - test_cases_on_or_after_mock.assert_called_once_with(earliest_date) - self.assertEqual( - submit_test_case_mock.call_args_list, - [ - unittest.mock.call("gs://test-case-1.1", forcey_path), - unittest.mock.call("gs://test-case-2", forcey_path), - unittest.mock.call("gs://test-case-3", forcey_path), - unittest.mock.call("gs://test-case-4", forcey_path), - ], - ) - - self.assertEqual( - persist_state_calls, - [ - ( - [ - "gs://test-case-0", - "gs://test-case-1", - "gs://test-case-1.1", - ], - datetime.date(2020, 1, 1), - ), - ( - [ - "gs://test-case-0", - "gs://test-case-1", - "gs://test-case-1.1", - "gs://test-case-2", - ], - datetime.date(2020, 1, 2), - ), - ( - [ - "gs://test-case-0", - "gs://test-case-1", - "gs://test-case-1.1", - "gs://test-case-2", - "gs://test-case-3", - ], - datetime.date(2020, 1, 2), - ), - ( - [ - "gs://test-case-0", - "gs://test-case-1", - "gs://test-case-1.1", - "gs://test-case-2", - "gs://test-case-3", - "gs://test-case-4", - ], - datetime.date(2020, 1, 4), - ), - ( - [ - "gs://test-case-1", - "gs://test-case-1.1", - "gs://test-case-2", - "gs://test-case-3", - "gs://test-case-4", - ], - datetime.date(2020, 1, 4), - ), - ], - ) - - @unittest.mock.patch( - "upload_lexan_crashes_to_forcey.download_and_unpack_test_case" - ) - @unittest.mock.patch("subprocess.run") - def test_test_case_submission_functions( - self, subprocess_run_mock, download_and_unpack_mock - ): - mock_gs_url = "gs://foo/bar/baz" - - def side_effect(gs_url: str, tempdir: str) -> None: - self.assertEqual(gs_url, mock_gs_url) - - with open(os.path.join(tempdir, "test_case.c"), "w") as f: - # All we need is an empty file here. 
-                pass
-
-            with open(
-                os.path.join(tempdir, "test_case.sh"), "w", encoding="utf-8"
-            ) as f:
-                f.write("# Crash reproducer for clang version 9.0.0 (...)\n")
-                f.write("clang something or other\n")
-
-        download_and_unpack_mock.side_effect = side_effect
-        upload_lexan_crashes_to_forcey.submit_test_case(mock_gs_url, "4c")
-        subprocess_run_mock.assert_not_called()
-
-
-if __name__ == "__main__":
-    unittest.main()
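
Note on the removed uploader: the dedupe-and-resume scheme it implemented (persist state after every submission, then on the next run restart one day before the last date recorded) is the core of the deleted logic. The sketch below is an illustrative reconstruction, not the deleted script itself: the `listing` dict and `submit()` are hypothetical stand-ins for the `gsutil.py ls` year/month/day bucket walk and the 4c upload.

    # Illustrative sketch only -- `listing` stands in for the
    # gs://chrome-clang-crash-reports/v1 year/month/day walk and submit()
    # stands in for the 4c upload; neither is part of the deleted script.
    import datetime
    import json
    from typing import Dict, Iterable, List

    listing: Dict[datetime.date, List[str]] = {
        datetime.date(2020, 1, 1): ["gs://case-a", "gs://case-b"],
        datetime.date(2020, 1, 2): ["gs://case-c"],
    }


    def submit(url: str) -> None:
        print("submitting", url)


    def run(seen: Iterable[str], start: datetime.date, state_file: str) -> None:
        all_seen = set(seen)
        seen_this_run: List[str] = []
        newest = start
        for day in sorted(d for d in listing if d >= start):
            newest = max(newest, day)
            for url in listing[day]:
                seen_this_run.append(url)
                if url in all_seen:
                    continue  # submitted on a prior run; skip
                all_seen.add(url)
                submit(url)
                # Persist after every submission so an interrupted run
                # never resubmits a test case.
                with open(state_file, "w", encoding="utf-8") as f:
                    json.dump({"already_seen": sorted(all_seen),
                               "most_recent_date": newest.isoformat()}, f)
        # Everything succeeded: persist only the test cases still inside
        # the sliding window; anything older can never be seen again.
        with open(state_file, "w", encoding="utf-8") as f:
            json.dump({"already_seen": sorted(seen_this_run),
                       "most_recent_date": newest.isoformat()}, f)


    # Resume one day before the recorded date: uploads near the day
    # boundary can land slightly out of order.
    run((), datetime.date(2020, 1, 2) - datetime.timedelta(days=1), "state.json")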