Diffstat (limited to 'llvm_tools')
-rw-r--r--  llvm_tools/README.md                                  |  55
-rwxr-xr-x  llvm_tools/auto_llvm_bisection.py                     | 100
-rwxr-xr-x  llvm_tools/auto_llvm_bisection_unittest.py            | 304
-rwxr-xr-x  llvm_tools/bisect_clang_crashes.py                    |   2
-rwxr-xr-x  llvm_tools/bisect_clang_crashes_unittest.py           |   1
-rwxr-xr-x  llvm_tools/cherrypick_cl.py                           | 250
-rwxr-xr-x  llvm_tools/fetch_cros_sdk_rolls.py                    |   2
-rwxr-xr-x  llvm_tools/get_llvm_hash.py                           | 101
-rwxr-xr-x  llvm_tools/get_llvm_hash_unittest.py                  |  44
-rwxr-xr-x  llvm_tools/get_upstream_patch.py                      | 465
-rwxr-xr-x  llvm_tools/git.py                                     |  24
-rwxr-xr-x  llvm_tools/git_llvm_rev.py                            |   5
-rwxr-xr-x  llvm_tools/git_llvm_rev_test.py                       |   6
-rwxr-xr-x  llvm_tools/llvm_bisection.py                          | 151
-rwxr-xr-x  llvm_tools/modify_a_tryjob.py                         |  75
-rwxr-xr-x  llvm_tools/nightly_revert_checker.py                  | 280
-rwxr-xr-x  llvm_tools/nightly_revert_checker_test.py             |  39
-rwxr-xr-x  llvm_tools/patch_manager.py                           |   9
-rwxr-xr-x  llvm_tools/patch_manager_unittest.py                  | 112
-rw-r--r--  llvm_tools/patch_sync/.gitignore                      |   1
-rw-r--r--  llvm_tools/patch_sync/Cargo.lock                      | 460
-rw-r--r--  llvm_tools/patch_sync/Cargo.toml                      |  21
-rw-r--r--  llvm_tools/patch_sync/src/android_utils.rs            |  62
-rw-r--r--  llvm_tools/patch_sync/src/main.rs                     | 332
-rw-r--r--  llvm_tools/patch_sync/src/patch_parsing.rs            | 462
-rw-r--r--  llvm_tools/patch_sync/src/version_control.rs          | 400
-rwxr-xr-x  llvm_tools/revert_checker.py                          |  97
-rwxr-xr-x  llvm_tools/revert_checker_test.py                     | 110
-rwxr-xr-x  llvm_tools/update_all_tryjobs_with_auto.py            |  80
-rwxr-xr-x  llvm_tools/update_chromeos_llvm_hash.py               | 186
-rwxr-xr-x  llvm_tools/update_chromeos_llvm_hash_unittest.py      | 225
-rwxr-xr-x  llvm_tools/update_packages_and_run_tests.py           | 113
-rwxr-xr-x  llvm_tools/update_packages_and_run_tests_unittest.py  |  12
-rwxr-xr-x  llvm_tools/update_tryjob_status.py                    |  79
-rwxr-xr-x  llvm_tools/update_tryjob_status_unittest.py           | 214
-rwxr-xr-x  llvm_tools/upload_lexan_crashes_to_forcey.py          |  31
-rwxr-xr-x  llvm_tools/upload_lexan_crashes_to_forcey_test.py     |  24
37 files changed, 3528 insertions(+), 1406 deletions(-)
diff --git a/llvm_tools/README.md b/llvm_tools/README.md
index 783ec22d..74fad6c9 100644
--- a/llvm_tools/README.md
+++ b/llvm_tools/README.md
@@ -119,6 +119,7 @@ For example, to create a roll CL to the git hash of revision 367622:
$ ./update_chromeos_llvm_hash.py \
--update_packages sys-devel/llvm sys-libs/compiler-rt \
sys-libs/libcxx sys-libs/libcxxabi sys-libs/llvm-libunwind \
+ 'dev-util/lldb-server' \
--llvm_version 367622 \
--failure_mode disable_patches
```
@@ -360,30 +361,6 @@ $ ./update_tryjob_status.py \
--custom_script /abs/path/to/script.py
```
-### `update_all_tryjobs_with_auto.py`
-
-#### Usage
-
-This script updates all tryjobs that are 'pending' to the result provided by
-`cros buildresult`.
-
-For example:
-
-```
-$ ./update_all_tryjobs_with_auto.py \
- --last_tested /abs/path/to/last_tested_file.json \
- --chroot_path /abs/path/to/chroot
-```
-
-The above example will update all tryjobs whose 'status' is 'pending' in the
-file provided by `--last_tested`.
-
-For help with the command line arguments of the script, run:
-
-```
-$ ./update_all_tryjobs_with_auto.py --help
-```
-
### `modify_a_tryjob.py`
#### Usage
@@ -496,18 +473,20 @@ r387778
**Tip**: if you put a symlink called `git-llvm-rev` to this script somewhere on
your `$PATH`, you can also use it as `git llvm-rev`.
-### `cherrypick_cl.py`
+### `get_upstream_patch.py`
#### Usage
-This script updates the proper ChromeOS packages with an LLVM cherrypick of your choosing, and
-copies the cherrypick into patch folders of the packages.
+This script updates the proper ChromeOS packages with LLVM patches of your choosing, and
+copies the patches into the patch folders of the packages. This tool supports both git
+hashes of commits and differential reviews.
Usage:
```
-./cherrypick_cl.py --chroot_path /abs/path/to/chroot --start_sha llvm
+./get_upstream_patch.py --chroot_path /abs/path/to/chroot --start_sha llvm
--sha 174c3eb69f19ff2d6a3eeae31d04afe77e62c021 --sha 174c3eb69f19ff2d6a3eeae31d04afe77e62c021
+--differential D123456
```
It tries to autodetect a lot of things (e.g., packages changed by each sha,
@@ -517,6 +496,10 @@ more information, please see the `--help`
### `revert_checker.py`
+**This script is copied from upstream LLVM. Please prefer to make upstream edits,
+rather than modifying this script. It's kept in a CrOS repo so we don't need an
+LLVM tree to `import` this from scripts here.**
+
This script reports reverts which happen 'across' a certain LLVM commit.
To clarify the meaning of 'across' with an example, if we had the following
@@ -542,11 +525,23 @@ parents of 123abc.
This is an automated wrapper around `revert_checker.py`. It checks to see if any
new reverts happened across toolchains that we're trying to ship since it was
-last run. If so, it sends emails to appropriate groups.
+last run. If so, it either automatically cherry-picks the reverts, or sends
+emails to appropriate groups.
-Usage example:
+Usage example for cherry-picking:
+```
+PYTHONPATH=../ ./nightly_revert_checker.py \
+  cherry-pick \
+  --state_file state.json \
+  --llvm_dir llvm-project-copy \
+  --chromeos_dir ../../../../ \
+ --reviewers=chromium-os-mage@google.com
+```
+
+Usage example for email:
```
PYTHONPATH=../ ./nightly_revert_checker.py \
+  email \
--state_file state.json \
--llvm_dir llvm-project-copy \
--chromeos_dir ../../../../
diff --git a/llvm_tools/auto_llvm_bisection.py b/llvm_tools/auto_llvm_bisection.py
index dd29cf4f..7e8fb1dd 100755
--- a/llvm_tools/auto_llvm_bisection.py
+++ b/llvm_tools/auto_llvm_bisection.py
@@ -8,6 +8,8 @@
from __future__ import print_function
+import enum
+import json
import os
import subprocess
import sys
@@ -17,7 +19,7 @@ import traceback
import chroot
from llvm_bisection import BisectionExitStatus
import llvm_bisection
-from update_all_tryjobs_with_auto import GetPathToUpdateAllTryjobsWithAutoScript
+import update_tryjob_status
# Used to re-try for 'llvm_bisection.py' to attempt to launch more tryjobs.
BISECTION_RETRY_TIME_SECS = 10 * 60
@@ -39,6 +41,52 @@ BISECTION_ATTEMPTS = 3
POLLING_LIMIT_SECS = 18 * 60 * 60
+class BuilderStatus(enum.Enum):
+ """Actual values given via 'cros buildresult'."""
+
+ PASS = 'pass'
+ FAIL = 'fail'
+ RUNNING = 'running'
+
+
+builder_status_mapping = {
+ BuilderStatus.PASS.value: update_tryjob_status.TryjobStatus.GOOD.value,
+ BuilderStatus.FAIL.value: update_tryjob_status.TryjobStatus.BAD.value,
+ BuilderStatus.RUNNING.value: update_tryjob_status.TryjobStatus.PENDING.value
+}
+
+
+def GetBuildResult(chroot_path, buildbucket_id):
+ """Returns the conversion of the result of 'cros buildresult'."""
+
+ # Calls 'cros buildresult' to get the status of the tryjob.
+ try:
+ tryjob_json = subprocess.check_output(
+ [
+ 'cros_sdk', '--', 'cros', 'buildresult', '--buildbucket-id',
+ str(buildbucket_id), '--report', 'json'
+ ],
+ cwd=chroot_path,
+ stderr=subprocess.STDOUT,
+ encoding='UTF-8',
+ )
+ except subprocess.CalledProcessError as err:
+ if 'No build found. Perhaps not started' not in err.output:
+ raise
+ return None
+
+ tryjob_content = json.loads(tryjob_json)
+
+ build_result = str(tryjob_content['%d' % buildbucket_id]['status'])
+
+ # The string returned by 'cros buildresult' might not be in the mapping.
+ if build_result not in builder_status_mapping:
+ raise ValueError('"cros buildresult" return value is invalid: %s' %
+ build_result)
+
+ return builder_status_mapping[build_result]
+
+
def main():
"""Bisects LLVM using the result of `cros buildresult` of each tryjob.
@@ -50,51 +98,58 @@ def main():
args_output = llvm_bisection.GetCommandLineArgs()
- exec_update_tryjobs = [
- GetPathToUpdateAllTryjobsWithAutoScript(), '--chroot_path',
- args_output.chroot_path, '--last_tested', args_output.last_tested
- ]
-
if os.path.isfile(args_output.last_tested):
print('Resuming bisection for %s' % args_output.last_tested)
else:
print('Starting a new bisection for %s' % args_output.last_tested)
while True:
+ # Update the status of existing tryjobs
if os.path.isfile(args_output.last_tested):
update_start_time = time.time()
-
- # Update all tryjobs whose status is 'pending' to the result of `cros
- # buildresult`.
+ with open(args_output.last_tested) as json_file:
+ json_dict = json.load(json_file)
while True:
print('\nAttempting to update all tryjobs whose "status" is '
'"pending":')
print('-' * 40)
- update_ret = subprocess.call(exec_update_tryjobs)
+ completed = True
+ for tryjob in json_dict['jobs']:
+ if tryjob[
+ 'status'] == update_tryjob_status.TryjobStatus.PENDING.value:
+ status = GetBuildResult(args_output.chroot_path,
+ tryjob['buildbucket_id'])
+ if status:
+ tryjob['status'] = status
+ else:
+ completed = False
print('-' * 40)
- # Successfully updated all tryjobs whose 'status' was 'pending'/ no
- # updates were needed (all tryjobs already have been updated).
- if update_ret == 0:
+ # Proceed to the next step if all the existing tryjobs have completed.
+ if completed:
break
delta_time = time.time() - update_start_time
if delta_time > POLLING_LIMIT_SECS:
- print('Unable to update tryjobs whose status is "pending" to '
- 'the result of `cros buildresult`.')
-
# Something is wrong with updating the tryjobs's 'status' via
# `cros buildresult` (e.g. network issue, etc.).
- sys.exit(1)
+ sys.exit('Failed to update pending tryjobs.')
+ print('-' * 40)
print('Sleeping for %d minutes.' % (POLL_RETRY_TIME_SECS // 60))
time.sleep(POLL_RETRY_TIME_SECS)
- # Launch more tryjobs if possible to narrow down the bad commit/revision or
- # terminate the bisection because the bad commit/revision was found.
+  # There should always be an update from the tryjobs launched in the
+  # last iteration.
+ temp_filename = '%s.new' % args_output.last_tested
+ with open(temp_filename, 'w') as temp_file:
+ json.dump(json_dict, temp_file, indent=4, separators=(',', ': '))
+ os.rename(temp_filename, args_output.last_tested)
+
+ # Launch more tryjobs.
for cur_try in range(1, BISECTION_ATTEMPTS + 1):
try:
print('\nAttempting to launch more tryjobs if possible:')
@@ -104,8 +159,7 @@ def main():
print('-' * 40)
- # Exit code 126 means that there are no more revisions to test between
- # 'start' and 'end', so bisection is complete.
+ # Stop if the bisection has completed.
if bisection_ret == BisectionExitStatus.BISECTION_COMPLETE.value:
sys.exit(0)
@@ -118,9 +172,7 @@ def main():
# Exceeded the number of times to launch more tryjobs.
if cur_try == BISECTION_ATTEMPTS:
- print('Unable to continue bisection.')
-
- sys.exit(1)
+ sys.exit('Unable to continue bisection.')
num_retries_left = BISECTION_ATTEMPTS - cur_try
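
As a reference for the rewrite above, here is a minimal, self-contained sketch of the status mapping that `GetBuildResult` implements. The `cros_sdk` subprocess call is stubbed out, and the tryjob status literals are assumptions standing in for the `update_tryjob_status.TryjobStatus` values:

```python
import enum
import json


class BuilderStatus(enum.Enum):
  """Statuses reported by `cros buildresult` (mirrors the diff above)."""
  PASS = 'pass'
  FAIL = 'fail'
  RUNNING = 'running'


# Assumed stand-ins for update_tryjob_status.TryjobStatus values.
BUILDER_TO_TRYJOB_STATUS = {
    BuilderStatus.PASS.value: 'good',
    BuilderStatus.FAIL.value: 'bad',
    BuilderStatus.RUNNING.value: 'pending',
}


def convert_build_result(report_json: str, buildbucket_id: int) -> str:
  """Maps a `cros buildresult --report json` payload to a tryjob status.

  The real GetBuildResult also returns None when the build has not started
  yet; that branch is omitted from this sketch.
  """
  status = str(json.loads(report_json)[str(buildbucket_id)]['status'])
  if status not in BUILDER_TO_TRYJOB_STATUS:
    raise ValueError('"cros buildresult" return value is invalid: %s' % status)
  return BUILDER_TO_TRYJOB_STATUS[status]


if __name__ == '__main__':
  report = json.dumps({'192': {'status': 'pass'}})
  assert convert_build_result(report, 192) == 'good'
```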
diff --git a/llvm_tools/auto_llvm_bisection_unittest.py b/llvm_tools/auto_llvm_bisection_unittest.py
index 56b556e6..07c0e715 100755
--- a/llvm_tools/auto_llvm_bisection_unittest.py
+++ b/llvm_tools/auto_llvm_bisection_unittest.py
@@ -8,6 +8,7 @@
from __future__ import print_function
+import json
import os
import subprocess
import time
@@ -19,187 +20,128 @@ import auto_llvm_bisection
import chroot
import llvm_bisection
import test_helpers
+import update_tryjob_status
class AutoLLVMBisectionTest(unittest.TestCase):
"""Unittests for auto bisection of LLVM."""
- # Simulate the behavior of `VerifyOutsideChroot()` when successfully invoking
- # the script outside of the chroot.
@mock.patch.object(chroot, 'VerifyOutsideChroot', return_value=True)
- # Simulate behavior of `time.sleep()` when waiting for errors to settle caused
- # by `llvm_bisection.main()` (e.g. network issue, etc.).
- @mock.patch.object(time, 'sleep')
- # Simulate behavior of `traceback.print_exc()` when an exception happened in
- # `llvm_bisection.main()`.
- @mock.patch.object(traceback, 'print_exc')
- # Simulate behavior of `llvm_bisection.main()` when failed to launch tryjobs
- # (exception happened along the way, etc.).
- @mock.patch.object(llvm_bisection, 'main')
- # Simulate behavior of `os.path.isfile()` when starting a new bisection.
- @mock.patch.object(os.path, 'isfile', return_value=False)
- # Simulate behavior of `GetPathToUpdateAllTryjobsWithAutoScript()` when
- # returning the absolute path to that script that updates all 'pending'
- # tryjobs to the result of `cros buildresult`.
- @mock.patch.object(
- auto_llvm_bisection,
- 'GetPathToUpdateAllTryjobsWithAutoScript',
- return_value='/abs/path/to/update_tryjob.py')
- # Simulate `llvm_bisection.GetCommandLineArgs()` when parsing the command line
- # arguments required by the bisection script.
@mock.patch.object(
llvm_bisection,
'GetCommandLineArgs',
return_value=test_helpers.ArgsOutputTest())
- def testFailedToStartBisection(
- self, mock_get_args, mock_get_auto_script, mock_is_file,
- mock_llvm_bisection, mock_traceback, mock_sleep, mock_outside_chroot):
-
- def MockLLVMBisectionRaisesException(_args_output):
- raise ValueError('Failed to launch more tryjobs.')
-
- # Use the test function to simulate the behavior of an exception happening
- # when launching more tryjobs.
- mock_llvm_bisection.side_effect = MockLLVMBisectionRaisesException
+ @mock.patch.object(time, 'sleep')
+ @mock.patch.object(traceback, 'print_exc')
+ @mock.patch.object(llvm_bisection, 'main')
+ @mock.patch.object(os.path, 'isfile')
+ @mock.patch.object(auto_llvm_bisection, 'open')
+ @mock.patch.object(json, 'load')
+ @mock.patch.object(auto_llvm_bisection, 'GetBuildResult')
+ @mock.patch.object(os, 'rename')
+ def testAutoLLVMBisectionPassed(
+ self,
+ # pylint: disable=unused-argument
+ mock_rename,
+ mock_get_build_result,
+ mock_json_load,
+ # pylint: disable=unused-argument
+ mock_open,
+ mock_isfile,
+ mock_llvm_bisection,
+ mock_traceback,
+ mock_sleep,
+ mock_get_args,
+ mock_outside_chroot):
+
+ mock_isfile.side_effect = [False, False, True, True]
+ mock_llvm_bisection.side_effect = [
+ 0,
+ ValueError('Failed to launch more tryjobs.'),
+ llvm_bisection.BisectionExitStatus.BISECTION_COMPLETE.value
+ ]
+ mock_json_load.return_value = {
+ 'start':
+ 369410,
+ 'end':
+ 369420,
+ 'jobs': [{
+ 'buildbucket_id': 12345,
+ 'rev': 369411,
+ 'status': update_tryjob_status.TryjobStatus.PENDING.value,
+ }]
+ }
+ mock_get_build_result.return_value = (
+ update_tryjob_status.TryjobStatus.GOOD.value)
- # Verify the exception is raised when the number of attempts to launched
- # more tryjobs is exceeded, so unable to continue
- # bisection.
+  # Verify the exception is raised when the bad revision is found.
+ # Uses `sys.exit(0)` to indicate success.
with self.assertRaises(SystemExit) as err:
auto_llvm_bisection.main()
- self.assertEqual(err.exception.code, 1)
+ self.assertEqual(err.exception.code, 0)
mock_outside_chroot.assert_called_once()
mock_get_args.assert_called_once()
- mock_get_auto_script.assert_called_once()
- self.assertEqual(mock_is_file.call_count, 2)
+ self.assertEqual(mock_isfile.call_count, 3)
self.assertEqual(mock_llvm_bisection.call_count, 3)
- self.assertEqual(mock_traceback.call_count, 3)
- self.assertEqual(mock_sleep.call_count, 2)
+ mock_traceback.assert_called_once()
+ mock_sleep.assert_called_once()
- # Simulate the behavior of `subprocess.call()` when successfully updated all
- # tryjobs whose 'status' value is 'pending'.
- @mock.patch.object(subprocess, 'call', return_value=0)
- # Simulate the behavior of `VerifyOutsideChroot()` when successfully invoking
- # the script outside of the chroot.
@mock.patch.object(chroot, 'VerifyOutsideChroot', return_value=True)
- # Simulate behavior of `time.sleep()` when waiting for errors to settle caused
- # by `llvm_bisection.main()` (e.g. network issue, etc.).
@mock.patch.object(time, 'sleep')
- # Simulate behavior of `traceback.print_exc()` when an exception happened in
- # `llvm_bisection.main()`.
@mock.patch.object(traceback, 'print_exc')
- # Simulate behavior of `llvm_bisection.main()` when failed to launch tryjobs
- # (exception happened along the way, etc.).
@mock.patch.object(llvm_bisection, 'main')
- # Simulate behavior of `os.path.isfile()` when starting a new bisection.
@mock.patch.object(os.path, 'isfile')
- # Simulate behavior of `GetPathToUpdateAllTryjobsWithAutoScript()` when
- # returning the absolute path to that script that updates all 'pending'
- # tryjobs to the result of `cros buildresult`.
- @mock.patch.object(
- auto_llvm_bisection,
- 'GetPathToUpdateAllTryjobsWithAutoScript',
- return_value='/abs/path/to/update_tryjob.py')
- # Simulate `llvm_bisection.GetCommandLineArgs()` when parsing the command line
- # arguments required by the bisection script.
@mock.patch.object(
llvm_bisection,
'GetCommandLineArgs',
return_value=test_helpers.ArgsOutputTest())
- def testSuccessfullyBisectedLLVMRevision(
- self, mock_get_args, mock_get_auto_script, mock_is_file,
- mock_llvm_bisection, mock_traceback, mock_sleep, mock_outside_chroot,
- mock_update_tryjobs):
-
- # Simulate the behavior of `os.path.isfile()` when checking whether the
- # status file provided exists.
- @test_helpers.CallCountsToMockFunctions
- def MockStatusFileCheck(call_count, _last_tested):
- # Simulate that the status file does not exist, so the LLVM bisection
- # script would create the status file and launch tryjobs.
- if call_count < 2:
- return False
-
- # Simulate when the status file exists and `subprocess.call()` executes
- # the script that updates all the 'pending' tryjobs to the result of `cros
- # buildresult`.
- if call_count == 2:
- return True
-
- assert False, 'os.path.isfile() called more times than expected.'
-
- # Simulate behavior of `llvm_bisection.main()` when successfully bisected
- # between the good and bad LLVM revision.
- @test_helpers.CallCountsToMockFunctions
- def MockLLVMBisectionReturnValue(call_count, _args_output):
- # Simulate that successfully launched more tryjobs.
- if call_count == 0:
- return 0
-
- # Simulate that failed to launch more tryjobs.
- if call_count == 1:
- raise ValueError('Failed to launch more tryjobs.')
-
- # Simulate that the bad revision has been found.
- if call_count == 2:
- return llvm_bisection.BisectionExitStatus.BISECTION_COMPLETE.value
+ def testFailedToStartBisection(self, mock_get_args, mock_isfile,
+ mock_llvm_bisection, mock_traceback,
+ mock_sleep, mock_outside_chroot):
- assert False, 'Called `llvm_bisection.main()` more than expected.'
+ mock_isfile.return_value = False
+ mock_llvm_bisection.side_effect = ValueError(
+ 'Failed to launch more tryjobs.')
- # Use the test function to simulate the behavior of `llvm_bisection.main()`.
- mock_llvm_bisection.side_effect = MockLLVMBisectionReturnValue
-
- # Use the test function to simulate the behavior of `os.path.isfile()`.
- mock_is_file.side_effect = MockStatusFileCheck
-
- # Verify the excpetion is raised when successfully found the bad revision.
- # Uses `sys.exit(0)` to indicate success.
+  # Verify the exception is raised when the number of attempts to launch
+  # more tryjobs is exceeded, so bisection cannot continue.
with self.assertRaises(SystemExit) as err:
auto_llvm_bisection.main()
- self.assertEqual(err.exception.code, 0)
+ self.assertEqual(err.exception.code, 'Unable to continue bisection.')
mock_outside_chroot.assert_called_once()
mock_get_args.assert_called_once()
- mock_get_auto_script.assert_called_once()
- self.assertEqual(mock_is_file.call_count, 3)
+ self.assertEqual(mock_isfile.call_count, 2)
self.assertEqual(mock_llvm_bisection.call_count, 3)
- mock_traceback.assert_called_once()
- mock_sleep.assert_called_once()
- mock_update_tryjobs.assert_called_once()
+ self.assertEqual(mock_traceback.call_count, 3)
+ self.assertEqual(mock_sleep.call_count, 2)
- # Simulate behavior of `subprocess.call()` when failed to update tryjobs to
- # `cros buildresult` (script failed).
- @mock.patch.object(subprocess, 'call', return_value=1)
- # Simulate behavior of `time.time()` when determining the time passed when
- # updating tryjobs whose 'status' is 'pending'.
- @mock.patch.object(time, 'time')
- # Simulate the behavior of `VerifyOutsideChroot()` when successfully invoking
- # the script outside of the chroot.
@mock.patch.object(chroot, 'VerifyOutsideChroot', return_value=True)
- # Simulate behavior of `time.sleep()` when waiting for errors to settle caused
- # by `llvm_bisection.main()` (e.g. network issue, etc.).
- @mock.patch.object(time, 'sleep')
- # Simulate behavior of `traceback.print_exc()` when resuming bisection.
- @mock.patch.object(os.path, 'isfile', return_value=True)
- # Simulate behavior of `GetPathToUpdateAllTryjobsWithAutoScript()` when
- # returning the absolute path to that script that updates all 'pending'
- # tryjobs to the result of `cros buildresult`.
- @mock.patch.object(
- auto_llvm_bisection,
- 'GetPathToUpdateAllTryjobsWithAutoScript',
- return_value='/abs/path/to/update_tryjob.py')
- # Simulate `llvm_bisection.GetCommandLineArgs()` when parsing the command line
- # arguments required by the bisection script.
@mock.patch.object(
llvm_bisection,
'GetCommandLineArgs',
return_value=test_helpers.ArgsOutputTest())
+ @mock.patch.object(time, 'time')
+ @mock.patch.object(time, 'sleep')
+ @mock.patch.object(os.path, 'isfile')
+ @mock.patch.object(auto_llvm_bisection, 'open')
+ @mock.patch.object(json, 'load')
+ @mock.patch.object(auto_llvm_bisection, 'GetBuildResult')
def testFailedToUpdatePendingTryJobs(
- self, mock_get_args, mock_get_auto_script, mock_is_file, mock_sleep,
- mock_outside_chroot, mock_time, mock_update_tryjobs):
+ self,
+ mock_get_build_result,
+ mock_json_load,
+ # pylint: disable=unused-argument
+ mock_open,
+ mock_isfile,
+ mock_sleep,
+ mock_time,
+ mock_get_args,
+ mock_outside_chroot):
# Simulate behavior of `time.time()` for time passed.
@test_helpers.CallCountsToMockFunctions
@@ -209,9 +151,20 @@ class AutoLLVMBisectionTest(unittest.TestCase):
assert False, 'Called `time.time()` more than expected.'
- # Use the test function to simulate the behavior of `time.time()`.
+ mock_isfile.return_value = True
+ mock_json_load.return_value = {
+ 'start':
+ 369410,
+ 'end':
+ 369420,
+ 'jobs': [{
+ 'buildbucket_id': 12345,
+ 'rev': 369411,
+ 'status': update_tryjob_status.TryjobStatus.PENDING.value,
+ }]
+ }
+ mock_get_build_result.return_value = None
mock_time.side_effect = MockTimePassed
-
# Reduce the polling limit for the test case to terminate faster.
auto_llvm_bisection.POLLING_LIMIT_SECS = 1
@@ -220,15 +173,80 @@ class AutoLLVMBisectionTest(unittest.TestCase):
with self.assertRaises(SystemExit) as err:
auto_llvm_bisection.main()
- self.assertEqual(err.exception.code, 1)
+ self.assertEqual(err.exception.code, 'Failed to update pending tryjobs.')
mock_outside_chroot.assert_called_once()
mock_get_args.assert_called_once()
- mock_get_auto_script.assert_called_once()
- self.assertEqual(mock_is_file.call_count, 2)
+ self.assertEqual(mock_isfile.call_count, 2)
mock_sleep.assert_called_once()
self.assertEqual(mock_time.call_count, 3)
- self.assertEqual(mock_update_tryjobs.call_count, 2)
+
+ @mock.patch.object(subprocess, 'check_output')
+ def testGetBuildResult(self, mock_chroot_command):
+ buildbucket_id = 192
+ status = auto_llvm_bisection.BuilderStatus.PASS.value
+ tryjob_contents = {buildbucket_id: {'status': status}}
+ mock_chroot_command.return_value = json.dumps(tryjob_contents)
+ chroot_path = '/some/path/to/chroot'
+
+ self.assertEqual(
+ auto_llvm_bisection.GetBuildResult(chroot_path, buildbucket_id),
+ update_tryjob_status.TryjobStatus.GOOD.value)
+
+ mock_chroot_command.assert_called_once_with(
+ [
+ 'cros_sdk', '--', 'cros', 'buildresult', '--buildbucket-id',
+ str(buildbucket_id), '--report', 'json'
+ ],
+ cwd='/some/path/to/chroot',
+ stderr=subprocess.STDOUT,
+ encoding='UTF-8',
+ )
+
+ @mock.patch.object(subprocess, 'check_output')
+ def testGetBuildResultPassedWithUnstartedTryjob(self, mock_chroot_command):
+ buildbucket_id = 192
+ chroot_path = '/some/path/to/chroot'
+ mock_chroot_command.side_effect = subprocess.CalledProcessError(
+ returncode=1, cmd=[], output='No build found. Perhaps not started')
+ auto_llvm_bisection.GetBuildResult(chroot_path, buildbucket_id)
+ mock_chroot_command.assert_called_once_with(
+ [
+ 'cros_sdk', '--', 'cros', 'buildresult', '--buildbucket-id', '192',
+ '--report', 'json'
+ ],
+ cwd=chroot_path,
+ stderr=subprocess.STDOUT,
+ encoding='UTF-8',
+ )
+
+ @mock.patch.object(subprocess, 'check_output')
+  def testGetBuildResultFailedWithInvalidBuildStatus(self,
+                                                     mock_chroot_command):
+ chroot_path = '/some/path/to/chroot'
+ buildbucket_id = 50
+ invalid_build_status = 'querying'
+ tryjob_contents = {buildbucket_id: {'status': invalid_build_status}}
+ mock_chroot_command.return_value = json.dumps(tryjob_contents)
+
+ # Verify the exception is raised when the return value of `cros buildresult`
+ # is not in the `builder_status_mapping`.
+ with self.assertRaises(ValueError) as err:
+ auto_llvm_bisection.GetBuildResult(chroot_path, buildbucket_id)
+
+ self.assertEqual(
+ str(err.exception),
+ '"cros buildresult" return value is invalid: %s' % invalid_build_status)
+
+ mock_chroot_command.assert_called_once_with(
+ [
+ 'cros_sdk', '--', 'cros', 'buildresult', '--buildbucket-id',
+ str(buildbucket_id), '--report', 'json'
+ ],
+ cwd=chroot_path,
+ stderr=subprocess.STDOUT,
+ encoding='UTF-8',
+ )
if __name__ == '__main__':
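
The rewritten tests lean heavily on `unittest.mock` `side_effect` lists (e.g. `mock_llvm_bisection.side_effect` above), which return successive entries on each call and raise any entry that is an exception instance. A self-contained illustration of that behavior:

```python
import unittest
from unittest import mock


class SideEffectListDemo(unittest.TestCase):
  """Demonstrates list-valued side_effect, as used in the tests above."""

  def test_side_effect_sequence(self):
    fake_main = mock.MagicMock()
    # Each call consumes the next entry; an exception instance is raised
    # instead of returned (mirrors mock_llvm_bisection.side_effect above).
    fake_main.side_effect = [0, ValueError('launch failed'), 126]

    self.assertEqual(fake_main(), 0)
    with self.assertRaises(ValueError):
      fake_main()
    self.assertEqual(fake_main(), 126)


if __name__ == '__main__':
  unittest.main()
```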
diff --git a/llvm_tools/bisect_clang_crashes.py b/llvm_tools/bisect_clang_crashes.py
index e8ee2ab6..c53db179 100755
--- a/llvm_tools/bisect_clang_crashes.py
+++ b/llvm_tools/bisect_clang_crashes.py
@@ -7,8 +7,6 @@
"""Fetches and submits the artifacts from Chrome OS toolchain's crash bucket.
"""
-# pylint: disable=cros-logging-import
-
import argparse
import glob
import json
diff --git a/llvm_tools/bisect_clang_crashes_unittest.py b/llvm_tools/bisect_clang_crashes_unittest.py
index c9143450..a3dc0c6d 100755
--- a/llvm_tools/bisect_clang_crashes_unittest.py
+++ b/llvm_tools/bisect_clang_crashes_unittest.py
@@ -6,7 +6,6 @@
"""Tests for bisect_clang_crashes."""
-# pylint: disable=cros-logging-import
import glob
import logging
import os.path
diff --git a/llvm_tools/cherrypick_cl.py b/llvm_tools/cherrypick_cl.py
deleted file mode 100755
index 9e306725..00000000
--- a/llvm_tools/cherrypick_cl.py
+++ /dev/null
@@ -1,250 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# pylint: disable=cros-logging-import
-
-"""Adds a cherrypick to LLVM's PATCHES.json."""
-
-from __future__ import print_function
-
-import argparse
-import json
-import logging
-import os
-import shlex
-import subprocess
-import sys
-
-import chroot
-import get_llvm_hash
-import git
-import git_llvm_rev
-import update_chromeos_llvm_hash
-
-
-def add_cherrypick(patches_json_path: str, patches_dir: str,
- relative_patches_dir: str, start_version: git_llvm_rev.Rev,
- llvm_dir: str, rev: git_llvm_rev.Rev, sha: str,
- package: str):
- with open(patches_json_path, encoding='utf-8') as f:
- patches_json = json.load(f)
-
- file_name = sha + '.patch'
- rel_patch_path = os.path.join(relative_patches_dir, file_name)
-
- for p in patches_json:
- rel_path = p['rel_patch_path']
- if rel_path == rel_patch_path:
- raise ValueError('Patch at %r already exists in PATCHES.json' % rel_path)
- if sha in rel_path:
- logging.warning(
- 'Similarly-named patch already exists in PATCHES.json: %r', rel_path)
-
- with open(os.path.join(patches_dir, file_name), 'wb') as f:
- cmd = ['git', 'show', sha]
- # Only apply the part of the patch that belongs to this package, expect
- # LLVM. This is because some packages are built with LLVM ebuild on X86 but
- # not on the other architectures. e.g. compiler-rt. Therefore always apply
- # the entire patch to LLVM ebuild as a workaround.
- if package != 'llvm':
- cmd.append(package_to_project(package))
- subprocess.check_call(cmd, stdout=f, cwd=llvm_dir)
-
- commit_subject = subprocess.check_output(
- ['git', 'log', '-n1', '--format=%s', sha], cwd=llvm_dir, encoding='utf-8')
-
- patches_json.append({
- 'comment': commit_subject.strip(),
- 'rel_patch_path': rel_patch_path,
- 'start_version': start_version.number,
- 'end_version': rev.number,
- })
-
- temp_file = patches_json_path + '.tmp'
- with open(temp_file, 'w', encoding='utf-8') as f:
- json.dump(patches_json, f, indent=4, separators=(',', ': '))
- os.rename(temp_file, patches_json_path)
-
-
-def parse_ebuild_for_assignment(ebuild_path: str, var_name: str) -> str:
- # '_pre' filters the LLVM 9.0 ebuild, which we never want to target, from
- # this list.
- candidates = [
- x for x in os.listdir(ebuild_path)
- if x.endswith('.ebuild') and '_pre' in x
- ]
-
- if not candidates:
- raise ValueError('No ebuilds found under %r' % ebuild_path)
-
- ebuild = os.path.join(ebuild_path, max(candidates))
- with open(ebuild, encoding='utf-8') as f:
- var_name_eq = var_name + '='
- for orig_line in f:
- if not orig_line.startswith(var_name_eq):
- continue
-
- # We shouldn't see much variety here, so do the simplest thing possible.
- line = orig_line[len(var_name_eq):]
- # Remove comments
- line = line.split('#')[0]
- # Remove quotes
- line = shlex.split(line)
- if len(line) != 1:
- raise ValueError('Expected exactly one quoted value in %r' % orig_line)
- return line[0].strip()
-
- raise ValueError('No %s= line found in %r' % (var_name, ebuild))
-
-
-# Resolves a git ref (or similar) to a LLVM SHA.
-def resolve_llvm_ref(llvm_dir: str, sha: str) -> str:
- return subprocess.check_output(
- ['git', 'rev-parse', sha],
- encoding='utf-8',
- cwd=llvm_dir,
- ).strip()
-
-
-# Get the package name of an LLVM project
-def project_to_package(project: str) -> str:
- if project == 'libunwind':
- return 'llvm-libunwind'
- return project
-
-
-# Get the LLVM project name of a package
-def package_to_project(package: str) -> str:
- if package == 'llvm-libunwind':
- return 'libunwind'
- return package
-
-
-# Get the LLVM projects change in the specifed sha
-def get_package_names(sha: str, llvm_dir: str) -> list:
- paths = subprocess.check_output(
- ['git', 'show', '--name-only', '--format=', sha],
- cwd=llvm_dir,
- encoding='utf-8').splitlines()
- # Some LLVM projects are built by LLVM ebuild on X86, so always apply the
- # patch to LLVM ebuild
- packages = {'llvm'}
- # Detect if there are more packages to apply the patch to
- for path in paths:
- package = project_to_package(path.split('/')[0])
- if package in ('compiler-rt', 'libcxx', 'libcxxabi', 'llvm-libunwind'):
- packages.add(package)
- packages = list(sorted(packages))
- return packages
-
-
-def main():
- chroot.VerifyOutsideChroot()
- logging.basicConfig(
- format='%(asctime)s: %(levelname)s: %(filename)s:%(lineno)d: %(message)s',
- level=logging.INFO,
- )
-
- parser = argparse.ArgumentParser(description=__doc__)
- parser.add_argument(
- '--chroot_path',
- default=os.path.join(os.path.expanduser('~'), 'chromiumos'),
- help='the path to the chroot (default: %(default)s)')
- parser.add_argument(
- '--start_sha',
- default='llvm-next',
- help='LLVM SHA that the patch should start applying at. You can specify '
- '"llvm" or "llvm-next", as well. Defaults to %(default)s.')
- parser.add_argument(
- '--sha',
- required=True,
- action='append',
- help='The LLVM git SHA to cherry-pick.')
- parser.add_argument(
- '--create_cl',
- default=False,
- action='store_true',
- help='Automatically create a CL if specified')
- args = parser.parse_args()
-
- llvm_symlink = chroot.ConvertChrootPathsToAbsolutePaths(
- args.chroot_path,
- chroot.GetChrootEbuildPaths(args.chroot_path, ['sys-devel/llvm']))[0]
- llvm_symlink_dir = os.path.dirname(llvm_symlink)
-
- git_status = subprocess.check_output(['git', 'status', '-s'],
- cwd=llvm_symlink_dir,
- encoding='utf-8')
- if git_status:
- raise ValueError('Uncommited changes detected in %s' %
- os.path.dirname(os.path.dirname(llvm_symlink_dir)))
-
- start_sha = args.start_sha
- if start_sha == 'llvm':
- start_sha = parse_ebuild_for_assignment(llvm_symlink_dir, 'LLVM_HASH')
- elif start_sha == 'llvm-next':
- start_sha = parse_ebuild_for_assignment(llvm_symlink_dir, 'LLVM_NEXT_HASH')
- logging.info('Base llvm hash == %s', start_sha)
-
- llvm_config = git_llvm_rev.LLVMConfig(
- remote='origin', dir=get_llvm_hash.GetAndUpdateLLVMProjectInLLVMTools())
-
- start_sha = resolve_llvm_ref(llvm_config.dir, start_sha)
- start_rev = git_llvm_rev.translate_sha_to_rev(llvm_config, start_sha)
-
- if args.create_cl:
- branch = 'cherry-pick'
- git.CreateBranch(llvm_symlink_dir, branch)
- symlinks_to_uprev = []
- commit_messages = [
- 'llvm: cherry-pick CLs from upstream\n',
- ]
-
- for sha in args.sha:
- sha = resolve_llvm_ref(llvm_config.dir, sha)
- rev = git_llvm_rev.translate_sha_to_rev(llvm_config, sha)
- # Find out the llvm projects changed in this commit
- packages = get_package_names(sha, llvm_config.dir)
- # Find out the ebuild symlinks of the corresponding ChromeOS packages
- symlinks = chroot.GetChrootEbuildPaths(args.chroot_path, [
- 'sys-devel/llvm' if package == 'llvm' else 'sys-libs/' + package
- for package in packages
- ])
- symlinks = chroot.ConvertChrootPathsToAbsolutePaths(args.chroot_path,
- symlinks)
-
- # Create a patch and add its metadata for each package
- for package, symlink in zip(packages, symlinks):
- symlink_dir = os.path.dirname(symlink)
- patches_json_path = os.path.join(symlink_dir, 'files/PATCHES.json')
- relative_patches_dir = 'cherry' if package == 'llvm' else ''
- patches_dir = os.path.join(symlink_dir, 'files', relative_patches_dir)
- logging.info('Cherrypicking %s (%s) into %s', rev, sha, package)
-
- add_cherrypick(patches_json_path, patches_dir, relative_patches_dir,
- start_rev, llvm_config.dir, rev, sha, package)
- if args.create_cl:
- symlinks_to_uprev.extend(symlinks)
- commit_messages.extend([
- '\n\nreviews.llvm.org/rG%s\n' % sha,
- subprocess.check_output(['git', 'log', '-n1', '--oneline', sha],
- cwd=llvm_config.dir,
- encoding='utf-8')
- ])
-
- logging.info('Complete.')
-
- if args.create_cl:
- symlinks_to_uprev = list(sorted(set(symlinks_to_uprev)))
- for symlink in symlinks_to_uprev:
- update_chromeos_llvm_hash.UprevEbuildSymlink(symlink)
- subprocess.check_output(['git', 'add', '--all'], cwd=symlink_dir)
- git.UploadChanges(llvm_symlink_dir, branch, commit_messages)
- git.DeleteBranch(llvm_symlink_dir, branch)
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/llvm_tools/fetch_cros_sdk_rolls.py b/llvm_tools/fetch_cros_sdk_rolls.py
index 42af678a..83d7025a 100755
--- a/llvm_tools/fetch_cros_sdk_rolls.py
+++ b/llvm_tools/fetch_cros_sdk_rolls.py
@@ -10,8 +10,6 @@ gs://chromiumos-sdk/. The hope is that it'll help answer the question "when did
the toolchain ebuild ${x} go live?"
"""
-# pylint: disable=cros-logging-import
-
import argparse
import json
import logging
diff --git a/llvm_tools/get_llvm_hash.py b/llvm_tools/get_llvm_hash.py
index 329e8292..83b5ae76 100755
--- a/llvm_tools/get_llvm_hash.py
+++ b/llvm_tools/get_llvm_hash.py
@@ -9,16 +9,18 @@
from __future__ import print_function
import argparse
+import contextlib
+import functools
import os
+import re
import shutil
import subprocess
import sys
import tempfile
-from contextlib import contextmanager
import git_llvm_rev
-from subprocess_helpers import CheckCommand
from subprocess_helpers import check_output
+from subprocess_helpers import CheckCommand
_LLVM_GIT_URL = ('https://chromium.googlesource.com/external/github.com/llvm'
'/llvm-project')
@@ -63,7 +65,66 @@ def GetGitHashFrom(src_dir, version):
git_llvm_rev.Rev(branch=git_llvm_rev.MAIN_BRANCH, number=version))
-@contextmanager
+def CheckoutBranch(src_dir, branch):
+ """Checks out and pulls from a branch in a git repo.
+
+ Args:
+ src_dir: The LLVM source tree.
+ branch: The git branch to checkout in src_dir.
+
+ Raises:
+    ValueError: Failed to check out or pull the branch
+ """
+ CheckCommand(['git', '-C', src_dir, 'checkout', branch])
+ CheckCommand(['git', '-C', src_dir, 'pull'])
+
+
+def ParseLLVMMajorVersion(cmakelist):
+ """Reads CMakeList.txt file contents for LLVMMajor Version.
+
+ Args:
+ cmakelist: contents of CMakeList.txt
+
+ Returns:
+ The major version number as a string
+
+ Raises:
+ ValueError: The major version cannot be parsed from cmakelist
+ """
+ match = re.search(r'\n\s+set\(LLVM_VERSION_MAJOR (?P<major>\d+)\)', cmakelist)
+ if not match:
+    raise ValueError('Failed to parse CMakeLists for llvm major version')
+ return match.group('major')
+
+
+@functools.lru_cache(maxsize=1)
+def GetLLVMMajorVersion(git_hash=None):
+ """Reads llvm/CMakeList.txt file contents for LLVMMajor Version.
+
+ Args:
+ git_hash: git hash of llvm version as string or None for top of trunk
+
+ Returns:
+ The major version number as a string
+
+ Raises:
+    ValueError: The major version cannot be parsed from cmakelist or
+      there was a failure to checkout the git_hash version
+    FileNotFoundError: The src directory does not contain CMakeLists.txt
+ """
+ src_dir = GetAndUpdateLLVMProjectInLLVMTools()
+ cmakelists_path = os.path.join(src_dir, 'llvm', 'CMakeLists.txt')
+ if git_hash:
+ CheckCommand(['git', '-C', src_dir, 'checkout', git_hash])
+ try:
+ with open(cmakelists_path) as cmakelists_file:
+ return ParseLLVMMajorVersion(cmakelists_file.read())
+ finally:
+ if git_hash:
+ CheckoutBranch(src_dir, git_llvm_rev.MAIN_BRANCH)
+
+
+@contextlib.contextmanager
def CreateTempLLVMRepo(temp_dir):
"""Adds a LLVM worktree to 'temp_dir'.
@@ -77,7 +138,7 @@ def CreateTempLLVMRepo(temp_dir):
temp_dir: An absolute path to the temporary directory to put the worktree in
(obtained via 'tempfile.mkdtemp()').
- Returns:
+ Yields:
The absolute path to 'temp_dir'.
Raises:
@@ -88,7 +149,8 @@ def CreateTempLLVMRepo(temp_dir):
abs_path_to_llvm_project_dir = GetAndUpdateLLVMProjectInLLVMTools()
CheckCommand([
'git', '-C', abs_path_to_llvm_project_dir, 'worktree', 'add', '--detach',
- temp_dir, git_llvm_rev.MAIN_BRANCH
+ temp_dir,
+ 'origin/%s' % git_llvm_rev.MAIN_BRANCH
])
try:
@@ -113,6 +175,9 @@ def GetAndUpdateLLVMProjectInLLVMTools():
LLVM mirror. In either case, this function will return the absolute path to
'llvm-project-copy' directory.
+ Returns:
+ Absolute path to 'llvm-project-copy' directory in 'llvm_tools'
+
Raises:
ValueError: LLVM repo (in 'llvm-project-copy' dir.) has changes or failed to
checkout to main or failed to fetch from chromium mirror of LLVM.
@@ -125,8 +190,9 @@ def GetAndUpdateLLVMProjectInLLVMTools():
if not os.path.isdir(abs_path_to_llvm_project_dir):
print(
- 'Checking out LLVM from scratch. This could take a while...\n'
- '(This should only need to be done once, though.)',
+ (f'Checking out LLVM to {abs_path_to_llvm_project_dir}\n'
+ 'so that we can map between commit hashes and revision numbers.\n'
+ 'This may take a while, but only has to be done once.'),
file=sys.stderr)
os.mkdir(abs_path_to_llvm_project_dir)
@@ -142,11 +208,7 @@ def GetAndUpdateLLVMProjectInLLVMTools():
raise ValueError('LLVM repo in %s has changes, please remove.' %
abs_path_to_llvm_project_dir)
- CheckCommand([
- 'git', '-C', abs_path_to_llvm_project_dir, 'checkout',
- git_llvm_rev.MAIN_BRANCH
- ])
- CheckCommand(['git', '-C', abs_path_to_llvm_project_dir, 'pull'])
+ CheckoutBranch(abs_path_to_llvm_project_dir, git_llvm_rev.MAIN_BRANCH)
return abs_path_to_llvm_project_dir
@@ -154,6 +216,9 @@ def GetAndUpdateLLVMProjectInLLVMTools():
def GetGoogle3LLVMVersion(stable):
"""Gets the latest google3 LLVM version.
+ Args:
+ stable: boolean, use the stable version or the unstable version
+
Returns:
The latest LLVM SVN version as an integer.
@@ -178,7 +243,7 @@ def GetGoogle3LLVMVersion(stable):
return GetVersionFrom(GetAndUpdateLLVMProjectInLLVMTools(), git_hash.rstrip())
-def is_svn_option(svn_option):
+def IsSvnOption(svn_option):
"""Validates whether the argument (string) is a git hash option.
The argument is used to find the git hash of LLVM.
@@ -186,6 +251,10 @@ def is_svn_option(svn_option):
Args:
svn_option: The option passed in as a command line argument.
+ Returns:
+ lowercase svn_option if it is a known hash source, otherwise the svn_option
+ as an int
+
Raises:
ValueError: Invalid svn option provided.
"""
@@ -212,7 +281,7 @@ def GetLLVMHashAndVersionFromSVNOption(svn_option):
Args:
svn_option: A valid svn option obtained from the command line.
- Ex: 'google3', 'tot', or <svn_version> such as 365123.
+ Ex. 'google3', 'tot', or <svn_version> such as 365123.
Returns:
A tuple that is the LLVM git hash and LLVM version.
@@ -240,7 +309,7 @@ class LLVMHash(object):
"""Provides methods to retrieve a LLVM hash."""
@staticmethod
- @contextmanager
+ @contextlib.contextmanager
def CreateTempDirectory():
temp_dir = tempfile.mkdtemp()
@@ -310,7 +379,7 @@ def main():
parser = argparse.ArgumentParser(description='Finds the LLVM hash.')
parser.add_argument(
'--llvm_version',
- type=is_svn_option,
+ type=IsSvnOption,
required=True,
help='which git hash of LLVM to find. Either a svn revision, or one '
'of %s' % sorted(KNOWN_HASH_SOURCES))
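
To see the new `ParseLLVMMajorVersion` regex and the `functools.lru_cache` memoization in isolation, here is a small self-contained sketch. The CMakeLists fragment is invented for the example, and the cached wrapper takes file contents directly rather than a git hash:

```python
import functools
import re


def parse_major_version(cmakelist: str) -> str:
  """Mirrors ParseLLVMMajorVersion: pull LLVM_VERSION_MAJOR out of the text."""
  match = re.search(r'\n\s+set\(LLVM_VERSION_MAJOR (?P<major>\d+)\)', cmakelist)
  if not match:
    raise ValueError('Failed to parse CMakeLists for llvm major version')
  return match.group('major')


@functools.lru_cache(maxsize=1)
def major_version_cached(contents: str) -> str:
  # With maxsize=1, repeated calls with the same argument skip the parse;
  # a call with a different argument evicts the previous entry.
  return parse_major_version(contents)


if __name__ == '__main__':
  fragment = ('if(NOT DEFINED LLVM_VERSION_MAJOR)\n'
              '  set(LLVM_VERSION_MAJOR 13)\n'
              'endif()')
  assert parse_major_version(fragment) == '13'
  assert major_version_cached(fragment) == '13'  # second call hits the cache
  assert major_version_cached(fragment) == '13'
```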
diff --git a/llvm_tools/get_llvm_hash_unittest.py b/llvm_tools/get_llvm_hash_unittest.py
index 2e56aed5..49740f33 100755
--- a/llvm_tools/get_llvm_hash_unittest.py
+++ b/llvm_tools/get_llvm_hash_unittest.py
@@ -90,6 +90,50 @@ class TestGetLLVMHash(unittest.TestCase):
self.assertEqual(LLVMHash().GetTopOfTrunkGitHash(), 'a123testhash1')
mock_check_output.assert_called_once()
+ @mock.patch.object(subprocess, 'Popen')
+ def testCheckoutBranch(self, mock_popen):
+ mock_popen.return_value = mock.MagicMock(
+ communicate=lambda: (None, None), returncode=0)
+ get_llvm_hash.CheckoutBranch('fake/src_dir', 'fake_branch')
+ self.assertEqual(
+ mock_popen.call_args_list[0][0],
+ (['git', '-C', 'fake/src_dir', 'checkout', 'fake_branch'],))
+ self.assertEqual(mock_popen.call_args_list[1][0],
+ (['git', '-C', 'fake/src_dir', 'pull'],))
+
+ def testParseLLVMMajorVersion(self):
+ cmakelist_42 = ('set(CMAKE_BUILD_WITH_INSTALL_NAME_DIR ON)\n'
+ 'if(NOT DEFINED LLVM_VERSION_MAJOR)\n'
+ ' set(LLVM_VERSION_MAJOR 42)\n'
+ 'endif()')
+ self.assertEqual(get_llvm_hash.ParseLLVMMajorVersion(cmakelist_42), '42')
+
+ def testParseLLVMMajorVersionInvalid(self):
+ invalid_cmakelist = 'invalid cmakelist.txt contents'
+ with self.assertRaises(ValueError):
+ get_llvm_hash.ParseLLVMMajorVersion(invalid_cmakelist)
+
+ @mock.patch.object(get_llvm_hash, 'GetAndUpdateLLVMProjectInLLVMTools')
+ @mock.patch.object(get_llvm_hash, 'ParseLLVMMajorVersion')
+ @mock.patch.object(get_llvm_hash, 'CheckCommand')
+ @mock.patch.object(get_llvm_hash, 'CheckoutBranch')
+ @mock.patch(
+ 'get_llvm_hash.open',
+ mock.mock_open(read_data='mock contents'),
+ create=True)
+ def testGetLLVMMajorVersion(self, mock_checkout_branch, mock_git_checkout,
+ mock_major_version, mock_llvm_project_path):
+ mock_llvm_project_path.return_value = 'path/to/llvm-project'
+ mock_major_version.return_value = '1234'
+ self.assertEqual(get_llvm_hash.GetLLVMMajorVersion('314159265'), '1234')
+ # Second call should be memoized
+ self.assertEqual(get_llvm_hash.GetLLVMMajorVersion('314159265'), '1234')
+ mock_llvm_project_path.assert_called_once()
+ mock_major_version.assert_called_with('mock contents')
+ mock_git_checkout.assert_called_once_with(
+ ['git', '-C', 'path/to/llvm-project', 'checkout', '314159265'])
+ mock_checkout_branch.assert_called_once_with('path/to/llvm-project', 'main')
+
if __name__ == '__main__':
unittest.main()
diff --git a/llvm_tools/get_upstream_patch.py b/llvm_tools/get_upstream_patch.py
new file mode 100755
index 00000000..5669b023
--- /dev/null
+++ b/llvm_tools/get_upstream_patch.py
@@ -0,0 +1,465 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Get an upstream patch to LLVM's PATCHES.json."""
+
+import argparse
+import json
+import logging
+import os
+import shlex
+import subprocess
+import sys
+import typing as t
+from datetime import datetime
+
+import dataclasses
+
+import chroot
+import get_llvm_hash
+import git
+import git_llvm_rev
+import update_chromeos_llvm_hash
+
+__DOC_EPILOGUE = """
+Example Usage:
+ ./get_upstream_patch.py --chroot_path ~/chromiumos --platform chromiumos \
+--sha 1234567 --sha 890abdc
+"""
+
+
+class CherrypickError(ValueError):
+ """A ValueError that highlights the cherry-pick has been seen before"""
+
+
+def add_patch(patches_json_path: str, patches_dir: str,
+ relative_patches_dir: str, start_version: git_llvm_rev.Rev,
+ llvm_dir: str, rev: t.Union[git_llvm_rev.Rev, str], sha: str,
+ package: str, platforms: t.List[str]):
+ """Gets the start and end intervals in 'json_file'.
+
+ Args:
+ patches_json_path: The absolute path to PATCHES.json.
+ patches_dir: The aboslute path to the directory patches are in.
+ relative_patches_dir: The relative path to PATCHES.json.
+ start_version: The base LLVM revision this patch applies to.
+ llvm_dir: The path to LLVM checkout.
+ rev: An LLVM revision (git_llvm_rev.Rev) for a cherrypicking, or a
+ differential revision (str) otherwise.
+ sha: The LLVM git sha that corresponds to the patch. For differential
+ revisions, the git sha from the local commit created by 'arc patch'
+ is used.
+ package: The LLVM project name this patch applies to.
+ platforms: List of platforms this patch applies to.
+
+ Raises:
+ CherrypickError: A ValueError that highlights the cherry-pick has been
+ seen before.
+ """
+
+ with open(patches_json_path, encoding='utf-8') as f:
+ patches_json = json.load(f)
+
+ is_cherrypick = isinstance(rev, git_llvm_rev.Rev)
+ if is_cherrypick:
+ file_name = f'{sha}.patch'
+ else:
+ file_name = f'{rev}.patch'
+ rel_patch_path = os.path.join(relative_patches_dir, file_name)
+
+ for p in patches_json:
+ rel_path = p['rel_patch_path']
+ if rel_path == rel_patch_path:
+ raise CherrypickError(
+ f'Patch at {rel_path} already exists in PATCHES.json')
+ if is_cherrypick:
+ if sha in rel_path:
+ logging.warning(
+ 'Similarly-named patch already exists in PATCHES.json: %r',
+ rel_path)
+
+ with open(os.path.join(patches_dir, file_name), 'wb') as f:
+ cmd = ['git', 'show', sha]
+    # Only apply the part of the patch that belongs to this package, except
+    # LLVM. This is because some packages are built with LLVM ebuild on X86 but
+    # not on the other architectures, e.g. compiler-rt. Therefore always apply
+    # the entire patch to LLVM ebuild as a workaround.
+ if package != 'llvm':
+ cmd.append(package_to_project(package))
+ subprocess.check_call(cmd, stdout=f, cwd=llvm_dir)
+
+ commit_subject = subprocess.check_output(
+ ['git', 'log', '-n1', '--format=%s', sha],
+ cwd=llvm_dir,
+ encoding='utf-8')
+
+ end_vers = rev.number if isinstance(rev, git_llvm_rev.Rev) else None
+ patch_props = {
+ 'rel_patch_path': rel_patch_path,
+ 'metadata': {
+ 'title': commit_subject.strip(),
+ 'info': [],
+ },
+ 'platforms': sorted(platforms),
+ 'version_range': {
+ 'from': start_version.number,
+ 'until': end_vers,
+ },
+ }
+ patches_json.append(patch_props)
+
+ temp_file = patches_json_path + '.tmp'
+ with open(temp_file, 'w', encoding='utf-8') as f:
+ json.dump(patches_json,
+ f,
+ indent=4,
+ separators=(',', ': '),
+ sort_keys=True)
+ f.write('\n')
+ os.rename(temp_file, patches_json_path)
+
+
+def parse_ebuild_for_assignment(ebuild_path: str, var_name: str) -> str:
+ # '_pre' filters the LLVM 9.0 ebuild, which we never want to target, from
+ # this list.
+ candidates = [
+ x for x in os.listdir(ebuild_path)
+ if x.endswith('.ebuild') and '_pre' in x
+ ]
+
+ if not candidates:
+ raise ValueError('No ebuilds found under %r' % ebuild_path)
+
+ ebuild = os.path.join(ebuild_path, max(candidates))
+ with open(ebuild, encoding='utf-8') as f:
+ var_name_eq = var_name + '='
+ for orig_line in f:
+ if not orig_line.startswith(var_name_eq):
+ continue
+
+ # We shouldn't see much variety here, so do the simplest thing possible.
+ line = orig_line[len(var_name_eq):]
+ # Remove comments
+ line = line.split('#')[0]
+ # Remove quotes
+ line = shlex.split(line)
+ if len(line) != 1:
+ raise ValueError('Expected exactly one quoted value in %r' % orig_line)
+ return line[0].strip()
+
+ raise ValueError('No %s= line found in %r' % (var_name, ebuild))
+
+
+# Resolves a git ref (or similar) to a LLVM SHA.
+def resolve_llvm_ref(llvm_dir: str, sha: str) -> str:
+ return subprocess.check_output(
+ ['git', 'rev-parse', sha],
+ encoding='utf-8',
+ cwd=llvm_dir,
+ ).strip()
+
+
+# Get the package name of an LLVM project
+def project_to_package(project: str) -> str:
+ if project == 'libunwind':
+ return 'llvm-libunwind'
+ return project
+
+
+# Get the LLVM project name of a package
+def package_to_project(package: str) -> str:
+ if package == 'llvm-libunwind':
+ return 'libunwind'
+ return package
+
+
+# Get the LLVM projects changed in the specified sha
+def get_package_names(sha: str, llvm_dir: str) -> list:
+ paths = subprocess.check_output(
+ ['git', 'show', '--name-only', '--format=', sha],
+ cwd=llvm_dir,
+ encoding='utf-8').splitlines()
+ # Some LLVM projects are built by LLVM ebuild on X86, so always apply the
+ # patch to LLVM ebuild
+ packages = {'llvm'}
+ # Detect if there are more packages to apply the patch to
+ for path in paths:
+ package = project_to_package(path.split('/')[0])
+ if package in ('compiler-rt', 'libcxx', 'libcxxabi', 'llvm-libunwind'):
+ packages.add(package)
+ packages = list(sorted(packages))
+ return packages
+
+
+def create_patch_for_packages(packages: t.List[str], symlinks: t.List[str],
+ start_rev: git_llvm_rev.Rev,
+ rev: t.Union[git_llvm_rev.Rev, str], sha: str,
+ llvm_dir: str, platforms: t.List[str]):
+ """Create a patch and add its metadata for each package"""
+ for package, symlink in zip(packages, symlinks):
+ symlink_dir = os.path.dirname(symlink)
+ patches_json_path = os.path.join(symlink_dir, 'files/PATCHES.json')
+ relative_patches_dir = 'cherry' if package == 'llvm' else ''
+ patches_dir = os.path.join(symlink_dir, 'files', relative_patches_dir)
+ logging.info('Getting %s (%s) into %s', rev, sha, package)
+ add_patch(patches_json_path,
+ patches_dir,
+ relative_patches_dir,
+ start_rev,
+ llvm_dir,
+ rev,
+ sha,
+ package,
+ platforms=platforms)
+
+
+def make_cl(symlinks_to_uprev: t.List[str], llvm_symlink_dir: str, branch: str,
+ commit_messages: t.List[str], reviewers: t.Optional[t.List[str]],
+ cc: t.Optional[t.List[str]]):
+ symlinks_to_uprev = sorted(set(symlinks_to_uprev))
+ for symlink in symlinks_to_uprev:
+ update_chromeos_llvm_hash.UprevEbuildSymlink(symlink)
+ subprocess.check_output(['git', 'add', '--all'],
+ cwd=os.path.dirname(symlink))
+ git.UploadChanges(llvm_symlink_dir, branch, commit_messages, reviewers, cc)
+ git.DeleteBranch(llvm_symlink_dir, branch)
+
+
+def resolve_symbolic_sha(start_sha: str, llvm_symlink_dir: str) -> str:
+ if start_sha == 'llvm':
+ return parse_ebuild_for_assignment(llvm_symlink_dir, 'LLVM_HASH')
+
+ if start_sha == 'llvm-next':
+ return parse_ebuild_for_assignment(llvm_symlink_dir, 'LLVM_NEXT_HASH')
+
+ return start_sha
+
+
+def find_patches_and_make_cl(
+ chroot_path: str, patches: t.List[str], start_rev: git_llvm_rev.Rev,
+ llvm_config: git_llvm_rev.LLVMConfig, llvm_symlink_dir: str,
+ create_cl: bool, skip_dependencies: bool,
+ reviewers: t.Optional[t.List[str]], cc: t.Optional[t.List[str]],
+ platforms: t.List[str]):
+
+ converted_patches = [
+ _convert_patch(llvm_config, skip_dependencies, p) for p in patches
+ ]
+ potential_duplicates = _get_duplicate_shas(converted_patches)
+ if potential_duplicates:
+ err_msg = '\n'.join(f'{a.patch} == {b.patch}'
+ for a, b in potential_duplicates)
+ raise RuntimeError(f'Found Duplicate SHAs:\n{err_msg}')
+
+ # CL Related variables, only used if `create_cl`
+ symlinks_to_uprev = []
+ commit_messages = [
+ 'llvm: get patches from upstream\n',
+ ]
+ branch = f'get-upstream-{datetime.now().strftime("%Y%m%d%H%M%S%f")}'
+
+ if create_cl:
+ git.CreateBranch(llvm_symlink_dir, branch)
+
+ for parsed_patch in converted_patches:
+ # Find out the llvm projects changed in this commit
+ packages = get_package_names(parsed_patch.sha, llvm_config.dir)
+ # Find out the ebuild symlinks of the corresponding ChromeOS packages
+ symlinks = chroot.GetChrootEbuildPaths(chroot_path, [
+ 'sys-devel/llvm' if package == 'llvm' else 'sys-libs/' + package
+ for package in packages
+ ])
+ symlinks = chroot.ConvertChrootPathsToAbsolutePaths(chroot_path, symlinks)
+ # Create a local patch for all the affected llvm projects
+ create_patch_for_packages(packages,
+ symlinks,
+ start_rev,
+ parsed_patch.rev,
+ parsed_patch.sha,
+ llvm_config.dir,
+ platforms=platforms)
+ if create_cl:
+ symlinks_to_uprev.extend(symlinks)
+
+ commit_messages.extend([
+ parsed_patch.git_msg(),
+ subprocess.check_output(
+ ['git', 'log', '-n1', '--oneline', parsed_patch.sha],
+ cwd=llvm_config.dir,
+ encoding='utf-8')
+ ])
+
+ if parsed_patch.is_differential:
+ subprocess.check_output(['git', 'reset', '--hard', 'HEAD^'],
+ cwd=llvm_config.dir)
+
+ if create_cl:
+ make_cl(symlinks_to_uprev, llvm_symlink_dir, branch, commit_messages,
+ reviewers, cc)
+
+
+@dataclasses.dataclass(frozen=True)
+class ParsedPatch:
+ """Class to keep track of bundled patch info."""
+ patch: str
+ sha: str
+ is_differential: bool
+ rev: t.Union[git_llvm_rev.Rev, str]
+
+ def git_msg(self) -> str:
+ if self.is_differential:
+ return f'\n\nreviews.llvm.org/{self.patch}\n'
+ return f'\n\nreviews.llvm.org/rG{self.sha}\n'
+
+
+def _convert_patch(llvm_config: git_llvm_rev.LLVMConfig,
+ skip_dependencies: bool, patch: str) -> ParsedPatch:
+ """Extract git revision info from a patch.
+
+ Args:
+ llvm_config: LLVM configuration object.
+    skip_dependencies: Pass --skip-dependencies to `arc`.
+ patch: A single patch referent string.
+
+ Returns:
+ A [ParsedPatch] object.
+ """
+
+ # git hash should only have lower-case letters
+ is_differential = patch.startswith('D')
+ if is_differential:
+ subprocess.check_output(
+ [
+ 'arc', 'patch', '--nobranch',
+ '--skip-dependencies' if skip_dependencies else '--revision', patch
+ ],
+ cwd=llvm_config.dir,
+ )
+ sha = resolve_llvm_ref(llvm_config.dir, 'HEAD')
+ rev = patch
+ else:
+ sha = resolve_llvm_ref(llvm_config.dir, patch)
+ rev = git_llvm_rev.translate_sha_to_rev(llvm_config, sha)
+ return ParsedPatch(patch=patch,
+ sha=sha,
+ rev=rev,
+ is_differential=is_differential)
+
+
+def _get_duplicate_shas(patches: t.List[ParsedPatch]
+ ) -> t.List[t.Tuple[ParsedPatch, ParsedPatch]]:
+ """Return a list of Patches which have duplicate SHA's"""
+ return [(left, right) for i, left in enumerate(patches)
+ for right in patches[i + 1:] if left.sha == right.sha]
+
+
+def get_from_upstream(chroot_path: str,
+ create_cl: bool,
+ start_sha: str,
+ patches: t.List[str],
+ platforms: t.List[str],
+ skip_dependencies: bool = False,
+ reviewers: t.List[str] = None,
+ cc: t.List[str] = None):
+ llvm_symlink = chroot.ConvertChrootPathsToAbsolutePaths(
+ chroot_path, chroot.GetChrootEbuildPaths(chroot_path,
+ ['sys-devel/llvm']))[0]
+ llvm_symlink_dir = os.path.dirname(llvm_symlink)
+
+ git_status = subprocess.check_output(['git', 'status', '-s'],
+ cwd=llvm_symlink_dir,
+ encoding='utf-8')
+
+ if git_status:
+ error_path = os.path.dirname(os.path.dirname(llvm_symlink_dir))
+    raise ValueError(f'Uncommitted changes detected in {error_path}')
+
+ start_sha = resolve_symbolic_sha(start_sha, llvm_symlink_dir)
+ logging.info('Base llvm hash == %s', start_sha)
+
+ llvm_config = git_llvm_rev.LLVMConfig(
+ remote='origin', dir=get_llvm_hash.GetAndUpdateLLVMProjectInLLVMTools())
+ start_sha = resolve_llvm_ref(llvm_config.dir, start_sha)
+
+ find_patches_and_make_cl(chroot_path=chroot_path,
+ patches=patches,
+ platforms=platforms,
+ start_rev=git_llvm_rev.translate_sha_to_rev(
+ llvm_config, start_sha),
+ llvm_config=llvm_config,
+ llvm_symlink_dir=llvm_symlink_dir,
+ create_cl=create_cl,
+ skip_dependencies=skip_dependencies,
+ reviewers=reviewers,
+ cc=cc)
+ logging.info('Complete.')
+
+
+def main():
+ chroot.VerifyOutsideChroot()
+ logging.basicConfig(
+ format='%(asctime)s: %(levelname)s: %(filename)s:%(lineno)d: %(message)s',
+ level=logging.INFO,
+ )
+
+ parser = argparse.ArgumentParser(
+ description=__doc__,
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ epilog=__DOC_EPILOGUE)
+ parser.add_argument('--chroot_path',
+ default=os.path.join(os.path.expanduser('~'),
+ 'chromiumos'),
+ help='the path to the chroot (default: %(default)s)')
+ parser.add_argument(
+ '--start_sha',
+ default='llvm-next',
+ help='LLVM SHA that the patch should start applying at. You can specify '
+ '"llvm" or "llvm-next", as well. Defaults to %(default)s.')
+ parser.add_argument('--sha',
+ action='append',
+ default=[],
+ help='The LLVM git SHA to cherry-pick.')
+ parser.add_argument(
+ '--differential',
+ action='append',
+ default=[],
+ help='The LLVM differential revision to apply. Example: D1234')
+ parser.add_argument(
+ '--platform',
+ action='append',
+ required=True,
+    help='Apply this patch to the given platform. Common options include '
+ '"chromiumos" and "android". Can be specified multiple times to '
+ 'apply to multiple platforms')
+ parser.add_argument('--create_cl',
+ action='store_true',
+ help='Automatically create a CL if specified')
+ parser.add_argument(
+ '--skip_dependencies',
+ action='store_true',
+ help="Skips a LLVM differential revision's dependencies. Only valid "
+ 'when --differential appears exactly once.')
+ args = parser.parse_args()
+
+ if not (args.sha or args.differential):
+ parser.error('--sha or --differential required')
+
+ if args.skip_dependencies and len(args.differential) != 1:
+ parser.error("--skip_dependencies is only valid when there's exactly one "
+ 'supplied differential')
+
+ get_from_upstream(
+ chroot_path=args.chroot_path,
+ create_cl=args.create_cl,
+ start_sha=args.start_sha,
+ patches=args.sha + args.differential,
+ skip_dependencies=args.skip_dependencies,
+ platforms=args.platform,
+ )
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/llvm_tools/git.py b/llvm_tools/git.py
index f38d5e72..22c7002a 100755
--- a/llvm_tools/git.py
+++ b/llvm_tools/git.py
@@ -65,14 +65,14 @@ def DeleteBranch(repo, branch):
if not os.path.isdir(repo):
raise ValueError('Invalid directory path provided: %s' % repo)
- subprocess.check_output(['git', '-C', repo, 'checkout', 'cros/master'])
+ subprocess.check_output(['git', '-C', repo, 'checkout', 'cros/main'])
subprocess.check_output(['git', '-C', repo, 'reset', 'HEAD', '--hard'])
subprocess.check_output(['git', '-C', repo, 'branch', '-D', branch])
-def UploadChanges(repo, branch, commit_messages):
+def UploadChanges(repo, branch, commit_messages, reviewers=None, cc=None):
"""Uploads the changes in the specifed branch of the given repo for review.
Args:
@@ -80,6 +80,8 @@ def UploadChanges(repo, branch, commit_messages):
branch: The name of the branch to upload.
commit_messages: A string of commit message(s) (i.e. '[message]'
of the changes made.
+ reviewers: A list of reviewers to add to the CL.
+ cc: A list of contributors to CC about the CL.
Returns:
    A namedtuple that has two (key, value) pairs, where the first pair is the
@@ -101,12 +103,24 @@ def UploadChanges(repo, branch, commit_messages):
subprocess.check_output(['git', 'commit', '-F', f.name], cwd=repo)
# Upload the changes for review.
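+  # Note: `--ne` is `repo upload`'s short flag for `--no-emails`.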
+ git_args = [
+ 'repo',
+ 'upload',
+ '--yes',
+ f'--reviewers={",".join(reviewers)}' if reviewers else '--ne',
+ '--no-verify',
+ f'--br={branch}',
+ ]
+
+ if cc:
+ git_args.append(f'--cc={",".join(cc)}')
+
out = subprocess.check_output(
- ['repo', 'upload', '--yes', '--ne', '--no-verify',
- '--br=%s' % branch],
+ git_args,
stderr=subprocess.STDOUT,
cwd=repo,
- encoding='utf-8')
+ encoding='utf-8',
+ )
print(out)
diff --git a/llvm_tools/git_llvm_rev.py b/llvm_tools/git_llvm_rev.py
index c8c1505c..b62b26e2 100755
--- a/llvm_tools/git_llvm_rev.py
+++ b/llvm_tools/git_llvm_rev.py
@@ -183,6 +183,11 @@ def translate_sha_to_rev(llvm_config: LLVMConfig, sha_or_ref: str) -> Rev:
raise ValueError(
f'No viable branches found from {llvm_config.remote} with {sha}')
+ # It seems that some `origin/release/.*` branches have
+ # `origin/upstream/release/.*` equivalents, which is... awkward to deal with.
+ # Prefer the latter, since that seems to have newer commits than the former.
+ # Technically n^2, but len(elements) should be like, tens in the worst case.
+ candidates = [x for x in candidates if f'upstream/{x}' not in candidates]
if len(candidates) != 1:
raise ValueError(
f'Ambiguity: multiple branches from {llvm_config.remote} have {sha}: '
diff --git a/llvm_tools/git_llvm_rev_test.py b/llvm_tools/git_llvm_rev_test.py
index 74280c5d..d05093a8 100755
--- a/llvm_tools/git_llvm_rev_test.py
+++ b/llvm_tools/git_llvm_rev_test.py
@@ -93,7 +93,7 @@ class Test(unittest.TestCase):
def test_zz_branch_revs_work_after_merge_points_and_svn_cutoff(self) -> None:
# Arbitrary 9.x commit without an attached llvm-svn: value.
sha = self.rev_to_sha_with_round_trip(
- git_llvm_rev.Rev(branch='release/9.x', number=366670))
+ git_llvm_rev.Rev(branch='upstream/release/9.x', number=366670))
self.assertEqual(sha, '4e858e4ac00b59f064da4e1f7e276916e7d296aa')
def test_zz_branch_revs_work_at_merge_points(self) -> None:
@@ -108,7 +108,7 @@ class Test(unittest.TestCase):
# branch, we'll pick main for this. That's fine.
sha = git_llvm_rev.translate_rev_to_sha(
get_llvm_config(),
- git_llvm_rev.Rev(branch='release/9.x', number=rev_number))
+ git_llvm_rev.Rev(branch='upstream/release/9.x', number=rev_number))
self.assertEqual(sha, backing_sha)
def test_zz_branch_revs_work_after_merge_points(self) -> None:
@@ -117,7 +117,7 @@ class Test(unittest.TestCase):
# ours, and are therefore untrustworthy. The commit for this *does* have a
# different `llvm-svn:` string than we should have.
sha = self.rev_to_sha_with_round_trip(
- git_llvm_rev.Rev(branch='release/9.x', number=366427))
+ git_llvm_rev.Rev(branch='upstream/release/9.x', number=366427))
self.assertEqual(sha, '2cf681a11aea459b50d712abc7136f7129e4d57f')
diff --git a/llvm_tools/llvm_bisection.py b/llvm_tools/llvm_bisection.py
index b1898ea9..0148efd2 100755
--- a/llvm_tools/llvm_bisection.py
+++ b/llvm_tools/llvm_bisection.py
@@ -20,6 +20,7 @@ import chroot
import get_llvm_hash
import git_llvm_rev
import modify_a_tryjob
+import update_chromeos_llvm_hash
import update_tryjob_status
@@ -51,18 +52,16 @@ def GetCommandLineArgs():
'the first bad version (default: %(default)s)')
# Add argument for the good LLVM revision for bisection.
- parser.add_argument(
- '--start_rev',
- required=True,
- type=int,
- help='The good revision for the bisection.')
+ parser.add_argument('--start_rev',
+ required=True,
+ type=int,
+ help='The good revision for the bisection.')
# Add argument for the bad LLVM revision for bisection.
- parser.add_argument(
- '--end_rev',
- required=True,
- type=int,
- help='The bad revision for the bisection.')
+ parser.add_argument('--end_rev',
+ required=True,
+ type=int,
+ help='The bad revision for the bisection.')
# Add argument for the absolute path to the file that contains information on
# the previous tested svn version.
@@ -88,42 +87,38 @@ def GetCommandLineArgs():
'of updating the packages')
# Add argument for custom options for the tryjob.
- parser.add_argument(
- '--options',
- required=False,
- nargs='+',
- help='options to use for the tryjob testing')
+ parser.add_argument('--options',
+ required=False,
+ nargs='+',
+ help='options to use for the tryjob testing')
# Add argument for the builder to use for the tryjob.
- parser.add_argument(
- '--builder', required=True, help='builder to use for the tryjob testing')
+ parser.add_argument('--builder',
+ required=True,
+ help='builder to use for the tryjob testing')
# Add argument for the description of the tryjob.
- parser.add_argument(
- '--description',
- required=False,
- nargs='+',
- help='the description of the tryjob')
+ parser.add_argument('--description',
+ required=False,
+ nargs='+',
+ help='the description of the tryjob')
# Add argument for a specific chroot path.
- parser.add_argument(
- '--chroot_path',
- default=cros_root,
- help='the path to the chroot (default: %(default)s)')
+ parser.add_argument('--chroot_path',
+ default=cros_root,
+ help='the path to the chroot (default: %(default)s)')
# Add argument for whether to display command contents to `stdout`.
- parser.add_argument(
- '--verbose',
- action='store_true',
- help='display contents of a command to the terminal '
- '(default: %(default)s)')
+ parser.add_argument('--verbose',
+ action='store_true',
+ help='display contents of a command to the terminal '
+ '(default: %(default)s)')
# Add argument for whether to display command contents to `stdout`.
- parser.add_argument(
- '--nocleanup',
- action='store_false',
- dest='cleanup',
- help='Abandon CLs created for bisectoin')
+ parser.add_argument('--nocleanup',
+ action='store_false',
+ dest='cleanup',
+                      help='Abandon CLs created for bisection')
args_output = parser.parse_args()
@@ -174,8 +169,7 @@ def GetRemainingRange(start, end, tryjobs):
all_bad_revisions = [end]
all_bad_revisions.extend(
- cur_tryjob['rev']
- for cur_tryjob in tryjobs
+ cur_tryjob['rev'] for cur_tryjob in tryjobs
if cur_tryjob['status'] == update_tryjob_status.TryjobStatus.BAD.value)
# The minimum value for the 'bad' field in the tryjobs is the new end
@@ -184,8 +178,7 @@ def GetRemainingRange(start, end, tryjobs):
all_good_revisions = [start]
all_good_revisions.extend(
- cur_tryjob['rev']
- for cur_tryjob in tryjobs
+ cur_tryjob['rev'] for cur_tryjob in tryjobs
if cur_tryjob['status'] == update_tryjob_status.TryjobStatus.GOOD.value)
# The maximum value for the 'good' field in the tryjobs is the new start
@@ -205,8 +198,8 @@ def GetRemainingRange(start, end, tryjobs):
pending_revisions = {
tryjob['rev']
for tryjob in tryjobs
- if tryjob['status'] == update_tryjob_status.TryjobStatus.PENDING.value and
- good_rev < tryjob['rev'] < bad_rev
+ if tryjob['status'] == update_tryjob_status.TryjobStatus.PENDING.value
+ and good_rev < tryjob['rev'] < bad_rev
}
# Find all revisions that are to be skipped within 'good_rev' and 'bad_rev'.
@@ -217,8 +210,8 @@ def GetRemainingRange(start, end, tryjobs):
skip_revisions = {
tryjob['rev']
for tryjob in tryjobs
- if tryjob['status'] == update_tryjob_status.TryjobStatus.SKIP.value and
- good_rev < tryjob['rev'] < bad_rev
+ if tryjob['status'] == update_tryjob_status.TryjobStatus.SKIP.value
+ and good_rev < tryjob['rev'] < bad_rev
}
return good_rev, bad_rev, pending_revisions, skip_revisions
@@ -295,66 +288,62 @@ def main(args_output):
"""
chroot.VerifyOutsideChroot()
- update_packages = [
- 'sys-devel/llvm', 'sys-libs/compiler-rt', 'sys-libs/libcxx',
- 'sys-libs/libcxxabi', 'sys-libs/llvm-libunwind'
- ]
patch_metadata_file = 'PATCHES.json'
start = args_output.start_rev
end = args_output.end_rev
bisect_state = LoadStatusFile(args_output.last_tested, start, end)
if start != bisect_state['start'] or end != bisect_state['end']:
- raise ValueError(f'The start {start} or the end {end} version provided is '
- f'different than "start" {bisect_state["start"]} or "end" '
- f'{bisect_state["end"]} in the .JSON file')
+ raise ValueError(
+ f'The start {start} or the end {end} version provided is '
+ f'different than "start" {bisect_state["start"]} or "end" '
+ f'{bisect_state["end"]} in the .JSON file')
- # Pending and skipped revisions are between 'start_revision' and
- # 'end_revision'.
- start_revision, end_revision, pending_revisions, skip_revisions = \
- GetRemainingRange(start, end, bisect_state['jobs'])
+ # Pending and skipped revisions are between 'start_rev' and 'end_rev'.
+ start_rev, end_rev, pending_revs, skip_revs = GetRemainingRange(
+ start, end, bisect_state['jobs'])
- revisions, git_hashes = GetCommitsBetween(start_revision, end_revision,
+ revisions, git_hashes = GetCommitsBetween(start_rev, end_rev,
args_output.parallel,
- args_output.src_path,
- pending_revisions, skip_revisions)
+ args_output.src_path, pending_revs,
+ skip_revs)
- # No more revisions between 'start_revision' and 'end_revision', so
+ # No more revisions between 'start_rev' and 'end_rev', so
# bisection is complete.
#
- # This is determined by finding all valid revisions between 'start_revision'
- # and 'end_revision' and that are NOT in the 'pending' and 'skipped' set.
+  # This is determined by finding all valid revisions between 'start_rev'
+  # and 'end_rev' that are NOT in the 'pending' or 'skipped' sets.
if not revisions:
- if pending_revisions:
+ if pending_revs:
# Some tryjobs are not finished which may change the actual bad
# commit/revision when those tryjobs are finished.
- no_revisions_message = (f'No revisions between start {start_revision} '
- f'and end {end_revision} to create tryjobs\n')
+ no_revisions_message = (f'No revisions between start {start_rev} '
+ f'and end {end_rev} to create tryjobs\n')
- if pending_revisions:
- no_revisions_message += (
- 'The following tryjobs are pending:\n' +
- '\n'.join(str(rev) for rev in pending_revisions) + '\n')
+ if pending_revs:
+ no_revisions_message += ('The following tryjobs are pending:\n' +
+ '\n'.join(str(rev)
+ for rev in pending_revs) + '\n')
- if skip_revisions:
+ if skip_revs:
no_revisions_message += ('The following tryjobs were skipped:\n' +
- '\n'.join(str(rev) for rev in skip_revisions) +
- '\n')
+ '\n'.join(str(rev)
+ for rev in skip_revs) + '\n')
raise ValueError(no_revisions_message)
print(f'Finished bisecting for {args_output.last_tested}')
if args_output.src_path:
bad_llvm_hash = get_llvm_hash.GetGitHashFrom(args_output.src_path,
- end_revision)
+ end_rev)
else:
- bad_llvm_hash = get_llvm_hash.LLVMHash().GetLLVMHash(end_revision)
- print(f'The bad revision is {end_revision} and its commit hash is '
+ bad_llvm_hash = get_llvm_hash.LLVMHash().GetLLVMHash(end_rev)
+ print(f'The bad revision is {end_rev} and its commit hash is '
f'{bad_llvm_hash}')
- if skip_revisions:
- skip_revisions_message = ('\nThe following revisions were skipped:\n' +
- '\n'.join(str(rev) for rev in skip_revisions))
- print(skip_revisions_message)
+ if skip_revs:
+ skip_revs_message = ('\nThe following revisions were skipped:\n' +
+ '\n'.join(str(rev) for rev in skip_revs))
+ print(skip_revs_message)
if args_output.cleanup:
# Abandon all the CLs created for bisection
@@ -378,9 +367,9 @@ def main(args_output):
raise ValueError(f'Revision {rev} exists already in "jobs"')
Bisect(revisions, git_hashes, bisect_state, args_output.last_tested,
- update_packages, args_output.chroot_path, patch_metadata_file,
- args_output.extra_change_lists, args_output.options,
- args_output.builder, args_output.verbose)
+ update_chromeos_llvm_hash.DEFAULT_PACKAGES, args_output.chroot_path,
+ patch_metadata_file, args_output.extra_change_lists,
+ args_output.options, args_output.builder, args_output.verbose)
if __name__ == '__main__':
diff --git a/llvm_tools/modify_a_tryjob.py b/llvm_tools/modify_a_tryjob.py
index 4d41e6b2..519fb51e 100755
--- a/llvm_tools/modify_a_tryjob.py
+++ b/llvm_tools/modify_a_tryjob.py
@@ -17,9 +17,9 @@ import sys
import chroot
import failure_modes
import get_llvm_hash
+import update_chromeos_llvm_hash
import update_packages_and_run_tests
import update_tryjob_status
-import update_chromeos_llvm_hash
class ModifyTryjob(enum.Enum):
@@ -57,11 +57,10 @@ def GetCommandLineArgs():
# Add argument that determines which revision to search for in the list of
# tryjobs.
- parser.add_argument(
- '--revision',
- required=True,
- type=int,
- help='The revision to either remove or relaunch.')
+ parser.add_argument('--revision',
+ required=True,
+ type=int,
+ help='The revision to either remove or relaunch.')
# Add argument for other change lists that want to run alongside the tryjob.
parser.add_argument(
@@ -72,40 +71,38 @@ def GetCommandLineArgs():
'of updating the packages')
# Add argument for custom options for the tryjob.
- parser.add_argument(
- '--options',
- required=False,
- nargs='+',
- help='options to use for the tryjob testing')
+ parser.add_argument('--options',
+ required=False,
+ nargs='+',
+ help='options to use for the tryjob testing')
# Add argument for the builder to use for the tryjob.
- parser.add_argument('--builder', help='builder to use for the tryjob testing')
+ parser.add_argument('--builder',
+ help='builder to use for the tryjob testing')
# Add argument for a specific chroot path.
- parser.add_argument(
- '--chroot_path',
- default=cros_root,
- help='the path to the chroot (default: %(default)s)')
+ parser.add_argument('--chroot_path',
+ default=cros_root,
+ help='the path to the chroot (default: %(default)s)')
# Add argument for whether to display command contents to `stdout`.
- parser.add_argument(
- '--verbose',
- action='store_true',
- help='display contents of a command to the terminal '
- '(default: %(default)s)')
+ parser.add_argument('--verbose',
+ action='store_true',
+ help='display contents of a command to the terminal '
+ '(default: %(default)s)')
args_output = parser.parse_args()
- if not os.path.isfile(args_output.status_file) or \
- not args_output.status_file.endswith('.json'):
+ if (not os.path.isfile(args_output.status_file)
+ or not args_output.status_file.endswith('.json')):
    raise ValueError('File does not exist or does not end in ".json" '
': %s' % args_output.status_file)
- if args_output.modify_tryjob == ModifyTryjob.ADD.value and \
- not args_output.builder:
+ if (args_output.modify_tryjob == ModifyTryjob.ADD.value
+ and not args_output.builder):
raise ValueError('A builder is required for adding a tryjob.')
- elif args_output.modify_tryjob != ModifyTryjob.ADD.value and \
- args_output.builder:
+ elif (args_output.modify_tryjob != ModifyTryjob.ADD.value
+ and args_output.builder):
raise ValueError('Specifying a builder is only available when adding a '
'tryjob.')
@@ -234,13 +231,13 @@ def PerformTryjobModification(revision, modify_tryjob, status_file, extra_cls,
bisect_contents['jobs'][tryjob_index]['cl'],
bisect_contents['jobs'][tryjob_index]['extra_cls'],
bisect_contents['jobs'][tryjob_index]['options'],
- bisect_contents['jobs'][tryjob_index]['builder'], chroot_path, verbose)
+ bisect_contents['jobs'][tryjob_index]['builder'], chroot_path)
bisect_contents['jobs'][tryjob_index][
'status'] = update_tryjob_status.TryjobStatus.PENDING.value
bisect_contents['jobs'][tryjob_index]['link'] = tryjob_results[0]['link']
- bisect_contents['jobs'][tryjob_index]['buildbucket_id'] = tryjob_results[0][
- 'buildbucket_id']
+ bisect_contents['jobs'][tryjob_index]['buildbucket_id'] = tryjob_results[
+ 0]['buildbucket_id']
print('Successfully relaunched the tryjob for revision %d and updated '
'the tryjob link to %s' % (revision, tryjob_results[0]['link']))
@@ -253,17 +250,14 @@ def PerformTryjobModification(revision, modify_tryjob, status_file, extra_cls,
# Make sure the revision is within the bounds of the start and end of the
# bisection.
elif bisect_contents['start'] < revision < bisect_contents['end']:
- update_packages = [
- 'sys-devel/llvm', 'sys-libs/compiler-rt', 'sys-libs/libcxx',
- 'sys-libs/libcxxabi', 'sys-libs/llvm-libunwind'
- ]
patch_metadata_file = 'PATCHES.json'
git_hash, revision = get_llvm_hash.GetLLVMHashAndVersionFromSVNOption(
revision)
- tryjob_dict = AddTryjob(update_packages, git_hash, revision, chroot_path,
+ tryjob_dict = AddTryjob(update_chromeos_llvm_hash.DEFAULT_PACKAGES,
+ git_hash, revision, chroot_path,
patch_metadata_file, extra_cls, options, builder,
verbose, revision)
@@ -277,7 +271,10 @@ def PerformTryjobModification(revision, modify_tryjob, status_file, extra_cls,
modify_tryjob)
with open(status_file, 'w') as update_tryjobs:
- json.dump(bisect_contents, update_tryjobs, indent=4, separators=(',', ': '))
+ json.dump(bisect_contents,
+ update_tryjobs,
+ indent=4,
+ separators=(',', ': '))
def main():
@@ -290,9 +287,9 @@ def main():
PerformTryjobModification(args_output.revision,
ModifyTryjob(args_output.modify_tryjob),
args_output.status_file,
- args_output.extra_change_lists, args_output.options,
- args_output.builder, args_output.chroot_path,
- args_output.verbose)
+ args_output.extra_change_lists,
+ args_output.options, args_output.builder,
+ args_output.chroot_path, args_output.verbose)
if __name__ == '__main__':
diff --git a/llvm_tools/nightly_revert_checker.py b/llvm_tools/nightly_revert_checker.py
index 3a23890a..89485088 100755
--- a/llvm_tools/nightly_revert_checker.py
+++ b/llvm_tools/nightly_revert_checker.py
@@ -6,12 +6,10 @@
"""Checks for new reverts in LLVM on a nightly basis.
-If any reverts are found that were previously unknown, this fires off an email.
-All LLVM SHAs to monitor are autodetected.
+If any reverts are found that were previously unknown, this cherry-picks them or
+fires off an email. All LLVM SHAs to monitor are autodetected.
"""
-# pylint: disable=cros-logging-import
-
from __future__ import print_function
import argparse
@@ -26,38 +24,50 @@ import typing as t
import cros_utils.email_sender as email_sender
import cros_utils.tiny_render as tiny_render
+
import get_llvm_hash
+import get_upstream_patch
import git_llvm_rev
import revert_checker
State = t.Any
-def _find_interesting_android_shas(
- android_llvm_toolchain_dir: str) -> t.List[t.Tuple[str]]:
+def _find_interesting_android_shas(android_llvm_toolchain_dir: str
+ ) -> t.List[t.Tuple[str, str]]:
llvm_project = os.path.join(android_llvm_toolchain_dir,
'toolchain/llvm-project')
def get_llvm_merge_base(branch: str) -> str:
- return subprocess.check_output(
- ['git', 'merge-base', branch, 'aosp/upstream-master'],
+ head_sha = subprocess.check_output(
+ ['git', 'rev-parse', branch],
cwd=llvm_project,
encoding='utf-8',
).strip()
+ merge_base = subprocess.check_output(
+ ['git', 'merge-base', branch, 'aosp/upstream-main'],
+ cwd=llvm_project,
+ encoding='utf-8',
+ ).strip()
+ logging.info('Merge-base for %s (HEAD == %s) and upstream-main is %s',
+ branch, head_sha, merge_base)
+ return merge_base
- main_legacy = get_llvm_merge_base('aosp/master-legacy')
+ main_legacy = get_llvm_merge_base('aosp/master-legacy') # nocheck
testing_upstream = get_llvm_merge_base('aosp/testing-upstream')
result = [('main-legacy', main_legacy)]
# If these are the same SHA, there's no point in tracking both.
if main_legacy != testing_upstream:
result.append(('testing-upstream', testing_upstream))
+ else:
+ logging.info('main-legacy and testing-upstream are identical; ignoring '
+ 'the latter.')
return result
-def _parse_llvm_ebuild_for_shas(
- ebuild_file: io.TextIOWrapper) -> t.List[t.Tuple[str]]:
-
+def _parse_llvm_ebuild_for_shas(ebuild_file: io.TextIOWrapper
+ ) -> t.List[t.Tuple[str, str]]:
def parse_ebuild_assignment(line: str) -> str:
no_comments = line.split('#')[0]
no_assign = no_comments.split('=', 1)[1].strip()
@@ -84,12 +94,12 @@ def _parse_llvm_ebuild_for_shas(
return results
-def _find_interesting_chromeos_shas(chromeos_base: str) -> t.List[t.Tuple[str]]:
+def _find_interesting_chromeos_shas(chromeos_base: str
+ ) -> t.List[t.Tuple[str, str]]:
llvm_dir = os.path.join(chromeos_base,
'src/third_party/chromiumos-overlay/sys-devel/llvm')
candidate_ebuilds = [
- os.path.join(llvm_dir, x)
- for x in os.listdir(llvm_dir)
+ os.path.join(llvm_dir, x) for x in os.listdir(llvm_dir)
if '_pre' in x and not os.path.islink(os.path.join(llvm_dir, x))
]
@@ -193,25 +203,132 @@ def _read_state(state_file: str) -> State:
return {}
-def main(argv: t.List[str]) -> None:
+def find_shas(llvm_dir: str, interesting_shas: t.List[t.Tuple[str, str]],
+ state: State, new_state: State):
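+  """Yields (friendly_name, sha, new_reverts) for SHAs with unreported reverts.
+
+  As a side effect, records every revert found in new_state.
+  """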
+ for friendly_name, sha in interesting_shas:
+ logging.info('Finding reverts across %s (%s)', friendly_name, sha)
+ all_reverts = revert_checker.find_reverts(llvm_dir,
+ sha,
+ root='origin/' +
+ git_llvm_rev.MAIN_BRANCH)
+ logging.info('Detected the following revert(s) across %s:\n%s',
+ friendly_name, pprint.pformat(all_reverts))
+
+ new_state[sha] = [r.sha for r in all_reverts]
+
+ if sha not in state:
+ logging.info('SHA %s is new to me', sha)
+ existing_reverts = set()
+ else:
+ existing_reverts = set(state[sha])
+
+ new_reverts = [r for r in all_reverts if r.sha not in existing_reverts]
+ if not new_reverts:
+ logging.info('...All of which have been reported.')
+ continue
+
+ yield (friendly_name, sha, new_reverts)
+
+
+def do_cherrypick(chroot_path: str, llvm_dir: str,
+ interesting_shas: t.List[t.Tuple[str, str]], state: State,
+ reviewers: t.List[str], cc: t.List[str]) -> State:
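+  """Uploads a cherry-pick CL for each new revert; returns the new state."""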
+ new_state: State = {}
+ seen: t.Set[str] = set()
+ for friendly_name, _sha, reverts in find_shas(llvm_dir, interesting_shas,
+ state, new_state):
+ if friendly_name in seen:
+ continue
+ seen.add(friendly_name)
+ for sha, reverted_sha in reverts:
+ try:
+ # We upload reverts for all platforms by default, since there's no
+ # real reason for them to be CrOS-specific.
+ get_upstream_patch.get_from_upstream(chroot_path=chroot_path,
+ create_cl=True,
+ start_sha=reverted_sha,
+ patches=[sha],
+ reviewers=reviewers,
+ cc=cc,
+ platforms=())
+ except get_upstream_patch.CherrypickError as e:
+ logging.info('%s, skipping...', str(e))
+ return new_state
+
+
+def do_email(is_dry_run: bool, llvm_dir: str, repository: str,
+ interesting_shas: t.List[t.Tuple[str, str]], state: State,
+ recipients: _EmailRecipients) -> State:
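+  """Emails (or, in a dry run, prints) new reverts; returns the new state."""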
+ def prettify_sha(sha: str) -> tiny_render.Piece:
+ rev = get_llvm_hash.GetVersionFrom(llvm_dir, sha)
+
+ # 12 is arbitrary, but should be unambiguous enough.
+ short_sha = sha[:12]
+ return tiny_render.Switch(
+ text=f'r{rev} ({short_sha})',
+ html=tiny_render.Link(href='https://reviews.llvm.org/rG' + sha,
+ inner='r' + str(rev)),
+ )
+
+ def get_sha_description(sha: str) -> tiny_render.Piece:
+ return subprocess.check_output(
+ ['git', 'log', '-n1', '--format=%s', sha],
+ cwd=llvm_dir,
+ encoding='utf-8',
+ ).strip()
+
+ new_state: State = {}
+ for friendly_name, sha, new_reverts in find_shas(llvm_dir, interesting_shas,
+ state, new_state):
+ email = _generate_revert_email(repository, friendly_name, sha,
+ prettify_sha, get_sha_description,
+ new_reverts)
+ if is_dry_run:
+ logging.info('Would send email:\nSubject: %s\nBody:\n%s\n',
+ email.subject, tiny_render.render_text_pieces(email.body))
+ else:
+ logging.info('Sending email with subject %r...', email.subject)
+ _send_revert_email(recipients, email)
+ logging.info('Email sent.')
+ return new_state
+
+
+def parse_args(argv: t.List[str]) -> t.Any:
parser = argparse.ArgumentParser(
- description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+ description=__doc__,
+ formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument(
- '--state_file', required=True, help='File to store persistent state in.')
+ 'action',
+ choices=['cherry-pick', 'email', 'dry-run'],
+ help='Automatically cherry-pick upstream reverts, send an email, or '
+ 'write to stdout.')
+ parser.add_argument('--state_file',
+ required=True,
+ help='File to store persistent state in.')
+ parser.add_argument('--llvm_dir',
+ required=True,
+ help='Up-to-date LLVM directory to use.')
+ parser.add_argument('--debug', action='store_true')
parser.add_argument(
- '--llvm_dir', required=True, help='Up-to-date LLVM directory to use.')
+ '--reviewers',
+ type=str,
+ nargs='*',
+ help='Requests reviews from REVIEWERS. All REVIEWERS must have existing '
+ 'accounts.')
parser.add_argument(
- '--dry_run',
- action='store_true',
- help='Print email contents, rather than sending them.')
- parser.add_argument('--debug', action='store_true')
+ '--cc',
+ type=str,
+ nargs='*',
+ help='CCs the CL to the recipients. All recipients must have existing '
+ 'accounts.')
subparsers = parser.add_subparsers(dest='repository')
subparsers.required = True
chromeos_subparser = subparsers.add_parser('chromeos')
- chromeos_subparser.add_argument(
- '--chromeos_dir', required=True, help='Up-to-date CrOS directory to use.')
+ chromeos_subparser.add_argument('--chromeos_dir',
+ required=True,
+ help='Up-to-date CrOS directory to use.')
android_subparser = subparsers.add_parser('android')
android_subparser.add_argument(
@@ -219,91 +336,70 @@ def main(argv: t.List[str]) -> None:
required=True,
help='Up-to-date android-llvm-toolchain directory to use.')
- opts = parser.parse_args(argv)
+ return parser.parse_args(argv)
+
+
+def find_chroot(opts: t.Any, reviewers: t.List[str], cc: t.List[str]
+ ) -> t.Tuple[str, t.List[t.Tuple[str, str]], _EmailRecipients]:
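+  """Returns the chroot path, interesting SHAs, and email recipients."""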
+ recipients = reviewers + cc
+ if opts.repository == 'chromeos':
+ chroot_path = opts.chromeos_dir
+ return (chroot_path, _find_interesting_chromeos_shas(chroot_path),
+ _EmailRecipients(well_known=['mage'], direct=recipients))
+ elif opts.repository == 'android':
+ if opts.action == 'cherry-pick':
+ raise RuntimeError(
+ "android doesn't currently support automatic cherry-picking.")
+
+ chroot_path = opts.android_llvm_toolchain_dir
+ return (chroot_path, _find_interesting_android_shas(chroot_path),
+ _EmailRecipients(well_known=[],
+ direct=['android-llvm-dev@google.com'] +
+ recipients))
+ else:
+ raise ValueError(f'Unknown repository {opts.repository}')
+
+
+def main(argv: t.List[str]) -> int:
+ opts = parse_args(argv)
logging.basicConfig(
format='%(asctime)s: %(levelname)s: %(filename)s:%(lineno)d: %(message)s',
level=logging.DEBUG if opts.debug else logging.INFO,
)
- dry_run = opts.dry_run
+ action = opts.action
llvm_dir = opts.llvm_dir
repository = opts.repository
state_file = opts.state_file
+ reviewers = opts.reviewers if opts.reviewers else []
+ cc = opts.cc if opts.cc else []
- if repository == 'chromeos':
- interesting_shas = _find_interesting_chromeos_shas(opts.chromeos_dir)
- recipients = _EmailRecipients(well_known=['mage'], direct=[])
- elif repository == 'android':
- interesting_shas = _find_interesting_android_shas(
- opts.android_llvm_toolchain_dir)
- recipients = _EmailRecipients(
- well_known=[], direct=['android-llvm-dev@google.com'])
- else:
- raise ValueError('Unknown repository %s' % opts.repository)
-
+ chroot_path, interesting_shas, recipients = find_chroot(opts, reviewers, cc)
logging.info('Interesting SHAs were %r', interesting_shas)
state = _read_state(state_file)
logging.info('Loaded state\n%s', pprint.pformat(state))
- def prettify_sha(sha: str) -> tiny_render.Piece:
- rev = get_llvm_hash.GetVersionFrom(llvm_dir, sha)
-
- # 12 is arbitrary, but should be unambiguous enough.
- short_sha = sha[:12]
- return tiny_render.Switch(
- text='r%s (%s)' % (rev, short_sha),
- html=tiny_render.Link(
- href='https://reviews.llvm.org/rG' + sha, inner='r' + str(rev)),
- )
-
- def get_sha_description(sha: str) -> tiny_render.Piece:
- return subprocess.check_output(
- ['git', 'log', '-n1', '--format=%s', sha],
- cwd=llvm_dir,
- encoding='utf-8',
- ).strip()
-
- new_state: State = {}
- revert_emails_to_send: t.List[t.Tuple[str, t.List[revert_checker
- .Revert]]] = []
- for friendly_name, sha in interesting_shas:
- logging.info('Finding reverts across %s (%s)', friendly_name, sha)
- all_reverts = revert_checker.find_reverts(
- llvm_dir, sha, root='origin/' + git_llvm_rev.MAIN_BRANCH)
- logging.info('Detected the following revert(s) across %s:\n%s',
- friendly_name, pprint.pformat(all_reverts))
-
- new_state[sha] = [r.sha for r in all_reverts]
-
- if sha not in state:
- logging.info('SHA %s is new to me', sha)
- existing_reverts = set()
- else:
- existing_reverts = set(state[sha])
-
- new_reverts = [r for r in all_reverts if r.sha not in existing_reverts]
- if not new_reverts:
- logging.info('...All of which have been reported.')
- continue
-
- revert_emails_to_send.append(
- _generate_revert_email(repository, friendly_name, sha, prettify_sha,
- get_sha_description, new_reverts))
-
# We want to be as free of obvious side-effects as possible in case something
- # above breaks. Hence, send the email as late as possible.
- for email in revert_emails_to_send:
- if dry_run:
- logging.info('Would send email:\nSubject: %s\nBody:\n%s\n', email.subject,
- tiny_render.render_text_pieces(email.body))
- else:
- logging.info('Sending email with subject %r...', email.subject)
- _send_revert_email(recipients, email)
- logging.info('Email sent.')
+  # above breaks. Hence, take action as late as possible.
+ if action == 'cherry-pick':
+ new_state = do_cherrypick(chroot_path=chroot_path,
+ llvm_dir=llvm_dir,
+ interesting_shas=interesting_shas,
+ state=state,
+ reviewers=reviewers,
+ cc=cc)
+ else:
+ new_state = do_email(is_dry_run=action == 'dry-run',
+ llvm_dir=llvm_dir,
+ repository=repository,
+ interesting_shas=interesting_shas,
+ state=state,
+ recipients=recipients)
_write_state(state_file, new_state)
+ return 0
if __name__ == '__main__':
diff --git a/llvm_tools/nightly_revert_checker_test.py b/llvm_tools/nightly_revert_checker_test.py
index 68338a59..a8ab4195 100755
--- a/llvm_tools/nightly_revert_checker_test.py
+++ b/llvm_tools/nightly_revert_checker_test.py
@@ -10,8 +10,10 @@ from __future__ import print_function
import io
import unittest
+from unittest.mock import patch
import cros_utils.tiny_render as tiny_render
+import get_upstream_patch
import nightly_revert_checker
import revert_checker
@@ -153,6 +155,43 @@ class Test(unittest.TestCase):
self.assertIn('Failed to detect SHAs', str(e.exception))
+ @patch('revert_checker.find_reverts')
+ @patch('get_upstream_patch.get_from_upstream')
+ def test_do_cherrypick_is_called(self, do_cherrypick, find_reverts):
+ find_reverts.return_value = [
+ revert_checker.Revert('12345abcdef', 'fedcba54321')
+ ]
+ nightly_revert_checker.do_cherrypick(
+ chroot_path='/path/to/chroot',
+ llvm_dir='/path/to/llvm',
+ interesting_shas=[('12345abcdef', 'fedcba54321')],
+ state={},
+ reviewers=['meow@chromium.org'],
+ cc=['purr@chromium.org'])
+
+ do_cherrypick.assert_called_once()
+ find_reverts.assert_called_once()
+
+ @patch('revert_checker.find_reverts')
+ @patch('get_upstream_patch.get_from_upstream')
+ def test_do_cherrypick_handles_cherrypick_error(self, do_cherrypick,
+ find_reverts):
+ find_reverts.return_value = [
+ revert_checker.Revert('12345abcdef', 'fedcba54321')
+ ]
+ do_cherrypick.side_effect = get_upstream_patch.CherrypickError(
+ 'Patch at 12345abcdef already exists in PATCHES.json')
+ nightly_revert_checker.do_cherrypick(
+ chroot_path='/path/to/chroot',
+ llvm_dir='/path/to/llvm',
+ interesting_shas=[('12345abcdef', 'fedcba54321')],
+ state={},
+ reviewers=['meow@chromium.org'],
+ cc=['purr@chromium.org'])
+
+ do_cherrypick.assert_called_once()
+ find_reverts.assert_called_once()
+
if __name__ == '__main__':
unittest.main()
diff --git a/llvm_tools/patch_manager.py b/llvm_tools/patch_manager.py
index 3c83fa96..f2d6b322 100755
--- a/llvm_tools/patch_manager.py
+++ b/llvm_tools/patch_manager.py
@@ -212,8 +212,13 @@ def GetPatchMetadata(patch_dict):
"""
# Get the metadata values of a patch if possible.
- start_version = patch_dict.get('start_version', 0)
- end_version = patch_dict.get('end_version', None)
+ # FIXME(b/221489531): Remove start_version & end_version
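+  # New-style metadata nests the bounds under 'version_range', e.g.
+  #   {'version_range': {'from': 1000, 'until': 1250}}
+  # and takes precedence over the legacy flat keys.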
+ if 'version_range' in patch_dict:
+ start_version = patch_dict['version_range'].get('from', 0)
+ end_version = patch_dict['version_range'].get('until', None)
+ else:
+ start_version = patch_dict.get('start_version', 0)
+ end_version = patch_dict.get('end_version', None)
is_critical = patch_dict.get('is_critical', False)
return start_version, end_version, is_critical
diff --git a/llvm_tools/patch_manager_unittest.py b/llvm_tools/patch_manager_unittest.py
index 62947ed1..69bb683e 100755
--- a/llvm_tools/patch_manager_unittest.py
+++ b/llvm_tools/patch_manager_unittest.py
@@ -182,7 +182,9 @@ class PatchManagerTest(unittest.TestCase):
test_patch = {
'comment': 'Redirects output to stdout',
'rel_patch_path': 'cherry/fixes_stdout.patch',
- 'end_version': 1000
+ 'version_range': {
+ 'until': 1000,
+ }
}
self.assertEqual(
@@ -275,7 +277,9 @@ class PatchManagerTest(unittest.TestCase):
patch = [{
'comment': 'Redirects output to stdout',
'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 10
+ 'version_range': {
+ 'from': 10,
+ },
}]
abs_patch_path = '/abs/path/to/filesdir/PATCHES'
@@ -293,13 +297,17 @@ class PatchManagerTest(unittest.TestCase):
test_updated_patch_metadata = [{
'comment': 'Redirects output to stdout',
'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 10
+ 'version_range': {
+ 'from': 10,
+ }
}]
expected_patch_metadata = {
'comment': 'Redirects output to stdout',
'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 10
+ 'version_range': {
+ 'from': 10,
+ }
}
with CreateTemporaryJsonFile() as json_test_file:
@@ -335,7 +343,9 @@ class PatchManagerTest(unittest.TestCase):
test_patch_metadata = [{
'comment': 'Redirects output to stdout',
'rel_patch_path': rel_patch_path,
- 'start_version': 10
+ 'version_range': {
+ 'from': 10,
+ }
}]
with CreateTemporaryJsonFile() as json_test_file:
@@ -379,7 +389,9 @@ class PatchManagerTest(unittest.TestCase):
test_patch_metadata = [{
'comment': 'Redirects output to stdout',
'rel_patch_path': rel_patch_path,
- 'start_version': 1000
+ 'version_range': {
+ 'from': 1000,
+ },
}]
with CreateTemporaryJsonFile() as json_test_file:
@@ -415,28 +427,36 @@ class PatchManagerTest(unittest.TestCase):
test_patch_1 = {
'comment': 'Redirects output to stdout',
'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 1000,
- 'end_version': 1250
+ 'version_range': {
+ 'from': 1000,
+ 'until': 1250
+ }
}
test_patch_2 = {
'comment': 'Fixes input',
'rel_patch_path': 'cherry/fixes_input.patch',
- 'start_version': 1000
+ 'version_range': {
+ 'from': 1000
+ }
}
test_patch_3 = {
'comment': 'Adds a warning',
'rel_patch_path': 'add_warning.patch',
- 'start_version': 750,
- 'end_version': 1500
+ 'version_range': {
+ 'from': 750,
+ 'until': 1500
+ }
}
test_patch_4 = {
'comment': 'Adds a helper function',
'rel_patch_path': 'add_helper.patch',
- 'start_version': 20,
- 'end_version': 900
+ 'version_range': {
+ 'from': 20,
+ 'until': 900
+ }
}
test_patch_metadata = [
@@ -520,28 +540,36 @@ class PatchManagerTest(unittest.TestCase):
test_patch_1 = {
'comment': 'Redirects output to stdout',
'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 1000,
- 'end_version': 1190
+ 'version_range': {
+ 'from': 1000,
+ 'until': 1190
+ }
}
test_patch_2 = {
'comment': 'Fixes input',
'rel_patch_path': 'cherry/fixes_input.patch',
- 'start_version': 1000
+ 'version_range': {
+ 'from': 1000
+ }
}
test_patch_3 = {
'comment': 'Adds a warning',
'rel_patch_path': 'add_warning.patch',
- 'start_version': 750,
- 'end_version': 1500
+ 'version_range': {
+ 'from': 750,
+ 'until': 1500
+ }
}
test_patch_4 = {
'comment': 'Adds a helper function',
'rel_patch_path': 'add_helper.patch',
- 'start_version': 20,
- 'end_version': 2000
+ 'version_range': {
+ 'from': 20,
+ 'until': 2000
+ }
}
test_patch_metadata = [
@@ -654,8 +682,10 @@ class PatchManagerTest(unittest.TestCase):
test_patch_1 = {
'comment': 'Redirects output to stdout',
'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 1000,
- 'end_version': 1190
+ 'version_range': {
+ 'from': 1000,
+ 'until': 1190
+ }
}
# For the 'remove_patches' mode, this patch is expected to be in the
@@ -665,7 +695,9 @@ class PatchManagerTest(unittest.TestCase):
test_patch_2 = {
'comment': 'Fixes input',
'rel_patch_path': 'cherry/fixes_input.patch',
- 'start_version': 1000
+ 'version_range': {
+ 'from': 1000
+ }
}
# For the 'remove_patches' mode, this patch is expected to be in the
@@ -674,8 +706,10 @@ class PatchManagerTest(unittest.TestCase):
test_patch_3 = {
'comment': 'Adds a warning',
'rel_patch_path': 'add_warning.patch',
- 'start_version': 750,
- 'end_version': 1500
+ 'version_range': {
+ 'from': 750,
+ 'until': 1500
+ }
}
# For the 'remove_patches' mode, this patch is expected to be in the
@@ -684,8 +718,10 @@ class PatchManagerTest(unittest.TestCase):
test_patch_4 = {
'comment': 'Adds a helper function',
'rel_patch_path': 'add_helper.patch',
- 'start_version': 20,
- 'end_version': 1400
+ 'version_range': {
+ 'from': 20,
+ 'until': 1400
+ }
}
test_patch_metadata = [
@@ -786,8 +822,10 @@ class PatchManagerTest(unittest.TestCase):
test_patch_1 = {
'comment': 'Redirects output to stdout',
'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 1000,
- 'end_version': 1190
+ 'version_range': {
+ 'from': 1000,
+ 'until': 1190
+ }
}
# For the 'remove_patches' mode, this patch is expected to be in the
@@ -797,7 +835,9 @@ class PatchManagerTest(unittest.TestCase):
test_patch_2 = {
'comment': 'Fixes input',
'rel_patch_path': 'cherry/fixes_input.patch',
- 'start_version': 1000
+ 'version_range': {
+ 'from': 1000,
+ }
}
# For the 'remove_patches' mode, this patch is expected to be in the
@@ -806,8 +846,10 @@ class PatchManagerTest(unittest.TestCase):
test_patch_3 = {
'comment': 'Adds a warning',
'rel_patch_path': 'add_warning.patch',
- 'start_version': 750,
- 'end_version': 1500
+ 'version_range': {
+ 'from': 750,
+ 'until': 1500
+ }
}
# For the 'remove_patches' mode, this patch is expected to be in the
@@ -816,8 +858,10 @@ class PatchManagerTest(unittest.TestCase):
test_patch_4 = {
'comment': 'Adds a helper function',
'rel_patch_path': 'add_helper.patch',
- 'start_version': 1600,
- 'end_version': 2000
+ 'version_range': {
+ 'from': 1600,
+ 'until': 2000
+ }
}
test_patch_metadata = [
diff --git a/llvm_tools/patch_sync/.gitignore b/llvm_tools/patch_sync/.gitignore
new file mode 100644
index 00000000..2f7896d1
--- /dev/null
+++ b/llvm_tools/patch_sync/.gitignore
@@ -0,0 +1 @@
+target/
diff --git a/llvm_tools/patch_sync/Cargo.lock b/llvm_tools/patch_sync/Cargo.lock
new file mode 100644
index 00000000..1ad74a77
--- /dev/null
+++ b/llvm_tools/patch_sync/Cargo.lock
@@ -0,0 +1,460 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "aho-corasick"
+version = "0.7.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "ansi_term"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "anyhow"
+version = "1.0.51"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b26702f315f53b6071259e15dd9d64528213b44d61de1ec926eca7715d62203"
+
+[[package]]
+name = "atty"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
+dependencies = [
+ "hermit-abi",
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "bitflags"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+
+[[package]]
+name = "block-buffer"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "clap"
+version = "2.34.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c"
+dependencies = [
+ "ansi_term",
+ "atty",
+ "bitflags",
+ "strsim",
+ "textwrap",
+ "unicode-width",
+ "vec_map",
+]
+
+[[package]]
+name = "cpufeatures"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "95059428f66df56b63431fdb4e1947ed2190586af5c5a8a8b71122bdf5a7f469"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "digest"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "generic-array"
+version = "0.14.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "501466ecc8a30d1d3b7fc9229b122b2ce8ed6e9d9223f1138d4babb253e51817"
+dependencies = [
+ "typenum",
+ "version_check",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "wasi",
+]
+
+[[package]]
+name = "heck"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c"
+dependencies = [
+ "unicode-segmentation",
+]
+
+[[package]]
+name = "hermit-abi"
+version = "0.1.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "itoa"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35"
+
+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+
+[[package]]
+name = "libc"
+version = "0.2.112"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1b03d17f364a3a042d5e5d46b053bbbf82c92c9430c592dd4c064dc6ee997125"
+
+[[package]]
+name = "memchr"
+version = "2.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
+
+[[package]]
+name = "opaque-debug"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
+
+[[package]]
+name = "patch_sync"
+version = "1.1.0"
+dependencies = [
+ "anyhow",
+ "rand",
+ "regex",
+ "scopeguard",
+ "serde",
+ "serde_json",
+ "sha2",
+ "structopt",
+ "time",
+]
+
+[[package]]
+name = "ppv-lite86"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ed0cfbc8191465bed66e1718596ee0b0b35d5ee1f41c5df2189d0fe8bde535ba"
+
+[[package]]
+name = "proc-macro-error"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
+dependencies = [
+ "proc-macro-error-attr",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "version_check",
+]
+
+[[package]]
+name = "proc-macro-error-attr"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "version_check",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.34"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2f84e92c0f7c9d58328b85a78557813e4bd845130db68d7184635344399423b1"
+dependencies = [
+ "unicode-xid",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "38bc8cc6a5f2e3655e0899c1b848643b2562f853f114bfec7be120678e3ace05"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "rand"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8"
+dependencies = [
+ "libc",
+ "rand_chacha",
+ "rand_core",
+ "rand_hc",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
+dependencies = [
+ "ppv-lite86",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.6.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7"
+dependencies = [
+ "getrandom",
+]
+
+[[package]]
+name = "rand_hc"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d51e9f596de227fda2ea6c84607f5558e196eeaf43c986b724ba4fb8fdf497e7"
+dependencies = [
+ "rand_core",
+]
+
+[[package]]
+name = "regex"
+version = "1.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.6.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
+
+[[package]]
+name = "ryu"
+version = "1.0.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f"
+
+[[package]]
+name = "scopeguard"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
+
+[[package]]
+name = "serde"
+version = "1.0.132"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b9875c23cf305cd1fd7eb77234cbb705f21ea6a72c637a5c6db5fe4b8e7f008"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.132"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ecc0db5cb2556c0e558887d9bbdcf6ac4471e83ff66cf696e5419024d1606276"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.73"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bcbd0344bc6533bc7ec56df11d42fb70f1b912351c0825ccb7211b59d8af7cf5"
+dependencies = [
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "sha2"
+version = "0.9.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b69f9a4c9740d74c5baa3fd2e547f9525fa8088a8a958e0ca2409a514e33f5fa"
+dependencies = [
+ "block-buffer",
+ "cfg-if",
+ "cpufeatures",
+ "digest",
+ "opaque-debug",
+]
+
+[[package]]
+name = "strsim"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
+
+[[package]]
+name = "structopt"
+version = "0.3.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "40b9788f4202aa75c240ecc9c15c65185e6a39ccdeb0fd5d008b98825464c87c"
+dependencies = [
+ "clap",
+ "lazy_static",
+ "structopt-derive",
+]
+
+[[package]]
+name = "structopt-derive"
+version = "0.4.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0"
+dependencies = [
+ "heck",
+ "proc-macro-error",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "syn"
+version = "1.0.83"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "23a1dfb999630e338648c83e91c59a4e9fb7620f520c3194b6b89e276f2f1959"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-xid",
+]
+
+[[package]]
+name = "textwrap"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
+dependencies = [
+ "unicode-width",
+]
+
+[[package]]
+name = "time"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "41effe7cfa8af36f439fac33861b66b049edc6f9a32331e2312660529c1c24ad"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "typenum"
+version = "1.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b63708a265f51345575b27fe43f9500ad611579e764c79edbc2037b1121959ec"
+
+[[package]]
+name = "unicode-segmentation"
+version = "1.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8895849a949e7845e06bd6dc1aa51731a103c42707010a5b591c0038fb73385b"
+
+[[package]]
+name = "unicode-width"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973"
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3"
+
+[[package]]
+name = "vec_map"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
+
+[[package]]
+name = "version_check"
+version = "0.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe"
+
+[[package]]
+name = "wasi"
+version = "0.10.2+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
diff --git a/llvm_tools/patch_sync/Cargo.toml b/llvm_tools/patch_sync/Cargo.toml
new file mode 100644
index 00000000..ed33d5ca
--- /dev/null
+++ b/llvm_tools/patch_sync/Cargo.toml
@@ -0,0 +1,21 @@
+[package]
+name = "patch_sync"
+version = "1.1.0"
+authors = ["Jordan R Abrahams-Whitehead <ajordanr@google.com>"]
+edition = "2018"
+
+[profile.release]
+panic = "abort"
+
+[dependencies]
+anyhow = "1.0"
+regex = "1.5"
+serde = {version = "1.0", features = ["derive"]}
+serde_json = "1.0"
+sha2 = "0.9"
+structopt = "0.3"
+time = "0.3"
+scopeguard = "1.1.0"
+
+[dev-dependencies]
+rand = "0.8"
diff --git a/llvm_tools/patch_sync/src/android_utils.rs b/llvm_tools/patch_sync/src/android_utils.rs
new file mode 100644
index 00000000..77cb4b8a
--- /dev/null
+++ b/llvm_tools/patch_sync/src/android_utils.rs
@@ -0,0 +1,62 @@
+use std::path::Path;
+use std::process::Command;
+
+use anyhow::{bail, ensure, Result};
+
+const LLVM_ANDROID_REL_PATH: &str = "toolchain/llvm_android";
+
+/// Return the Android checkout's current llvm version.
+///
+/// This uses android_version.get_svn_revision_number, a Python function
+/// that can't be executed directly from Rust. We spawn a python3
+/// subprocess to run it and capture the result.
+pub fn get_android_llvm_version(android_checkout: &Path) -> Result<String> {
+ let mut command = new_android_cmd(android_checkout, "python3")?;
+ command.args([
+ "-c",
+ "import android_version; print(android_version.get_svn_revision_number(), end='')",
+ ]);
+ let output = command.output()?;
+ if !output.status.success() {
+ bail!(
+ "could not get android llvm version: {}",
+ String::from_utf8_lossy(&output.stderr)
+ );
+ }
+ let out_string = String::from_utf8(output.stdout)?.trim().to_string();
+ Ok(out_string)
+}
+
+/// Sort the Android patches using the cherrypick_cl.py Android utility.
+///
+/// This assumes that:
+/// 1. There exists a python script called cherrypick_cl.py
+/// 2. That calling it with the given arguments sorts the PATCHES.json file.
+/// 3. Calling it does nothing besides sorting the PATCHES.json file.
+///
+/// We aren't doing our own sorting because we shouldn't have to update patch_sync along
+/// with cherrypick_cl.py any time they change the __lt__ implementation.
+pub fn sort_android_patches(android_checkout: &Path) -> Result<()> {
+ let mut command = new_android_cmd(android_checkout, "python3")?;
+ command.args(["cherrypick_cl.py", "--reason", "patch_sync sorting"]);
+ let output = command.output()?;
+ if !output.status.success() {
+ bail!(
+ "could not sort: {}",
+ String::from_utf8_lossy(&output.stderr)
+ );
+ }
+ Ok(())
+}
+
+fn new_android_cmd(android_checkout: &Path, cmd: &str) -> Result<Command> {
+ let mut command = Command::new(cmd);
+ let llvm_android_dir = android_checkout.join(LLVM_ANDROID_REL_PATH);
+ ensure!(
+ llvm_android_dir.is_dir(),
+ "can't make android command; {} is not a directory",
+ llvm_android_dir.display()
+ );
+ command.current_dir(llvm_android_dir);
+ Ok(command)
+}
diff --git a/llvm_tools/patch_sync/src/main.rs b/llvm_tools/patch_sync/src/main.rs
new file mode 100644
index 00000000..c244f1c0
--- /dev/null
+++ b/llvm_tools/patch_sync/src/main.rs
@@ -0,0 +1,332 @@
+mod android_utils;
+mod patch_parsing;
+mod version_control;
+
+use std::borrow::ToOwned;
+use std::collections::BTreeSet;
+use std::path::{Path, PathBuf};
+
+use anyhow::{Context, Result};
+use structopt::StructOpt;
+
+use patch_parsing::{filter_patches_by_platform, PatchCollection, PatchDictSchema};
+use version_control::RepoSetupContext;
+
+fn main() -> Result<()> {
+ match Opt::from_args() {
+ Opt::Show {
+ cros_checkout_path,
+ android_checkout_path,
+ sync,
+ keep_unmerged,
+ } => show_subcmd(ShowOpt {
+ cros_checkout_path,
+ android_checkout_path,
+ sync,
+ keep_unmerged,
+ }),
+ Opt::Transpose {
+ cros_checkout_path,
+ cros_reviewers,
+ old_cros_ref,
+ android_checkout_path,
+ android_reviewers,
+ old_android_ref,
+ sync,
+ verbose,
+ dry_run,
+ no_commit,
+ wip,
+ disable_cq,
+ } => transpose_subcmd(TransposeOpt {
+ cros_checkout_path,
+ cros_reviewers: cros_reviewers
+ .map(|r| r.split(',').map(ToOwned::to_owned).collect())
+ .unwrap_or_default(),
+ old_cros_ref,
+ android_checkout_path,
+ android_reviewers: android_reviewers
+ .map(|r| r.split(',').map(ToOwned::to_owned).collect())
+ .unwrap_or_default(),
+ old_android_ref,
+ sync,
+ verbose,
+ dry_run,
+ no_commit,
+ wip,
+ disable_cq,
+ }),
+ }
+}
+
+struct ShowOpt {
+ cros_checkout_path: PathBuf,
+ android_checkout_path: PathBuf,
+ keep_unmerged: bool,
+ sync: bool,
+}
+
+fn show_subcmd(args: ShowOpt) -> Result<()> {
+ let ShowOpt {
+ cros_checkout_path,
+ android_checkout_path,
+ keep_unmerged,
+ sync,
+ } = args;
+ let ctx = RepoSetupContext {
+ cros_checkout: cros_checkout_path,
+ android_checkout: android_checkout_path,
+ sync_before: sync,
+ wip_mode: true, // Has no effect, as we're not making changes
+ enable_cq: false, // Has no effect, as we're not uploading anything
+ };
+ ctx.setup()?;
+ let make_collection = |platform: &str, patches_fp: &Path| -> Result<PatchCollection> {
+ let parsed_collection = PatchCollection::parse_from_file(patches_fp)
+ .with_context(|| format!("could not parse {} PATCHES.json", platform))?;
+ Ok(if keep_unmerged {
+ parsed_collection
+ } else {
+ filter_patches_by_platform(&parsed_collection, platform).map_patches(|p| {
+                // Build the platforms set by hand; BTreeSet::from isn't available until Rust 1.56.
+ let mut platforms = BTreeSet::new();
+ platforms.insert(platform.to_string());
+ PatchDictSchema {
+ platforms,
+ ..p.clone()
+ }
+ })
+ })
+ };
+ let cur_cros_collection = make_collection("chromiumos", &ctx.cros_patches_path())?;
+ let cur_android_collection = make_collection("android", &ctx.android_patches_path())?;
+ let merged = cur_cros_collection.union(&cur_android_collection)?;
+ println!("{}", merged.serialize_patches()?);
+ Ok(())
+}
+
+struct TransposeOpt {
+ cros_checkout_path: PathBuf,
+ old_cros_ref: String,
+ android_checkout_path: PathBuf,
+ old_android_ref: String,
+ sync: bool,
+ verbose: bool,
+ dry_run: bool,
+ no_commit: bool,
+ cros_reviewers: Vec<String>,
+ android_reviewers: Vec<String>,
+ wip: bool,
+ disable_cq: bool,
+}
+
+fn transpose_subcmd(args: TransposeOpt) -> Result<()> {
+ let ctx = RepoSetupContext {
+ cros_checkout: args.cros_checkout_path,
+ android_checkout: args.android_checkout_path,
+ sync_before: args.sync,
+ wip_mode: args.wip,
+ enable_cq: !args.disable_cq,
+ };
+ ctx.setup()?;
+ let cros_patches_path = ctx.cros_patches_path();
+ let android_patches_path = ctx.android_patches_path();
+
+ // Get new Patches -------------------------------------------------------
+ let (cur_cros_collection, new_cros_patches) = patch_parsing::new_patches(
+ &cros_patches_path,
+ &ctx.old_cros_patch_contents(&args.old_cros_ref)?,
+ "chromiumos",
+ )
+ .context("finding new patches for chromiumos")?;
+ let (cur_android_collection, new_android_patches) = patch_parsing::new_patches(
+ &android_patches_path,
+ &ctx.old_android_patch_contents(&args.old_android_ref)?,
+ "android",
+ )
+ .context("finding new patches for android")?;
+
+ // Have to ignore patches that are already at the destination, even if
+ // the patches are new.
+ let new_cros_patches = new_cros_patches.subtract(&cur_android_collection)?;
+ let new_android_patches = new_android_patches.subtract(&cur_cros_collection)?;
+
+ // Need to do an extra filtering step for Android, as AOSP doesn't
+ // want patches outside of the start/end bounds.
+ let android_llvm_version: u64 = {
+ let android_llvm_version_str =
+ android_utils::get_android_llvm_version(&ctx.android_checkout)?;
+ android_llvm_version_str.parse::<u64>().with_context(|| {
+ format!(
+ "converting llvm version to u64: '{}'",
+ android_llvm_version_str
+ )
+ })?
+ };
+ let new_android_patches = new_android_patches.filter_patches(|p| {
+ match (p.get_start_version(), p.get_end_version()) {
+ (Some(start), Some(end)) => start <= android_llvm_version && android_llvm_version < end,
+ (Some(start), None) => start <= android_llvm_version,
+ (None, Some(end)) => android_llvm_version < end,
+ (None, None) => true,
+ }
+ });
+
+ if args.verbose {
+ display_patches("New patches from Chromium OS", &new_cros_patches);
+ display_patches("New patches from Android", &new_android_patches);
+ }
+
+ if args.dry_run {
+ println!("--dry-run specified; skipping modifications");
+ return Ok(());
+ }
+
+ modify_repos(
+ &ctx,
+ args.no_commit,
+ ModifyOpt {
+ new_cros_patches,
+ cur_cros_collection,
+ cros_reviewers: args.cros_reviewers,
+ new_android_patches,
+ cur_android_collection,
+ android_reviewers: args.android_reviewers,
+ },
+ )
+}
+
+struct ModifyOpt {
+ new_cros_patches: PatchCollection,
+ cur_cros_collection: PatchCollection,
+ cros_reviewers: Vec<String>,
+ new_android_patches: PatchCollection,
+ cur_android_collection: PatchCollection,
+ android_reviewers: Vec<String>,
+}
+
+fn modify_repos(ctx: &RepoSetupContext, no_commit: bool, opt: ModifyOpt) -> Result<()> {
+ // Cleanup on scope exit.
+ scopeguard::defer! {
+ ctx.cleanup();
+ }
+ // Transpose Patches -----------------------------------------------------
+ let mut cur_android_collection = opt.cur_android_collection;
+ let mut cur_cros_collection = opt.cur_cros_collection;
+ if !opt.new_cros_patches.is_empty() {
+ opt.new_cros_patches
+ .transpose_write(&mut cur_android_collection)?;
+ }
+ if !opt.new_android_patches.is_empty() {
+ opt.new_android_patches
+ .transpose_write(&mut cur_cros_collection)?;
+ }
+
+ if no_commit {
+ println!("--no-commit specified; not committing or uploading");
+ return Ok(());
+ }
+ // Commit and upload for review ------------------------------------------
+    // Note we check whether the *android* patches are empty before uploading
+    // to CrOS, and vice versa. This is a little counterintuitive, but the new
+    // android patches are what just got written into the CrOS checkout.
+ if !opt.new_android_patches.is_empty() {
+ ctx.cros_repo_upload(&opt.cros_reviewers)
+ .context("uploading chromiumos changes")?;
+ }
+ if !opt.new_cros_patches.is_empty() {
+ if let Err(e) = android_utils::sort_android_patches(&ctx.android_checkout) {
+ eprintln!(
+ "Couldn't sort Android patches; continuing. Caused by: {}",
+ e
+ );
+ }
+ ctx.android_repo_upload(&opt.android_reviewers)
+ .context("uploading android changes")?;
+ }
+ Ok(())
+}
+
+fn display_patches(prelude: &str, collection: &PatchCollection) {
+ println!("{}", prelude);
+ if collection.patches.is_empty() {
+ println!(" [No Patches]");
+ return;
+ }
+ println!("{}", collection);
+}
+
+#[derive(Debug, structopt::StructOpt)]
+#[structopt(name = "patch_sync", about = "A pipeline for syncing the patch code")]
+enum Opt {
+ /// Show a combined view of the PATCHES.json file, without making any changes.
+ #[allow(dead_code)]
+ Show {
+ #[structopt(parse(from_os_str))]
+ cros_checkout_path: PathBuf,
+ #[structopt(parse(from_os_str))]
+ android_checkout_path: PathBuf,
+
+ /// Keep a patch's platform field even if it's not merged at that platform.
+ #[structopt(long)]
+ keep_unmerged: bool,
+
+ /// Run repo sync before transposing.
+ #[structopt(short, long)]
+ sync: bool,
+ },
+    /// Transpose patches between two PATCHES.json files.
+ Transpose {
+ /// Path to the ChromiumOS source repo checkout.
+ #[structopt(long = "cros-checkout", parse(from_os_str))]
+ cros_checkout_path: PathBuf,
+
+ /// Emails to send review requests to during Chromium OS upload.
+ /// Comma separated.
+ #[structopt(long = "cros-rev")]
+ cros_reviewers: Option<String>,
+
+ /// Git ref (e.g. hash) for the ChromiumOS overlay to use as the base.
+ #[structopt(long = "overlay-base-ref")]
+ old_cros_ref: String,
+
+ /// Path to the Android Open Source Project source repo checkout.
+ #[structopt(long = "aosp-checkout", parse(from_os_str))]
+ android_checkout_path: PathBuf,
+
+ /// Emails to send review requests to during Android upload.
+ /// Comma separated.
+ #[structopt(long = "aosp-rev")]
+ android_reviewers: Option<String>,
+
+ /// Git ref (e.g. hash) for the llvm_android repo to use as the base.
+ #[structopt(long = "aosp-base-ref")]
+ old_android_ref: String,
+
+ /// Run repo sync before transposing.
+ #[structopt(short, long)]
+ sync: bool,
+
+ /// Print information to stdout
+ #[structopt(short, long)]
+ verbose: bool,
+
+        /// Do not change any files. Useful in combination with `--verbose`.
+        /// Implies `--no-commit`.
+ #[structopt(long)]
+ dry_run: bool,
+
+ /// Do not commit or upload any changes made.
+ #[structopt(long)]
+ no_commit: bool,
+
+ /// Upload and send things for review, but mark as WIP and send no
+ /// emails.
+ #[structopt(long)]
+ wip: bool,
+
+ /// Don't run CQ if set. Only has an effect if uploading.
+ #[structopt(long)]
+ disable_cq: bool,
+ },
+}
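The Android-only filtering in `transpose_subcmd` treats a patch's version
bounds as a half-open interval: a patch applies when `start <= v < end`, with
either bound optional. A standalone sketch of that predicate (the version
numbers below are illustrative):

```
// Same match shape as the filter closure in transpose_subcmd.
fn applies(v: u64, start: Option<u64>, end: Option<u64>) -> bool {
    match (start, end) {
        (Some(s), Some(e)) => s <= v && v < e, // end bound is exclusive
        (Some(s), None) => s <= v,
        (None, Some(e)) => v < e,
        (None, None) => true, // unbounded patches always apply
    }
}

fn main() {
    assert!(applies(433403, Some(433000), Some(434000)));
    assert!(!applies(434000, Some(433000), Some(434000)));
    assert!(applies(1, None, None));
    println!("ok");
}
```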
diff --git a/llvm_tools/patch_sync/src/patch_parsing.rs b/llvm_tools/patch_sync/src/patch_parsing.rs
new file mode 100644
index 00000000..124f0d6f
--- /dev/null
+++ b/llvm_tools/patch_sync/src/patch_parsing.rs
@@ -0,0 +1,462 @@
+use std::collections::{BTreeMap, BTreeSet};
+use std::fs::{copy, File};
+use std::io::{BufRead, BufReader, Read, Write};
+use std::path::{Path, PathBuf};
+
+use anyhow::{anyhow, Context, Result};
+use serde::{Deserialize, Serialize};
+use sha2::{Digest, Sha256};
+
+/// JSON serde struct.
+// FIXME(b/221489531): Remove when we clear out start_version and
+// end_version.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct PatchDictSchema {
+ /// [deprecated(since = "1.1", note = "Use version_range")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub end_version: Option<u64>,
+ pub metadata: Option<BTreeMap<String, serde_json::Value>>,
+ #[serde(default, skip_serializing_if = "BTreeSet::is_empty")]
+ pub platforms: BTreeSet<String>,
+ pub rel_patch_path: String,
+ /// [deprecated(since = "1.1", note = "Use version_range")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub start_version: Option<u64>,
+ pub version_range: Option<VersionRange>,
+}
+
+#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
+pub struct VersionRange {
+ pub from: Option<u64>,
+ pub until: Option<u64>,
+}
+
+// FIXME(b/221489531): Remove when we clear out start_version and
+// end_version.
+impl PatchDictSchema {
+ pub fn get_start_version(&self) -> Option<u64> {
+ self.version_range
+ .map(|x| x.from)
+ .unwrap_or(self.start_version)
+ }
+
+ pub fn get_end_version(&self) -> Option<u64> {
+ self.version_range
+ .map(|x| x.until)
+ .unwrap_or(self.end_version)
+ }
+}
+
+/// Struct to keep track of patches and their relative paths.
+#[derive(Debug, Clone)]
+pub struct PatchCollection {
+ pub patches: Vec<PatchDictSchema>,
+ pub workdir: PathBuf,
+}
+
+impl PatchCollection {
+    /// Create a `PatchCollection` from a PATCHES.json file.
+ pub fn parse_from_file(json_file: &Path) -> Result<Self> {
+ Ok(Self {
+ patches: serde_json::from_reader(File::open(json_file)?)?,
+ workdir: json_file
+ .parent()
+ .ok_or_else(|| anyhow!("failed to get json_file parent"))?
+ .to_path_buf(),
+ })
+ }
+
+ /// Create a `PatchCollection` from a string literal and a workdir.
+ pub fn parse_from_str(workdir: PathBuf, contents: &str) -> Result<Self> {
+ Ok(Self {
+ patches: serde_json::from_str(contents).context("parsing from str")?,
+ workdir,
+ })
+ }
+
+ /// Copy this collection with patches filtered by given criterion.
+ pub fn filter_patches(&self, f: impl FnMut(&PatchDictSchema) -> bool) -> Self {
+ Self {
+ patches: self.patches.iter().cloned().filter(f).collect(),
+ workdir: self.workdir.clone(),
+ }
+ }
+
+ /// Map over the patches.
+ pub fn map_patches(&self, f: impl FnMut(&PatchDictSchema) -> PatchDictSchema) -> Self {
+ Self {
+ patches: self.patches.iter().map(f).collect(),
+ workdir: self.workdir.clone(),
+ }
+ }
+
+ /// Return true if the collection is tracking any patches.
+ pub fn is_empty(&self) -> bool {
+ self.patches.is_empty()
+ }
+
+    /// Compute the set difference (self minus subtrahend), returning a new
+    /// `PatchCollection` which keeps the minuend's workdir.
+ pub fn subtract(&self, subtrahend: &Self) -> Result<Self> {
+ let mut new_patches = Vec::new();
+ // This is O(n^2) when it could be much faster, but n is always going to be less
+ // than 1k and speed is not important here.
+ for our_patch in &self.patches {
+ let found_in_sub = subtrahend.patches.iter().any(|sub_patch| {
+ let hash1 = subtrahend
+ .hash_from_rel_patch(sub_patch)
+ .expect("getting hash from subtrahend patch");
+ let hash2 = self
+ .hash_from_rel_patch(our_patch)
+ .expect("getting hash from our patch");
+ hash1 == hash2
+ });
+ if !found_in_sub {
+ new_patches.push(our_patch.clone());
+ }
+ }
+ Ok(Self {
+ patches: new_patches,
+ workdir: self.workdir.clone(),
+ })
+ }
+
+ pub fn union(&self, other: &Self) -> Result<Self> {
+ self.union_helper(
+ other,
+ |p| self.hash_from_rel_patch(p),
+ |p| other.hash_from_rel_patch(p),
+ )
+ }
+
+ fn union_helper(
+ &self,
+ other: &Self,
+ our_hash_f: impl Fn(&PatchDictSchema) -> Result<String>,
+ their_hash_f: impl Fn(&PatchDictSchema) -> Result<String>,
+ ) -> Result<Self> {
+ // 1. For all our patches:
+ // a. If there exists a matching patch hash from `other`:
+ // i. Create a new patch with merged platform info,
+ // ii. add the new patch to our new collection.
+ // iii. Mark the other patch as "merged"
+ // b. Otherwise, copy our patch to the new collection
+ // 2. For all unmerged patches from the `other`
+ // a. Copy their patch into the new collection
+ let mut combined_patches = Vec::new();
+ let mut other_merged = vec![false; other.patches.len()];
+
+ // 1.
+ for p in &self.patches {
+ let our_hash = our_hash_f(p)?;
+ let mut found = false;
+ // a.
+ for (idx, merged) in other_merged.iter_mut().enumerate() {
+ if !*merged {
+ let other_p = &other.patches[idx];
+ let their_hash = their_hash_f(other_p)?;
+ if our_hash == their_hash {
+ // i.
+ let new_platforms =
+ p.platforms.union(&other_p.platforms).cloned().collect();
+ // ii.
+ combined_patches.push(PatchDictSchema {
+ rel_patch_path: p.rel_patch_path.clone(),
+ start_version: p.start_version,
+ end_version: p.end_version,
+ platforms: new_platforms,
+ metadata: p.metadata.clone(),
+ version_range: p.version_range,
+ });
+ // iii.
+ *merged = true;
+ found = true;
+ break;
+ }
+ }
+ }
+ // b.
+ if !found {
+ combined_patches.push(p.clone());
+ }
+ }
+ // 2.
+ // Add any remaining, other-only patches.
+ for (idx, merged) in other_merged.iter().enumerate() {
+ if !*merged {
+ combined_patches.push(other.patches[idx].clone());
+ }
+ }
+
+ Ok(Self {
+ workdir: self.workdir.clone(),
+ patches: combined_patches,
+ })
+ }
+
+ /// Copy all patches from this collection into another existing collection, and write that
+ /// to the existing collection's file.
+ pub fn transpose_write(&self, existing_collection: &mut Self) -> Result<()> {
+ for p in &self.patches {
+ let original_file_path = self.workdir.join(&p.rel_patch_path);
+ let copy_file_path = existing_collection.workdir.join(&p.rel_patch_path);
+ copy_create_parents(&original_file_path, &copy_file_path)?;
+ existing_collection.patches.push(p.clone());
+ }
+ existing_collection.write_patches_json("PATCHES.json")
+ }
+
+ /// Write out the patch collection contents to a PATCHES.json file.
+ fn write_patches_json(&self, filename: &str) -> Result<()> {
+ let write_path = self.workdir.join(filename);
+ let mut new_patches_file = File::create(&write_path)
+ .with_context(|| format!("writing to {}", write_path.display()))?;
+ new_patches_file.write_all(self.serialize_patches()?.as_bytes())?;
+ Ok(())
+ }
+
+ pub fn serialize_patches(&self) -> Result<String> {
+ let mut serialization_buffer = Vec::<u8>::new();
+ // Four spaces to indent json serialization.
+ let mut serializer = serde_json::Serializer::with_formatter(
+ &mut serialization_buffer,
+ serde_json::ser::PrettyFormatter::with_indent(b" "),
+ );
+ self.patches
+ .serialize(&mut serializer)
+ .context("serializing patches to JSON")?;
+ // Append a newline at the end if not present. This is necessary to get
+ // past some pre-upload hooks.
+ if serialization_buffer.last() != Some(&b'\n') {
+ serialization_buffer.push(b'\n');
+ }
+ Ok(std::str::from_utf8(&serialization_buffer)?.to_string())
+ }
+
+ /// Return whether a given patch actually exists on the file system.
+ pub fn patch_exists(&self, patch: &PatchDictSchema) -> bool {
+ self.workdir.join(&patch.rel_patch_path).exists()
+ }
+
+ fn hash_from_rel_patch(&self, patch: &PatchDictSchema) -> Result<String> {
+ hash_from_patch_path(&self.workdir.join(&patch.rel_patch_path))
+ }
+}
+
+impl std::fmt::Display for PatchCollection {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ for (i, p) in self.patches.iter().enumerate() {
+ let title = p
+ .metadata
+ .as_ref()
+ .and_then(|x| x.get("title"))
+ .and_then(serde_json::Value::as_str)
+ .unwrap_or("[No Title]");
+ let path = self.workdir.join(&p.rel_patch_path);
+ writeln!(f, "* {}", title)?;
+ if i == self.patches.len() - 1 {
+ write!(f, " {}", path.display())?;
+ } else {
+ writeln!(f, " {}", path.display())?;
+ }
+ }
+ Ok(())
+ }
+}
+
+/// Generate a PatchCollection incorporating only the diff between current patches and old patch
+/// contents.
+pub fn new_patches(
+ patches_path: &Path,
+ old_patch_contents: &str,
+ platform: &str,
+) -> Result<(PatchCollection, PatchCollection)> {
+ let cur_collection = PatchCollection::parse_from_file(patches_path)
+ .with_context(|| format!("parsing {} PATCHES.json", platform))?;
+ let cur_collection = filter_patches_by_platform(&cur_collection, platform);
+ let cur_collection = cur_collection.filter_patches(|p| cur_collection.patch_exists(p));
+ let new_patches: PatchCollection = {
+ let old_collection = PatchCollection::parse_from_str(
+ patches_path.parent().unwrap().to_path_buf(),
+ old_patch_contents,
+ )?;
+ let old_collection = old_collection.filter_patches(|p| old_collection.patch_exists(p));
+ cur_collection.subtract(&old_collection)?
+ };
+ let new_patches = new_patches.map_patches(|p| {
+ let mut platforms = BTreeSet::new();
+ platforms.extend(["android".to_string(), "chromiumos".to_string()]);
+ PatchDictSchema {
+ platforms: platforms.union(&p.platforms).cloned().collect(),
+ ..p.to_owned()
+ }
+ });
+ Ok((cur_collection, new_patches))
+}
+
+/// Create a new collection with only the patches that apply to the
+/// given platform.
+///
+/// If there's no platform listed, the patch should still apply if the patch file exists.
+pub fn filter_patches_by_platform(collection: &PatchCollection, platform: &str) -> PatchCollection {
+ collection.filter_patches(|p| {
+ p.platforms.contains(platform) || (p.platforms.is_empty() && collection.patch_exists(p))
+ })
+}
+
+/// Get the hash from the patch file contents.
+///
+/// Not every patch file actually contains its own hash, so we
+/// compute the hash ourselves when it's not found.
+fn hash_from_patch(patch_contents: impl Read) -> Result<String> {
+ let mut reader = BufReader::new(patch_contents);
+ let mut buf = String::new();
+ reader.read_line(&mut buf)?;
+ let mut first_line_iter = buf.trim().split(' ').fuse();
+ let (fst_word, snd_word) = (first_line_iter.next(), first_line_iter.next());
+ if let (Some("commit" | "From"), Some(hash_str)) = (fst_word, snd_word) {
+ // If the first line starts with either "commit" or "From", the following
+ // text is almost certainly a commit hash.
+ Ok(hash_str.to_string())
+ } else {
+ // This is an annoying case where the patch isn't actually a commit.
+ // So we'll hash the entire file, and hope that's sufficient.
+ let mut hasher = Sha256::new();
+ hasher.update(&buf); // Have to hash the first line.
+ reader.read_to_string(&mut buf)?;
+ hasher.update(buf); // Hash the rest of the file.
+ let sha = hasher.finalize();
+ Ok(format!("{:x}", &sha))
+ }
+}
+
+fn hash_from_patch_path(patch: &Path) -> Result<String> {
+ let f = File::open(patch).with_context(|| format!("opening patch file {}", patch.display()))?;
+ hash_from_patch(f)
+}
+
+/// Copy a file from one path to another, and create any parent
+/// directories along the way.
+fn copy_create_parents(from: &Path, to: &Path) -> Result<()> {
+ let to_parent = to
+ .parent()
+ .with_context(|| format!("getting parent of {}", to.display()))?;
+ if !to_parent.exists() {
+ std::fs::create_dir_all(to_parent)?;
+ }
+
+ copy(&from, &to)
+ .with_context(|| format!("copying file from {} to {}", &from.display(), &to.display()))?;
+ Ok(())
+}
+
+#[cfg(test)]
+mod test {
+
+ use super::*;
+
+ /// Test we can extract the hash from patch files.
+ #[test]
+ fn test_hash_from_patch() {
+ // Example git patch from Gerrit
+ let desired_hash = "004be4037e1e9c6092323c5c9268acb3ecf9176c";
+ let test_file_contents = "commit 004be4037e1e9c6092323c5c9268acb3ecf9176c\n\
+ Author: An Author <some_email>\n\
+ Date: Thu Aug 6 12:34:16 2020 -0700";
+ assert_eq!(
+ &hash_from_patch(test_file_contents.as_bytes()).unwrap(),
+ desired_hash
+ );
+
+ // Example git patch from upstream
+ let desired_hash = "6f85225ef3791357f9b1aa097b575b0a2b0dff48";
+ let test_file_contents = "From 6f85225ef3791357f9b1aa097b575b0a2b0dff48\n\
+ Mon Sep 17 00:00:00 2001\n\
+ From: Another Author <another_email>\n\
+ Date: Wed, 18 Aug 2021 15:03:03 -0700";
+ assert_eq!(
+ &hash_from_patch(test_file_contents.as_bytes()).unwrap(),
+ desired_hash
+ );
+ }
+
+ #[test]
+ fn test_union() {
+ let patch1 = PatchDictSchema {
+ start_version: Some(0),
+ end_version: Some(1),
+ rel_patch_path: "a".into(),
+ metadata: None,
+ platforms: BTreeSet::from(["x".into()]),
+ version_range: Some(VersionRange {
+ from: Some(0),
+ until: Some(1),
+ }),
+ };
+ let patch2 = PatchDictSchema {
+ rel_patch_path: "b".into(),
+ platforms: BTreeSet::from(["x".into(), "y".into()]),
+ ..patch1.clone()
+ };
+ let patch3 = PatchDictSchema {
+ platforms: BTreeSet::from(["z".into(), "x".into()]),
+ ..patch1.clone()
+ };
+ let collection1 = PatchCollection {
+ workdir: PathBuf::new(),
+ patches: vec![patch1, patch2],
+ };
+ let collection2 = PatchCollection {
+ workdir: PathBuf::new(),
+ patches: vec![patch3],
+ };
+ let union = collection1
+ .union_helper(
+ &collection2,
+ |p| Ok(p.rel_patch_path.to_string()),
+ |p| Ok(p.rel_patch_path.to_string()),
+ )
+ .expect("could not create union");
+ assert_eq!(union.patches.len(), 2);
+ assert_eq!(
+ union.patches[0].platforms.iter().collect::<Vec<&String>>(),
+ vec!["x", "z"]
+ );
+ assert_eq!(
+ union.patches[1].platforms.iter().collect::<Vec<&String>>(),
+ vec!["x", "y"]
+ );
+ }
+
+ #[test]
+ fn test_union_empties() {
+ let patch1 = PatchDictSchema {
+ start_version: Some(0),
+ end_version: Some(1),
+ rel_patch_path: "a".into(),
+ metadata: None,
+ platforms: Default::default(),
+ version_range: Some(VersionRange {
+ from: Some(0),
+ until: Some(1),
+ }),
+ };
+ let collection1 = PatchCollection {
+ workdir: PathBuf::new(),
+ patches: vec![patch1.clone()],
+ };
+ let collection2 = PatchCollection {
+ workdir: PathBuf::new(),
+ patches: vec![patch1],
+ };
+ let union = collection1
+ .union_helper(
+ &collection2,
+ |p| Ok(p.rel_patch_path.to_string()),
+ |p| Ok(p.rel_patch_path.to_string()),
+ )
+ .expect("could not create union");
+ assert_eq!(union.patches.len(), 1);
+ assert_eq!(union.patches[0].platforms.len(), 0);
+ }
+}
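The first-line heuristic in `hash_from_patch` is worth seeing on its own: if a
patch file begins with `commit <sha>` (Gerrit-style) or `From <sha>`
(git format-patch style), the second word is taken as the hash; anything else
falls back to hashing the whole file. A minimal sketch of just the fast path,
with the SHA-256 fallback omitted:

```
fn quick_hash(first_line: &str) -> Option<String> {
    // Mirror hash_from_patch: inspect the first two words of line one.
    let mut words = first_line.trim().split(' ').fuse();
    match (words.next(), words.next()) {
        (Some("commit" | "From"), Some(hash)) => Some(hash.to_string()),
        _ => None, // the real code falls back to a SHA-256 of the whole file
    }
}

fn main() {
    let gerrit = "commit 004be4037e1e9c6092323c5c9268acb3ecf9176c";
    assert_eq!(
        quick_hash(gerrit).as_deref(),
        Some("004be4037e1e9c6092323c5c9268acb3ecf9176c")
    );
    assert_eq!(quick_hash("--- a/foo.cc"), None);
    println!("ok");
}
```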
diff --git a/llvm_tools/patch_sync/src/version_control.rs b/llvm_tools/patch_sync/src/version_control.rs
new file mode 100644
index 00000000..e07d39d6
--- /dev/null
+++ b/llvm_tools/patch_sync/src/version_control.rs
@@ -0,0 +1,400 @@
+use anyhow::{anyhow, bail, ensure, Context, Result};
+use regex::Regex;
+use std::ffi::OsStr;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::process::{Command, Output};
+
+const CHROMIUMOS_OVERLAY_REL_PATH: &str = "src/third_party/chromiumos-overlay";
+const ANDROID_LLVM_REL_PATH: &str = "toolchain/llvm_android";
+
+const CROS_MAIN_BRANCH: &str = "main";
+const ANDROID_MAIN_BRANCH: &str = "master"; // nocheck
+const WORK_BRANCH_NAME: &str = "__patch_sync_tmp";
+
+/// Context struct to keep track of both Chromium OS and Android checkouts.
+#[derive(Debug)]
+pub struct RepoSetupContext {
+ pub cros_checkout: PathBuf,
+ pub android_checkout: PathBuf,
+ /// Run `repo sync` before doing any comparisons.
+ pub sync_before: bool,
+ pub wip_mode: bool,
+ pub enable_cq: bool,
+}
+
+impl RepoSetupContext {
+ pub fn setup(&self) -> Result<()> {
+ if self.sync_before {
+ {
+ let crpp = self.cros_patches_path();
+ let cros_git = crpp.parent().unwrap();
+ git_cd_cmd(cros_git, ["checkout", CROS_MAIN_BRANCH])?;
+ }
+ {
+ let anpp = self.android_patches_path();
+ let android_git = anpp.parent().unwrap();
+ git_cd_cmd(android_git, ["checkout", ANDROID_MAIN_BRANCH])?;
+ }
+ repo_cd_cmd(&self.cros_checkout, &["sync", CHROMIUMOS_OVERLAY_REL_PATH])?;
+ repo_cd_cmd(&self.android_checkout, &["sync", ANDROID_LLVM_REL_PATH])?;
+ }
+ Ok(())
+ }
+
+ pub fn cros_repo_upload<S: AsRef<str>>(&self, reviewers: &[S]) -> Result<()> {
+ let llvm_dir = self
+ .cros_checkout
+ .join(&CHROMIUMOS_OVERLAY_REL_PATH)
+ .join("sys-devel/llvm");
+ ensure!(
+ llvm_dir.is_dir(),
+ "CrOS LLVM dir {} is not a directory",
+ llvm_dir.display()
+ );
+ Self::rev_bump_llvm(&llvm_dir)?;
+ let mut extra_args = Vec::new();
+ for reviewer in reviewers {
+ extra_args.push("--re");
+ extra_args.push(reviewer.as_ref());
+ }
+ if self.wip_mode {
+ extra_args.push("--wip");
+ extra_args.push("--no-emails");
+ }
+ if self.enable_cq {
+ extra_args.push("--label=Commit-Queue+1");
+ }
+ Self::repo_upload(
+ &self.cros_checkout,
+ CHROMIUMOS_OVERLAY_REL_PATH,
+ &Self::build_commit_msg(
+ "llvm: Synchronize patches from android",
+ "android",
+ "chromiumos",
+ "BUG=None\nTEST=CQ",
+ ),
+ extra_args,
+ )
+ }
+
+ pub fn android_repo_upload<S: AsRef<str>>(&self, reviewers: &[S]) -> Result<()> {
+ let mut extra_args = Vec::new();
+ for reviewer in reviewers {
+ extra_args.push("--re");
+ extra_args.push(reviewer.as_ref());
+ }
+ if self.wip_mode {
+ extra_args.push("--wip");
+ extra_args.push("--no-emails");
+ }
+ if self.enable_cq {
+ extra_args.push("--label=Presubmit-Ready+1");
+ }
+ Self::repo_upload(
+ &self.android_checkout,
+ ANDROID_LLVM_REL_PATH,
+ &Self::build_commit_msg(
+ "Synchronize patches from chromiumos",
+ "chromiumos",
+ "android",
+ "Test: N/A",
+ ),
+ extra_args,
+ )
+ }
+
+ fn cros_cleanup(&self) -> Result<()> {
+ let git_path = self.cros_checkout.join(CHROMIUMOS_OVERLAY_REL_PATH);
+ Self::cleanup_branch(&git_path, CROS_MAIN_BRANCH, WORK_BRANCH_NAME)
+ .with_context(|| format!("cleaning up branch {}", WORK_BRANCH_NAME))?;
+ Ok(())
+ }
+
+ fn android_cleanup(&self) -> Result<()> {
+ let git_path = self.android_checkout.join(ANDROID_LLVM_REL_PATH);
+ Self::cleanup_branch(&git_path, ANDROID_MAIN_BRANCH, WORK_BRANCH_NAME)
+ .with_context(|| format!("cleaning up branch {}", WORK_BRANCH_NAME))?;
+ Ok(())
+ }
+
+ /// Wrapper around cleanups to ensure both get run, even if errors appear.
+ pub fn cleanup(&self) {
+ if let Err(e) = self.cros_cleanup() {
+ eprintln!("Failed to clean up chromiumos, continuing: {}", e);
+ }
+ if let Err(e) = self.android_cleanup() {
+ eprintln!("Failed to clean up android, continuing: {}", e);
+ }
+ }
+
+ /// Get the Android path to the PATCHES.json file
+ pub fn android_patches_path(&self) -> PathBuf {
+ self.android_checkout
+ .join(&ANDROID_LLVM_REL_PATH)
+ .join("patches/PATCHES.json")
+ }
+
+ /// Get the Chromium OS path to the PATCHES.json file
+ pub fn cros_patches_path(&self) -> PathBuf {
+ self.cros_checkout
+ .join(&CHROMIUMOS_OVERLAY_REL_PATH)
+ .join("sys-devel/llvm/files/PATCHES.json")
+ }
+
+ /// Return the contents of the old PATCHES.json from Chromium OS
+ pub fn old_cros_patch_contents(&self, hash: &str) -> Result<String> {
+ Self::old_file_contents(
+ hash,
+ &self.cros_checkout.join(CHROMIUMOS_OVERLAY_REL_PATH),
+ Path::new("sys-devel/llvm/files/PATCHES.json"),
+ )
+ }
+
+ /// Return the contents of the old PATCHES.json from android
+ pub fn old_android_patch_contents(&self, hash: &str) -> Result<String> {
+ Self::old_file_contents(
+ hash,
+ &self.android_checkout.join(ANDROID_LLVM_REL_PATH),
+ Path::new("patches/PATCHES.json"),
+ )
+ }
+
+ fn repo_upload<'a, I: IntoIterator<Item = &'a str>>(
+ checkout_path: &Path,
+ subproject_git_wd: &'a str,
+ commit_msg: &str,
+ extra_flags: I,
+ ) -> Result<()> {
+ let git_path = &checkout_path.join(&subproject_git_wd);
+ ensure!(
+ git_path.is_dir(),
+ "git_path {} is not a directory",
+ git_path.display()
+ );
+ repo_cd_cmd(
+ checkout_path,
+ &["start", WORK_BRANCH_NAME, subproject_git_wd],
+ )?;
+ let base_args = ["upload", "--br", WORK_BRANCH_NAME, "-y", "--verify"];
+ let new_args = base_args
+ .iter()
+ .copied()
+ .chain(extra_flags)
+ .chain(["--", subproject_git_wd]);
+ git_cd_cmd(git_path, &["add", "."])
+ .and_then(|_| git_cd_cmd(git_path, &["commit", "-m", commit_msg]))
+ .and_then(|_| repo_cd_cmd(checkout_path, new_args))?;
+ Ok(())
+ }
+
+ /// Clean up the git repo after we're done with it.
+ fn cleanup_branch(git_path: &Path, base_branch: &str, rm_branch: &str) -> Result<()> {
+ git_cd_cmd(git_path, ["restore", "."])?;
+ git_cd_cmd(git_path, ["clean", "-fd"])?;
+ git_cd_cmd(git_path, ["checkout", base_branch])?;
+        // It's acceptable for the branch deletion to fail; most commonly the
+        // branch simply does not exist, which is an expected result. Since
+        // this case is so common, we don't report the failure, as doing so
+        // would only pollute the stderr logs.
+ let _ = git_cd_cmd(git_path, ["branch", "-D", rm_branch]);
+ Ok(())
+ }
+
+ /// Increment LLVM's revision number
+ fn rev_bump_llvm(llvm_dir: &Path) -> Result<PathBuf> {
+ let ebuild = find_ebuild(llvm_dir)
+ .with_context(|| format!("finding ebuild in {} to rev bump", llvm_dir.display()))?;
+ let ebuild_dir = ebuild.parent().unwrap();
+ let suffix_matcher = Regex::new(r"-r([0-9]+)\.ebuild").unwrap();
+ let ebuild_name = ebuild
+ .file_name()
+ .unwrap()
+ .to_str()
+ .ok_or_else(|| anyhow!("converting ebuild filename to utf-8"))?;
+ let new_path = if let Some(captures) = suffix_matcher.captures(ebuild_name) {
+ let full_suffix = captures.get(0).unwrap().as_str();
+ let cur_version = captures.get(1).unwrap().as_str().parse::<u32>().unwrap();
+ let new_filename =
+ ebuild_name.replace(full_suffix, &format!("-r{}.ebuild", cur_version + 1_u32));
+ let new_path = ebuild_dir.join(new_filename);
+ fs::rename(&ebuild, &new_path)?;
+ new_path
+ } else {
+ // File did not end in a revision. We should append -r1 to the end.
+ let new_filename = ebuild.file_stem().unwrap().to_string_lossy() + "-r1.ebuild";
+ let new_path = ebuild_dir.join(new_filename.as_ref());
+ fs::rename(&ebuild, &new_path)?;
+ new_path
+ };
+ Ok(new_path)
+ }
+
+ /// Return the contents of an old file in git
+ fn old_file_contents(hash: &str, pwd: &Path, file: &Path) -> Result<String> {
+ let git_ref = format!(
+ "{}:{}",
+ hash,
+ file.to_str()
+ .ok_or_else(|| anyhow!("failed to convert filepath to str"))?
+ );
+ let output = git_cd_cmd(pwd, &["show", &git_ref])?;
+ if !output.status.success() {
+ bail!("could not get old file contents for {}", &git_ref)
+ }
+ String::from_utf8(output.stdout)
+ .with_context(|| format!("converting {} file contents to UTF-8", &git_ref))
+ }
+
+ /// Create the commit message
+ fn build_commit_msg(subj: &str, from: &str, to: &str, footer: &str) -> String {
+ format!(
+ "[patch_sync] {}\n\n\
+Copies new PATCHES.json changes from {} to {}.\n
+For questions about this job, contact chromeos-toolchain@google.com\n\n
+{}",
+ subj, from, to, footer
+ )
+ }
+}
+
+/// Return the path of an ebuild located within the given directory.
+fn find_ebuild(dir: &Path) -> Result<PathBuf> {
+    // The logic here: iterate over all file paths to ebuilds with _pre in the
+    // name, sort them by revision number, and return the highest-revisioned one.
+
+ let ebuild_rev_matcher = Regex::new(r"-r([0-9]+)\.ebuild").unwrap();
+ // For LLVM ebuilds, we only want to check for ebuilds that have this in their file name.
+ let per_heuristic = "_pre";
+    // Get an iterator over all ebuilds with _pre in the file name.
+ let ebuild_candidates = fs::read_dir(dir)?.filter_map(|entry| {
+ let entry = entry.ok()?;
+ let path = entry.path();
+ if path.extension()? != "ebuild" {
+ // Not an ebuild, ignore.
+ return None;
+ }
+ let stem = path.file_stem()?.to_str()?;
+ if stem.contains(per_heuristic) {
+ return Some(path);
+ }
+ None
+ });
+ let try_parse_ebuild_rev = |path: PathBuf| -> Option<(u64, PathBuf)> {
+ let name = path.file_name()?;
+ if let Some(rev_match) = ebuild_rev_matcher.captures(name.to_str()?) {
+ let rev_str = rev_match.get(1)?;
+ let rev_num = rev_str.as_str().parse::<u64>().ok()?;
+ return Some((rev_num, path));
+ }
+ // If it doesn't have a revision, then it's revision 0.
+ Some((0, path))
+ };
+ let mut sorted_candidates: Vec<_> =
+ ebuild_candidates.filter_map(try_parse_ebuild_rev).collect();
+ sorted_candidates.sort_unstable_by_key(|x| x.0);
+ let highest_rev_ebuild = sorted_candidates
+ .pop()
+ .ok_or_else(|| anyhow!("could not find ebuild"))?;
+ Ok(highest_rev_ebuild.1)
+}
+
+/// Run a given git command from inside a specified git dir.
+pub fn git_cd_cmd<I, S>(pwd: &Path, args: I) -> Result<Output>
+where
+ I: IntoIterator<Item = S>,
+ S: AsRef<OsStr>,
+{
+ let mut command = Command::new("git");
+ command.current_dir(&pwd).args(args);
+ let output = command.output()?;
+ if !output.status.success() {
+ bail!(
+ "git command failed:\n {:?}\nstdout --\n{}\nstderr --\n{}",
+ command,
+ String::from_utf8_lossy(&output.stdout),
+ String::from_utf8_lossy(&output.stderr),
+ );
+ }
+ Ok(output)
+}
+
+pub fn repo_cd_cmd<I, S>(pwd: &Path, args: I) -> Result<()>
+where
+ I: IntoIterator<Item = S>,
+ S: AsRef<OsStr>,
+{
+ let mut command = Command::new("repo");
+ command.current_dir(&pwd).args(args);
+ let status = command.status()?;
+ if !status.success() {
+ bail!("repo command failed:\n {:?} \n", command)
+ }
+ Ok(())
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+ use rand::prelude::Rng;
+ use std::env;
+ use std::fs::File;
+
+ #[test]
+ fn test_revbump_ebuild() {
+ // Random number to append at the end of the test folder to prevent conflicts.
+ let rng: u32 = rand::thread_rng().gen();
+ let llvm_dir = env::temp_dir().join(format!("patch_sync_test_{}", rng));
+ fs::create_dir(&llvm_dir).expect("creating llvm dir in temp directory");
+
+ {
+ // With revision
+ let ebuild_name = "llvm-13.0_pre433403_p20211019-r10.ebuild";
+ let ebuild_path = llvm_dir.join(ebuild_name);
+ File::create(&ebuild_path).expect("creating test ebuild file");
+ let new_ebuild_path =
+ RepoSetupContext::rev_bump_llvm(&llvm_dir).expect("rev bumping the ebuild");
+ assert!(
+ new_ebuild_path.ends_with("llvm-13.0_pre433403_p20211019-r11.ebuild"),
+ "{}",
+ new_ebuild_path.display()
+ );
+ fs::remove_file(new_ebuild_path).expect("removing renamed ebuild file");
+ }
+ {
+ // Without revision
+ let ebuild_name = "llvm-13.0_pre433403_p20211019.ebuild";
+ let ebuild_path = llvm_dir.join(ebuild_name);
+ File::create(&ebuild_path).expect("creating test ebuild file");
+ let new_ebuild_path =
+ RepoSetupContext::rev_bump_llvm(&llvm_dir).expect("rev bumping the ebuild");
+ assert!(
+ new_ebuild_path.ends_with("llvm-13.0_pre433403_p20211019-r1.ebuild"),
+ "{}",
+ new_ebuild_path.display()
+ );
+ fs::remove_file(new_ebuild_path).expect("removing renamed ebuild file");
+ }
+ {
+ // With both
+ let ebuild_name = "llvm-13.0_pre433403_p20211019.ebuild";
+ let ebuild_path = llvm_dir.join(ebuild_name);
+ File::create(&ebuild_path).expect("creating test ebuild file");
+ let ebuild_link_name = "llvm-13.0_pre433403_p20211019-r2.ebuild";
+ let ebuild_link_path = llvm_dir.join(ebuild_link_name);
+ File::create(&ebuild_link_path).expect("creating test ebuild link file");
+ let new_ebuild_path =
+ RepoSetupContext::rev_bump_llvm(&llvm_dir).expect("rev bumping the ebuild");
+ assert!(
+ new_ebuild_path.ends_with("llvm-13.0_pre433403_p20211019-r3.ebuild"),
+ "{}",
+ new_ebuild_path.display()
+ );
+ fs::remove_file(new_ebuild_path).expect("removing renamed ebuild link file");
+ fs::remove_file(ebuild_path).expect("removing renamed ebuild file");
+ }
+
+ fs::remove_dir(&llvm_dir).expect("removing temp test dir");
+ }
+}
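`rev_bump_llvm` hinges on one regex: when `-r([0-9]+)\.ebuild` matches, the
revision is incremented; otherwise `-r1` is appended before the extension. A
minimal sketch of just the filename transformation, assuming the `regex` crate
and skipping the rename and ebuild search:

```
use regex::Regex;

fn bump(name: &str) -> String {
    let re = Regex::new(r"-r([0-9]+)\.ebuild").unwrap();
    if let Some(caps) = re.captures(name) {
        // Existing revision suffix: increment it.
        let next = caps[1].parse::<u32>().unwrap() + 1;
        name.replace(&caps[0], &format!("-r{}.ebuild", next))
    } else {
        // No revision suffix: start at -r1.
        name.replace(".ebuild", "-r1.ebuild")
    }
}

fn main() {
    assert_eq!(
        bump("llvm-13.0_pre433403_p20211019-r10.ebuild"),
        "llvm-13.0_pre433403_p20211019-r11.ebuild"
    );
    assert_eq!(
        bump("llvm-13.0_pre433403_p20211019.ebuild"),
        "llvm-13.0_pre433403_p20211019-r1.ebuild"
    );
    println!("ok");
}
```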
diff --git a/llvm_tools/revert_checker.py b/llvm_tools/revert_checker.py
index bb9182b0..acc8b5fa 100755
--- a/llvm_tools/revert_checker.py
+++ b/llvm_tools/revert_checker.py
@@ -1,8 +1,17 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
+#===----------------------------------------------------------------------===##
+#
+# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+# See https://llvm.org/LICENSE.txt for license information.
+# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+#
+#===----------------------------------------------------------------------===##
+#
+# !!!!!!!!!!!! NOTE !!!!!!!!!!!!
+# This is copied directly from upstream LLVM. Please make any changes upstream,
+# rather than to this file directly. Once changes are made there, you're free
+# to integrate them here.
"""Checks for reverts of commits across a given git commit.
@@ -20,11 +29,21 @@ conflicts/etc always introduce _some_ amount of fuzziness. This script just
uses a bundle of heuristics, and is bound to ignore / incorrectly flag some
reverts. The hope is that it'll easily catch the vast majority (>90%) of them,
though.
-"""
-# pylint: disable=cros-logging-import
+This is designed to be used in one of two ways: as an import in Python, or run
+directly from a shell. If you want to import this, the `find_reverts`
+function is the thing to look at. If you'd rather use this from a shell, here's
+a usage example:
+
+```
+./revert_checker.py c47f97169 origin/main origin/release/12.x
+```
-from __future__ import print_function
+This checks for all reverts from the tip of origin/main to c47f97169, which are
+across the latter. It then does the same for origin/release/12.x to c47f97169.
+Duplicate reverts discovered when walking both roots (origin/main and
+origin/release/12.x) are deduplicated in output.
+"""
import argparse
import collections
@@ -32,7 +51,9 @@ import logging
import re
import subprocess
import sys
-import typing as t
+from typing import Generator, List, NamedTuple, Iterable
+
+assert sys.version_info >= (3, 6), 'Only Python 3.6+ is supported.'
# People are creative with their reverts, and heuristics are a bit difficult.
# Like 90% of of reverts have "This reverts commit ${full_sha}".
@@ -43,7 +64,7 @@ import typing as t
# starts involving human intervention, which is probably not worth it for now.
-def _try_parse_reverts_from_commit_message(commit_message: str) -> t.List[str]:
+def _try_parse_reverts_from_commit_message(commit_message: str) -> List[str]:
if not commit_message:
return []
@@ -56,9 +77,10 @@ def _try_parse_reverts_from_commit_message(commit_message: str) -> t.List[str]:
return results
-def _stream_stdout(command: t.List[str]) -> t.Generator[str, None, None]:
+def _stream_stdout(command: List[str]) -> Generator[str, None, None]:
with subprocess.Popen(
command, stdout=subprocess.PIPE, encoding='utf-8', errors='replace') as p:
+ assert p.stdout is not None # for mypy's happiness.
yield from p.stdout
@@ -73,14 +95,14 @@ def _resolve_sha(git_dir: str, sha: str) -> str:
).strip()
-_LogEntry = t.NamedTuple('_LogEntry', [
+_LogEntry = NamedTuple('_LogEntry', [
('sha', str),
- ('commit_message', t.List[str]),
+ ('commit_message', str),
])
def _log_stream(git_dir: str, root_sha: str,
- end_at_sha: str) -> t.Iterable[_LogEntry]:
+ end_at_sha: str) -> Iterable[_LogEntry]:
sep = 50 * '<>'
log_command = [
'git',
@@ -103,8 +125,6 @@ def _log_stream(git_dir: str, root_sha: str,
break
while found_commit_header:
- # crbug.com/1041148
- # pylint: disable=stop-iteration-return
sha = next(stdout_stream, None)
assert sha is not None, 'git died?'
sha = sha.rstrip()
@@ -122,52 +142,54 @@ def _log_stream(git_dir: str, root_sha: str,
yield _LogEntry(sha, '\n'.join(commit_message).rstrip())
-def _shas_between(git_dir: str, base_ref: str,
- head_ref: str) -> t.Iterable[str]:
+def _shas_between(git_dir: str, base_ref: str, head_ref: str) -> Iterable[str]:
rev_list = [
'git',
'-C',
git_dir,
'rev-list',
'--first-parent',
- '%s..%s' % (base_ref, head_ref),
+ f'{base_ref}..{head_ref}',
]
return (x.strip() for x in _stream_stdout(rev_list))
def _rev_parse(git_dir: str, ref: str) -> str:
- result = subprocess.check_output(
+ return subprocess.check_output(
['git', '-C', git_dir, 'rev-parse', ref],
encoding='utf-8',
).strip()
- return t.cast(str, result)
-Revert = t.NamedTuple('Revert', [
+Revert = NamedTuple('Revert', [
('sha', str),
('reverted_sha', str),
])
-def find_common_parent_commit(git_dir: str, ref_a: str, ref_b: str) -> str:
+def _find_common_parent_commit(git_dir: str, ref_a: str, ref_b: str) -> str:
+ """Finds the closest common parent commit between `ref_a` and `ref_b`."""
return subprocess.check_output(
['git', '-C', git_dir, 'merge-base', ref_a, ref_b],
encoding='utf-8',
).strip()
-def find_reverts(git_dir: str, across_ref: str, root: str) -> t.List[Revert]:
- """Finds reverts across `across_ref` in `git_dir`, starting from `root`."""
+def find_reverts(git_dir: str, across_ref: str, root: str) -> List[Revert]:
+ """Finds reverts across `across_ref` in `git_dir`, starting from `root`.
+
+ These reverts are returned in order of oldest reverts first.
+ """
across_sha = _rev_parse(git_dir, across_ref)
root_sha = _rev_parse(git_dir, root)
- common_ancestor = find_common_parent_commit(git_dir, across_sha, root_sha)
+ common_ancestor = _find_common_parent_commit(git_dir, across_sha, root_sha)
if common_ancestor != across_sha:
- raise ValueError("%s isn't an ancestor of %s (common ancestor: %s)" %
- (across_sha, root_sha, common_ancestor))
+ raise ValueError(f"{across_sha} isn't an ancestor of {root_sha} "
+ '(common ancestor: {common_ancestor})')
intermediate_commits = set(_shas_between(git_dir, across_sha, root_sha))
- assert across_ref not in intermediate_commits
+ assert across_sha not in intermediate_commits
logging.debug('%d commits appear between %s and %s',
len(intermediate_commits), across_sha, root_sha)
@@ -204,10 +226,14 @@ def find_reverts(git_dir: str, across_ref: str, root: str) -> t.List[Revert]:
logging.error("%s claims to revert %s -- which isn't a commit -- %s", sha,
object_type, reverted_sha)
+ # Since `all_reverts` contains reverts in log order (e.g., newer comes before
+ # older), we need to reverse this to keep with our guarantee of older =
+ # earlier in the result.
+ all_reverts.reverse()
return all_reverts
-def main(args: t.List[str]) -> int:
+def _main() -> None:
parser = argparse.ArgumentParser(
description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument(
@@ -217,7 +243,7 @@ def main(args: t.List[str]) -> int:
parser.add_argument(
'root', nargs='+', help='Root(s) to search for commits from.')
parser.add_argument('--debug', action='store_true')
- opts = parser.parse_args(args)
+ opts = parser.parse_args()
logging.basicConfig(
format='%(asctime)s: %(levelname)s: %(filename)s:%(lineno)d: %(message)s',
@@ -228,14 +254,17 @@ def main(args: t.List[str]) -> int:
# out. The overwhelmingly common case is also to have one root, and it's way
# easier to reason about output that comes in an order that's meaningful to
# git.
- all_reverts = collections.OrderedDict()
+ seen_reverts = set()
+ all_reverts = []
for root in opts.root:
for revert in find_reverts(opts.git_dir, opts.base_ref, root):
- all_reverts[revert] = None
+ if revert not in seen_reverts:
+ seen_reverts.add(revert)
+ all_reverts.append(revert)
- for revert in all_reverts.keys():
- print('%s claims to revert %s' % (revert.sha, revert.reverted_sha))
+ for revert in all_reverts:
+ print(f'{revert.sha} claims to revert {revert.reverted_sha}')
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ _main()
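The `_main` change above replaces an `OrderedDict` with the classic
seen-set-plus-list idiom: insertion order is preserved while duplicate reverts
found under multiple roots are dropped. The same order-preserving dedup,
sketched in Rust with illustrative short shas:

```
use std::collections::HashSet;

fn main() {
    // The second "9f981e9a" models the same revert found under two roots.
    let reverts = ["9f981e9a", "4e0fe038", "9f981e9a"];
    let mut seen = HashSet::new();
    let mut ordered = Vec::new();
    for r in reverts {
        // HashSet::insert returns false when the value was already present.
        if seen.insert(r) {
            ordered.push(r);
        }
    }
    assert_eq!(ordered, ["9f981e9a", "4e0fe038"]);
    println!("{:?}", ordered);
}
```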
diff --git a/llvm_tools/revert_checker_test.py b/llvm_tools/revert_checker_test.py
deleted file mode 100755
index 16b3c3f6..00000000
--- a/llvm_tools/revert_checker_test.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tests for revert_checker."""
-
-from __future__ import print_function
-
-# pylint: disable=cros-logging-import
-import logging
-import unittest
-
-import llvm_project
-import revert_checker
-
-# pylint: disable=protected-access
-
-
-class _SilencingFilter(object):
- """Silences all log messages.
-
- Also collects info about log messages that would've been emitted.
- """
-
- def __init__(self):
- self.messages = []
-
- def filter(self, record):
- self.messages.append(record.getMessage())
- return 0
-
-
-class Test(unittest.TestCase):
- """Tests for revert_checker."""
-
- def silence_logging(self):
- root = logging.getLogger()
- filt = _SilencingFilter()
- root.addFilter(filt)
- self.addCleanup(root.removeFilter, filt)
- return filt
-
- def test_known_log_stream(self):
- start_sha = 'e241573d5972d34a323fa5c64774c4207340beb3'
- end_sha = 'a7a37517751ffb0f5529011b4ba96e67fcb27510'
- commits = [
- revert_checker._LogEntry(
- 'e241573d5972d34a323fa5c64774c4207340beb3', '\n'.join((
- '[mlir] NFC: remove IntegerValueSet / MutableIntegerSet',
- '',
- 'Summary:',
- '- these are unused and really not needed now given flat '
- 'affine',
- ' constraints',
- '',
- 'Differential Revision: https://reviews.llvm.org/D75792',
- ))),
- revert_checker._LogEntry(
- '97572fa6e9daecd648873496fd11f7d1e25a55f0',
- '[NFC] use hasAnyOperatorName and hasAnyOverloadedOperatorName '
- 'functions in clang-tidy matchers',
- ),
- ]
-
- logs = list(
- revert_checker._log_stream(
- llvm_project.get_location(),
- root_sha=start_sha,
- end_at_sha=end_sha,
- ))
- self.assertEqual(commits, logs)
-
- def test_reverted_noncommit_object_is_a_nop(self):
- log_filter = self.silence_logging()
- # c9944df916e41b1014dff5f6f75d52297b48ecdc mentions reverting a non-commit
- # object. It sits between the given base_ref and root.
- reverts = revert_checker.find_reverts(
- git_dir=llvm_project.get_location(),
- across_ref='c9944df916e41b1014dff5f6f75d52297b48ecdc~',
- root='c9944df916e41b1014dff5f6f75d52297b48ecdc')
- self.assertEqual(reverts, [])
-
- complaint = ('Failed to resolve reverted object '
- 'edd18355be574122aaa9abf58c15d8c50fb085a1')
- self.assertTrue(
- any(x.startswith(complaint) for x in log_filter.messages),
- log_filter.messages)
-
- def test_known_reverts_across_previous_llvm_next_rev(self):
- # c9944df916e41b1014dff5f6f75d52297b48ecdc mentions reverting a non-commit
- # object. It sits between the given base_ref and root.
- reverts = revert_checker.find_reverts(
- git_dir=llvm_project.get_location(),
- across_ref='c47f971694be0159ffddfee8a75ae515eba91439',
- root='9f981e9adf9c8d29bb80306daf08d2770263ade6')
- self.assertEqual(reverts, [
- revert_checker.Revert(
- sha='9f981e9adf9c8d29bb80306daf08d2770263ade6',
- reverted_sha='4060016fce3e6a0b926ee9fc59e440a612d3a2ec'),
- revert_checker.Revert(
- sha='4e0fe038f438ae1679eae9e156e1f248595b2373',
- reverted_sha='65b21282c710afe9c275778820c6e3c1cf46734b'),
- ])
-
-
-if __name__ == '__main__':
- llvm_project.ensure_up_to_date()
- unittest.main()
diff --git a/llvm_tools/update_all_tryjobs_with_auto.py b/llvm_tools/update_all_tryjobs_with_auto.py
deleted file mode 100755
index 11e67ed4..00000000
--- a/llvm_tools/update_all_tryjobs_with_auto.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Updates the status of all tryjobs to the result of `cros buildresult`."""
-
-from __future__ import print_function
-
-import argparse
-import json
-import os
-
-import chroot
-import update_tryjob_status
-
-
-def GetPathToUpdateAllTryjobsWithAutoScript():
- """Returns the absolute path to this script."""
-
- return os.path.abspath(__file__)
-
-
-def GetCommandLineArgs():
- """Parses the command line for the command line arguments."""
-
- # Default absoute path to the chroot if not specified.
- cros_root = os.path.expanduser('~')
- cros_root = os.path.join(cros_root, 'chromiumos')
-
- # Create parser and add optional command-line arguments.
- parser = argparse.ArgumentParser(description=__doc__)
-
- # Add argument for the JSON file to use for the update of a tryjob.
- parser.add_argument(
- '--last_tested',
- required=True,
- help='The absolute path to the JSON file that contains the tryjobs used '
- 'for bisecting LLVM.')
-
- # Add argument for a specific chroot path.
- parser.add_argument(
- '--chroot_path',
- default=cros_root,
- help='the path to the chroot (default: %(default)s)')
-
- args_output = parser.parse_args()
-
- if not os.path.isfile(args_output.last_tested) or \
- not args_output.last_tested.endswith('.json'):
- raise ValueError('File does not exist or does not ending in ".json" '
- ': %s' % args_output.last_tested)
-
- return args_output
-
-
-def main():
- """Updates the status of a tryjob."""
-
- chroot.VerifyOutsideChroot()
-
- args_output = GetCommandLineArgs()
-
- with open(args_output.last_tested) as tryjobs:
- bisect_contents = json.load(tryjobs)
-
- for tryjob in bisect_contents['jobs']:
- if tryjob['status'] == update_tryjob_status.TryjobStatus.PENDING.value:
- tryjob['status'] = update_tryjob_status.GetAutoResult(
- args_output.chroot_path, tryjob['buildbucket_id'])
-
- new_file = '%s.new' % args_output.last_tested
- with open(new_file, 'w') as update_tryjobs:
- json.dump(bisect_contents, update_tryjobs, indent=4, separators=(',', ': '))
- os.rename(new_file, args_output.last_tested)
-
-
-if __name__ == '__main__':
- main()
diff --git a/llvm_tools/update_chromeos_llvm_hash.py b/llvm_tools/update_chromeos_llvm_hash.py
index e28fe690..4e9b9104 100755
--- a/llvm_tools/update_chromeos_llvm_hash.py
+++ b/llvm_tools/update_chromeos_llvm_hash.py
@@ -4,32 +4,39 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-"""Updates the LLVM hash and uprevs the build of the specified
-
-packages.
+"""Updates the LLVM hash and uprevs the build of the specified packages.
For each package, a temporary repo is created and the changes are uploaded
for review.
"""
from __future__ import print_function
-from datetime import datetime
-from enum import Enum
import argparse
+import datetime
+import enum
import os
import re
import subprocess
-from failure_modes import FailureModes
import chroot
+import failure_modes
import get_llvm_hash
import git
import llvm_patch_management
+DEFAULT_PACKAGES = [
+ 'dev-util/lldb-server',
+ 'sys-devel/llvm',
+ 'sys-libs/compiler-rt',
+ 'sys-libs/libcxx',
+ 'sys-libs/libcxxabi',
+ 'sys-libs/llvm-libunwind',
+]
+
# Specify which LLVM hash to update
-class LLVMVariant(Enum):
+class LLVMVariant(enum.Enum):
"""Represent the LLVM hash in an ebuild file to update."""
current = 'LLVM_HASH'
@@ -41,6 +48,21 @@ class LLVMVariant(Enum):
verbose = False
+def defaultCrosRoot():
+ """Get default location of chroot_path.
+
+ The logic assumes that the cros_root is ~/chromiumos, unless llvm_tools is
+ inside of a CrOS checkout, in which case that checkout should be used.
+
+ Returns:
+ The best guess location for the cros checkout.
+ """
+ llvm_tools_path = os.path.realpath(os.path.dirname(__file__))
+ if llvm_tools_path.endswith('src/third_party/toolchain-utils/llvm_tools'):
+ return os.path.join(llvm_tools_path, '../../../../')
+ return '~/chromiumos'
+
+
def GetCommandLineArgs():
"""Parses the command line for the optional command line arguments.
@@ -51,35 +73,28 @@ def GetCommandLineArgs():
and the LLVM version to use when retrieving the LLVM hash.
"""
- # Default path to the chroot if a path is not specified.
- cros_root = os.path.expanduser('~')
- cros_root = os.path.join(cros_root, 'chromiumos')
-
# Create parser and add optional command-line arguments.
parser = argparse.ArgumentParser(
description="Updates the build's hash for llvm-next.")
# Add argument for a specific chroot path.
- parser.add_argument(
- '--chroot_path',
- default=cros_root,
- help='the path to the chroot (default: %(default)s)')
+ parser.add_argument('--chroot_path',
+ default=defaultCrosRoot(),
+ help='the path to the chroot (default: %(default)s)')
# Add argument for specific builds to uprev and update their llvm-next hash.
- parser.add_argument(
- '--update_packages',
- default=['sys-devel/llvm'],
- required=False,
- nargs='+',
- help='the ebuilds to update their hash for llvm-next ' \
- '(default: %(default)s)')
+ parser.add_argument('--update_packages',
+ default=DEFAULT_PACKAGES,
+ required=False,
+ nargs='+',
+ help='the ebuilds to update their hash for llvm-next '
+ '(default: %(default)s)')
# Add argument for whether to display command contents to `stdout`.
- parser.add_argument(
- '--verbose',
- action='store_true',
- help='display contents of a command to the terminal '
- '(default: %(default)s)')
+ parser.add_argument('--verbose',
+ action='store_true',
+ help='display contents of a command to the terminal '
+ '(default: %(default)s)')
# Add argument for the LLVM hash to update
parser.add_argument(
@@ -91,7 +106,7 @@ def GetCommandLineArgs():
# Add argument for the LLVM version to use.
parser.add_argument(
'--llvm_version',
- type=get_llvm_hash.is_svn_option,
+ type=get_llvm_hash.IsSvnOption,
required=True,
help='which git hash to use. Either a svn revision, or one '
'of %s' % sorted(get_llvm_hash.KNOWN_HASH_SOURCES))
@@ -99,12 +114,15 @@ def GetCommandLineArgs():
# Add argument for the mode of the patch management when handling patches.
parser.add_argument(
'--failure_mode',
- default=FailureModes.FAIL.value,
- choices=[FailureModes.FAIL.value, FailureModes.CONTINUE.value,
- FailureModes.DISABLE_PATCHES.value,
- FailureModes.REMOVE_PATCHES.value],
- help='the mode of the patch manager when handling failed patches ' \
- '(default: %(default)s)')
+ default=failure_modes.FailureModes.FAIL.value,
+ choices=[
+ failure_modes.FailureModes.FAIL.value,
+ failure_modes.FailureModes.CONTINUE.value,
+ failure_modes.FailureModes.DISABLE_PATCHES.value,
+ failure_modes.FailureModes.REMOVE_PATCHES.value
+ ],
+ help='the mode of the patch manager when handling failed patches '
+ '(default: %(default)s)')
# Add argument for the patch metadata file.
parser.add_argument(
@@ -199,7 +217,6 @@ def UpdateEbuildLLVMHash(ebuild_path, llvm_variant, git_hash, svn_version):
for cur_line in ReplaceLLVMHash(ebuild_file, llvm_variant, git_hash,
svn_version):
temp_file.write(cur_line)
-
os.rename(temp_ebuild_file, ebuild_path)
# Get the path to the parent directory.
@@ -217,6 +234,9 @@ def ReplaceLLVMHash(ebuild_lines, llvm_variant, git_hash, svn_version):
llvm_variant: The LLVM hash to update.
git_hash: The new git hash.
svn_version: The SVN-style revision number of git_hash.
+
+ Yields:
+ lines of the modified ebuild file
"""
is_updated = False
llvm_regex = re.compile('^' + re.escape(llvm_variant.value) +
@@ -266,7 +286,7 @@ def UprevEbuildSymlink(symlink):
os.path.dirname(symlink), 'mv', symlink, new_symlink])
-def UprevEbuildToVersion(symlink, svn_version):
+def UprevEbuildToVersion(symlink, svn_version, git_hash):
"""Uprevs the ebuild's revision number.
Increases the revision number by 1 and stages the change in
@@ -275,31 +295,36 @@ def UprevEbuildToVersion(symlink, svn_version):
Args:
symlink: The absolute path of an ebuild symlink.
svn_version: The SVN-style revision number of git_hash.
+ git_hash: The new git hash.
Raises:
ValueError: Failed to uprev the ebuild or failed to stage the changes.
+ AssertionError: No llvm version provided for an LLVM uprev
"""
if not os.path.islink(symlink):
raise ValueError('Invalid symlink provided: %s' % symlink)
ebuild = os.path.realpath(symlink)
+ llvm_major_version = get_llvm_hash.GetLLVMMajorVersion(git_hash)
# llvm
package = os.path.basename(os.path.dirname(symlink))
if not package:
raise ValueError('Tried to uprev an unknown package')
- # llvm
if package == 'llvm':
new_ebuild, is_changed = re.subn(
- r'pre([0-9]+)_p([0-9]+)',
- 'pre%s_p%s' % (svn_version, \
- datetime.today().strftime('%Y%m%d')),
+ r'(\d+)\.(\d+)_pre([0-9]+)_p([0-9]+)',
+ '%s.\\2_pre%s_p%s' % (llvm_major_version, svn_version,
+ datetime.datetime.today().strftime('%Y%m%d')),
ebuild,
count=1)
# any other package
else:
- new_ebuild, is_changed = re.subn(
- r'pre([0-9]+)', 'pre%s' % svn_version, ebuild, count=1)
+ new_ebuild, is_changed = re.subn(r'(\d+)\.(\d+)_pre([0-9]+)',
+ '%s.\\2_pre%s' %
+ (llvm_major_version, svn_version),
+ ebuild,
+ count=1)
if not is_changed: # failed to increment the revision number
raise ValueError('Failed to uprev the ebuild.')
@@ -374,13 +399,14 @@ def StagePatchMetadataFileForCommit(patch_metadata_file_path):
"""
if not os.path.isfile(patch_metadata_file_path):
- raise ValueError(
- 'Invalid patch metadata file provided: %s' % patch_metadata_file_path)
+ raise ValueError('Invalid patch metadata file provided: %s' %
+ patch_metadata_file_path)
# Cmd to stage the patch metadata file for commit.
subprocess.check_output([
'git', '-C',
- os.path.dirname(patch_metadata_file_path), 'add', patch_metadata_file_path
+ os.path.dirname(patch_metadata_file_path), 'add',
+ patch_metadata_file_path
])
@@ -393,15 +419,18 @@ def StagePackagesPatchResultsForCommit(package_info_dict, commit_messages):
package (key).
commit_messages: The commit message that has the updated ebuilds and
upreving information.
+
+ Returns:
+ commit_messages with new additions
"""
# For each package, check if any patches for that package have
# changed, if so, add which patches have changed to the commit
# message.
for package_name, patch_info_dict in package_info_dict.items():
- if patch_info_dict['disabled_patches'] or \
- patch_info_dict['removed_patches'] or \
- patch_info_dict['modified_metadata']:
+ if (patch_info_dict['disabled_patches']
+ or patch_info_dict['removed_patches']
+ or patch_info_dict['modified_metadata']):
cur_package_header = '\nFor the package %s:' % package_name
commit_messages.append(cur_package_header)
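The reworked condition appends a per-package section whenever any of the three patch fields is truthy. A sketch of the input shape it inspects (hypothetical package and patch names):

```
package_info_dict = {
    'sys-devel/llvm': {
        'disabled_patches': ['fix-foo.patch'],  # hypothetical
        'removed_patches': [],
        'modified_metadata': '/abs/path/to/FILESDIR/PATCHES.json',
    }
}
# Any truthy field above triggers:
#   commit_messages.append('\nFor the package sys-devel/llvm:')
# followed by entries describing that package's changed patches.
```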
@@ -450,9 +479,9 @@ def UpdatePackages(packages, llvm_variant, git_hash, svn_version, chroot_path,
the patches and its metadata.
mode: The mode of the patch manager when handling an applicable patch
that failed to apply.
- Ex: 'FailureModes.FAIL'
+ Ex. 'FailureModes.FAIL'
git_hash_source: The source of which git hash to use based off of.
- Ex: 'google3', 'tot', or <version> such as 365123
+ Ex. 'google3', 'tot', or <version> such as 365123
    extra_commit_msg: extra text to append to the commit message.
Returns:
@@ -478,11 +507,11 @@ def UpdatePackages(packages, llvm_variant, git_hash, svn_version, chroot_path,
if llvm_variant == LLVMVariant.next:
commit_message_header = 'llvm-next'
if git_hash_source in get_llvm_hash.KNOWN_HASH_SOURCES:
- commit_message_header += (
- '/%s: upgrade to %s (r%d)' % (git_hash_source, git_hash, svn_version))
+ commit_message_header += ('/%s: upgrade to %s (r%d)' %
+ (git_hash_source, git_hash, svn_version))
else:
- commit_message_header += (
- ': upgrade to %s (r%d)' % (git_hash, svn_version))
+ commit_message_header += (': upgrade to %s (r%d)' %
+ (git_hash, svn_version))
commit_messages = [
commit_message_header + '\n',
@@ -504,7 +533,7 @@ def UpdatePackages(packages, llvm_variant, git_hash, svn_version, chroot_path,
UpdateEbuildLLVMHash(ebuild_path, llvm_variant, git_hash, svn_version)
if llvm_variant == LLVMVariant.current:
- UprevEbuildToVersion(symlink_path, svn_version)
+ UprevEbuildToVersion(symlink_path, svn_version, git_hash)
else:
UprevEbuildSymlink(symlink_path)
@@ -514,6 +543,8 @@ def UpdatePackages(packages, llvm_variant, git_hash, svn_version, chroot_path,
packages.append('%s/%s' % (parent_dir_name, cur_dir_name))
commit_messages.append('%s/%s' % (parent_dir_name, cur_dir_name))
+ EnsurePackageMaskContains(chroot_path, git_hash)
+
# Handle the patches for each package.
package_info_dict = llvm_patch_management.UpdatePackagesPatchMetadataFile(
chroot_path, svn_version, patch_metadata_file, packages, mode)
@@ -533,6 +564,31 @@ def UpdatePackages(packages, llvm_variant, git_hash, svn_version, chroot_path,
return change_list
+def EnsurePackageMaskContains(chroot_path, git_hash):
+ """Adds the major version of llvm to package.mask if it's not already present.
+
+ Args:
+ chroot_path: The absolute path to the chroot.
+ git_hash: The new git hash.
+
+ Raises:
+    FileNotFoundError: package.mask not found in chromiumos-overlay
+ """
+
+ llvm_major_version = get_llvm_hash.GetLLVMMajorVersion(git_hash)
+
+ overlay_dir = os.path.join(chroot_path, 'src/third_party/chromiumos-overlay')
+ mask_path = os.path.join(overlay_dir,
+ 'profiles/targets/chromeos/package.mask')
+ with open(mask_path, 'r+') as mask_file:
+ mask_contents = mask_file.read()
+ expected_line = '=sys-devel/llvm-%s.0_pre*\n' % llvm_major_version
+ if expected_line not in mask_contents:
+ mask_file.write(expected_line)
+
+ subprocess.check_output(['git', '-C', overlay_dir, 'add', mask_path])
+
+
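A small demonstration of the idempotence check in `EnsurePackageMaskContains`, with a hypothetical major version:

```
llvm_major_version = '13'  # hypothetical value
expected_line = '=sys-devel/llvm-%s.0_pre*\n' % llvm_major_version
assert expected_line == '=sys-devel/llvm-13.0_pre*\n'
# Opening package.mask with 'r+' and writing only after a full read() means
# a missing entry is appended at EOF while existing entries stay untouched.
```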
def main():
"""Updates the LLVM next hash for each package.
@@ -553,16 +609,16 @@ def main():
git_hash, svn_version = get_llvm_hash.GetLLVMHashAndVersionFromSVNOption(
git_hash_source)
- change_list = UpdatePackages(
- args_output.update_packages,
- llvm_variant,
- git_hash,
- svn_version,
- args_output.chroot_path,
- args_output.patch_metadata_file,
- FailureModes(args_output.failure_mode),
- git_hash_source,
- extra_commit_msg=None)
+ change_list = UpdatePackages(args_output.update_packages,
+ llvm_variant,
+ git_hash,
+ svn_version,
+ args_output.chroot_path,
+ args_output.patch_metadata_file,
+ failure_modes.FailureModes(
+ args_output.failure_mode),
+ git_hash_source,
+ extra_commit_msg=None)
print('Successfully updated packages to %s (%d)' % (git_hash, svn_version))
print('Gerrit URL: %s' % change_list.url)
diff --git a/llvm_tools/update_chromeos_llvm_hash_unittest.py b/llvm_tools/update_chromeos_llvm_hash_unittest.py
index 205feb0c..adb20598 100755
--- a/llvm_tools/update_chromeos_llvm_hash_unittest.py
+++ b/llvm_tools/update_chromeos_llvm_hash_unittest.py
@@ -8,8 +8,8 @@
from __future__ import print_function
-from collections import namedtuple
-from datetime import datetime
+import collections
+import datetime
import os
import re
import subprocess
@@ -18,6 +18,7 @@ import unittest.mock as mock
import chroot
import failure_modes
+import get_llvm_hash
import git
import llvm_patch_management
import test_helpers
@@ -30,6 +31,19 @@ import update_chromeos_llvm_hash
class UpdateLLVMHashTest(unittest.TestCase):
"""Test class for updating LLVM hashes of packages."""
+ @mock.patch.object(os.path, 'realpath')
+ def testDefaultCrosRootFromCrOSCheckout(self, mock_llvm_tools):
+ llvm_tools_path = '/path/to/cros/src/third_party/toolchain-utils/llvm_tools'
+ mock_llvm_tools.return_value = llvm_tools_path
+ self.assertEqual(update_chromeos_llvm_hash.defaultCrosRoot(),
+ '%s/../../../../' % llvm_tools_path)
+
+ @mock.patch.object(os.path, 'realpath')
+ def testDefaultCrosRootFromOutsideCrOSCheckout(self, mock_llvm_tools):
+ mock_llvm_tools.return_value = '~/toolchain-utils/llvm_tools'
+ self.assertEqual(update_chromeos_llvm_hash.defaultCrosRoot(),
+ '~/chromiumos')
+
# Simulate behavior of 'os.path.isfile()' when the ebuild path to a package
# does not exist.
@mock.patch.object(os.path, 'isfile', return_value=False)
@@ -67,11 +81,11 @@ class UpdateLLVMHashTest(unittest.TestCase):
# Verify the exception is raised when the ebuild file does not have
# 'LLVM_HASH'.
with self.assertRaises(ValueError) as err:
- update_chromeos_llvm_hash.UpdateEbuildLLVMHash(
- ebuild_file, llvm_variant, git_hash, svn_version)
+ update_chromeos_llvm_hash.UpdateEbuildLLVMHash(ebuild_file,
+ llvm_variant, git_hash,
+ svn_version)
- self.assertEqual(
- str(err.exception), ('Failed to update %s.', 'LLVM_HASH'))
+ self.assertEqual(str(err.exception), 'Failed to update LLVM_HASH')
llvm_variant = update_chromeos_llvm_hash.LLVMVariant.next
@@ -95,11 +109,11 @@ class UpdateLLVMHashTest(unittest.TestCase):
# Verify the exception is raised when the ebuild file does not have
# 'LLVM_NEXT_HASH'.
with self.assertRaises(ValueError) as err:
- update_chromeos_llvm_hash.UpdateEbuildLLVMHash(
- ebuild_file, llvm_variant, git_hash, svn_version)
+ update_chromeos_llvm_hash.UpdateEbuildLLVMHash(ebuild_file,
+ llvm_variant, git_hash,
+ svn_version)
- self.assertEqual(
- str(err.exception), ('Failed to update %s.', 'LLVM_NEXT_HASH'))
+ self.assertEqual(str(err.exception), 'Failed to update LLVM_NEXT_HASH')
self.assertEqual(mock_isfile.call_count, 2)
@@ -179,19 +193,25 @@ class UpdateLLVMHashTest(unittest.TestCase):
mock_stage_commit_command.assert_called_once()
+ @mock.patch.object(get_llvm_hash, 'GetLLVMMajorVersion')
@mock.patch.object(os.path, 'islink', return_value=False)
- def testFailedToUprevEbuildToVersionForInvalidSymlink(self, mock_islink):
+ def testFailedToUprevEbuildToVersionForInvalidSymlink(self, mock_islink,
+ mock_llvm_version):
symlink_path = '/path/to/chroot/package/package.ebuild'
svn_version = 1000
+ git_hash = 'badf00d'
+ mock_llvm_version.return_value = '1234'
    # Verify the exception is raised when an invalid symlink is passed in.
with self.assertRaises(ValueError) as err:
- update_chromeos_llvm_hash.UprevEbuildToVersion(symlink_path, svn_version)
+ update_chromeos_llvm_hash.UprevEbuildToVersion(symlink_path, svn_version,
+ git_hash)
self.assertEqual(
str(err.exception), 'Invalid symlink provided: %s' % symlink_path)
mock_islink.assert_called_once()
+ mock_llvm_version.assert_not_called()
@mock.patch.object(os.path, 'islink', return_value=False)
def testFailedToUprevEbuildSymlinkForInvalidSymlink(self, mock_islink):
@@ -206,22 +226,28 @@ class UpdateLLVMHashTest(unittest.TestCase):
mock_islink.assert_called_once()
+ @mock.patch.object(get_llvm_hash, 'GetLLVMMajorVersion')
# Simulate 'os.path.islink' when a symbolic link is passed in.
@mock.patch.object(os.path, 'islink', return_value=True)
# Simulate 'os.path.realpath' when a symbolic link is passed in.
@mock.patch.object(os.path, 'realpath', return_value=True)
- def testFailedToUprevEbuildToVersion(self, mock_realpath, mock_islink):
+ def testFailedToUprevEbuildToVersion(self, mock_realpath, mock_islink,
+ mock_llvm_version):
symlink_path = '/path/to/chroot/llvm/llvm_pre123_p.ebuild'
mock_realpath.return_value = '/abs/path/to/llvm/llvm_pre123_p.ebuild'
+ git_hash = 'badf00d'
+ mock_llvm_version.return_value = '1234'
svn_version = 1000
# Verify the exception is raised when the symlink does not match the
# expected pattern
with self.assertRaises(ValueError) as err:
- update_chromeos_llvm_hash.UprevEbuildToVersion(symlink_path, svn_version)
+ update_chromeos_llvm_hash.UprevEbuildToVersion(symlink_path, svn_version,
+ git_hash)
self.assertEqual(str(err.exception), 'Failed to uprev the ebuild.')
+ mock_llvm_version.assert_called_once_with(git_hash)
mock_islink.assert_called_once_with(symlink_path)
# Simulate 'os.path.islink' when a symbolic link is passed in.
@@ -238,17 +264,24 @@ class UpdateLLVMHashTest(unittest.TestCase):
mock_islink.assert_called_once_with(symlink_path)
+ @mock.patch.object(get_llvm_hash, 'GetLLVMMajorVersion')
@mock.patch.object(os.path, 'islink', return_value=True)
@mock.patch.object(os.path, 'realpath')
@mock.patch.object(subprocess, 'check_output', return_value=None)
def testSuccessfullyUprevEbuildToVersionLLVM(self, mock_command_output,
- mock_realpath, mock_islink):
- symlink = '/path/to/llvm/llvm_pre3_p2-r10.ebuild'
- ebuild = '/abs/path/to/llvm/llvm_pre3_p2.ebuild'
+ mock_realpath, mock_islink,
+ mock_llvm_version):
+ symlink = '/path/to/llvm/llvm-12.0_pre3_p2-r10.ebuild'
+ ebuild = '/abs/path/to/llvm/llvm-12.0_pre3_p2.ebuild'
mock_realpath.return_value = ebuild
+ git_hash = 'badf00d'
+ mock_llvm_version.return_value = '1234'
svn_version = 1000
- update_chromeos_llvm_hash.UprevEbuildToVersion(symlink, svn_version)
+ update_chromeos_llvm_hash.UprevEbuildToVersion(symlink, svn_version,
+ git_hash)
+
+ mock_llvm_version.assert_called_once_with(git_hash)
mock_islink.assert_called()
@@ -258,12 +291,8 @@ class UpdateLLVMHashTest(unittest.TestCase):
# Verify commands
symlink_dir = os.path.dirname(symlink)
- new_ebuild, _is_changed = re.subn(
- r'pre([0-9]+)_p([0-9]+)',
- 'pre%s_p%s' % (svn_version, \
- datetime.today().strftime('%Y%m%d')),
- ebuild,
- count=1)
+ timestamp = datetime.datetime.today().strftime('%Y%m%d')
+ new_ebuild = '/abs/path/to/llvm/llvm-1234.0_pre1000_p%s.ebuild' % timestamp
new_symlink = new_ebuild[:-len('.ebuild')] + '-r1.ebuild'
expected_cmd = ['git', '-C', symlink_dir, 'mv', ebuild, new_ebuild]
@@ -282,28 +311,34 @@ class UpdateLLVMHashTest(unittest.TestCase):
self.assertEqual(mock_command_output.call_args_list[3],
mock.call(expected_cmd))
+ @mock.patch.object(get_llvm_hash, 'GetLLVMMajorVersion')
@mock.patch.object(os.path, 'islink', return_value=True)
@mock.patch.object(os.path, 'realpath')
@mock.patch.object(subprocess, 'check_output', return_value=None)
def testSuccessfullyUprevEbuildToVersionNonLLVM(self, mock_command_output,
- mock_realpath, mock_islink):
- symlink = '/path/to/compiler-rt/compiler-rt_pre3_p2-r10.ebuild'
- ebuild = '/abs/path/to/compiler-rt/compiler-rt_pre3_p2.ebuild'
+ mock_realpath, mock_islink,
+ mock_llvm_version):
+ symlink = '/abs/path/to/compiler-rt/compiler-rt-12.0_pre314159265-r4.ebuild'
+ ebuild = '/abs/path/to/compiler-rt/compiler-rt-12.0_pre314159265.ebuild'
mock_realpath.return_value = ebuild
+ mock_llvm_version.return_value = '1234'
svn_version = 1000
+ git_hash = '5678'
- update_chromeos_llvm_hash.UprevEbuildToVersion(symlink, svn_version)
+ update_chromeos_llvm_hash.UprevEbuildToVersion(symlink, svn_version,
+ git_hash)
mock_islink.assert_called()
mock_realpath.assert_called_once_with(symlink)
+ mock_llvm_version.assert_called_once_with(git_hash)
+
mock_command_output.assert_called()
# Verify commands
symlink_dir = os.path.dirname(symlink)
- new_ebuild, _is_changed = re.subn(
- r'pre([0-9]+)', 'pre%s' % svn_version, ebuild, count=1)
+ new_ebuild = '/abs/path/to/compiler-rt/compiler-rt-1234.0_pre1000.ebuild'
new_symlink = new_ebuild[:-len('.ebuild')] + '-r1.ebuild'
expected_cmd = ['git', '-C', symlink_dir, 'mv', ebuild, new_ebuild]
@@ -350,7 +385,8 @@ class UpdateLLVMHashTest(unittest.TestCase):
# Test function to simulate 'ConvertChrootPathsToAbsolutePaths' when a
# symlink does not start with the prefix '/mnt/host/source'.
- def BadPrefixChrootPath(_chroot_path, _chroot_file_paths):
+ def BadPrefixChrootPath(*args):
+ assert len(args) == 2
raise ValueError('Invalid prefix for the chroot path: '
'%s' % package_chroot_path)
@@ -458,8 +494,7 @@ class UpdateLLVMHashTest(unittest.TestCase):
@mock.patch.object(os.path, 'isfile', return_value=True)
@mock.patch.object(subprocess, 'check_output', return_value=None)
- def testSuccessfullyStagedPatchMetadataFileForCommit(self, mock_run_cmd,
- _mock_isfile):
+ def testSuccessfullyStagedPatchMetadataFileForCommit(self, mock_run_cmd, _):
patch_metadata_path = '/abs/path/to/filesdir/PATCHES.json'
@@ -549,6 +584,7 @@ class UpdateLLVMHashTest(unittest.TestCase):
self.assertEqual(mock_stage_patches_for_commit.call_count, 2)
+ @mock.patch.object(get_llvm_hash, 'GetLLVMMajorVersion')
@mock.patch.object(update_chromeos_llvm_hash,
'CreatePathDictionaryFromPackages')
@mock.patch.object(git, 'CreateBranch')
@@ -560,20 +596,19 @@ class UpdateLLVMHashTest(unittest.TestCase):
def testExceptionRaisedWhenUpdatingPackages(
self, mock_realpath, mock_delete_repo, mock_upload_changes,
mock_uprev_symlink, mock_update_llvm_next, mock_create_repo,
- mock_create_path_dict):
-
- abs_path_to_package = '/some/path/to/chroot/src/path/to/package.ebuild'
-
- symlink_path_to_package = \
- '/some/path/to/chroot/src/path/to/package-r1.ebuild'
+ mock_create_path_dict, mock_llvm_major_version):
path_to_package_dir = '/some/path/to/chroot/src/path/to'
+ abs_path_to_package = os.path.join(path_to_package_dir, 'package.ebuild')
+ symlink_path_to_package = os.path.join(path_to_package_dir,
+ 'package-r1.ebuild')
+
+ mock_llvm_major_version.return_value = '1234'
# Test function to simulate 'CreateBranch' when successfully created the
# branch on a valid repo path.
- def SuccessfullyCreateBranchForChanges(_repo_path, branch):
+ def SuccessfullyCreateBranchForChanges(_, branch):
self.assertEqual(branch, 'update-LLVM_NEXT_HASH-a123testhash4')
- return
# Test function to simulate 'UpdateEbuildLLVMHash' when successfully
# updated the ebuild's 'LLVM_NEXT_HASH'.
@@ -581,23 +616,23 @@ class UpdateLLVMHashTest(unittest.TestCase):
self.assertEqual(ebuild_path, abs_path_to_package)
self.assertEqual(git_hash, 'a123testhash4')
self.assertEqual(svn_version, 1000)
- return
# Test function to simulate 'UprevEbuildSymlink' when the symlink to the
# ebuild does not have a revision number.
- def FailedToUprevEbuildSymlink(_symlink_path):
+ def FailedToUprevEbuildSymlink(_):
      # Raises a 'ValueError' exception because the symlink did not have a
# revision number.
raise ValueError('Failed to uprev the ebuild.')
# Test function to fail on 'UploadChanges' if the function gets called
# when an exception is raised.
- def ShouldNotExecuteUploadChanges(_repo_path, _git_hash, _commit_messages):
+ def ShouldNotExecuteUploadChanges(*args):
# Test function should not be called (i.e. execution should resume in the
# 'finally' block) because 'UprevEbuildSymlink' raised an
# exception.
- assert False, 'Failed to go to "finally" block ' \
- 'after the exception was raised.'
+ assert len(args) == 3
+ assert False, ('Failed to go to "finally" block '
+ 'after the exception was raised.')
test_package_path_dict = {symlink_path_to_package: abs_path_to_package}
@@ -627,10 +662,12 @@ class UpdateLLVMHashTest(unittest.TestCase):
# Verify exception is raised when an exception is thrown within
# the 'try' block by UprevEbuildSymlink function.
with self.assertRaises(ValueError) as err:
- update_chromeos_llvm_hash.UpdatePackages(
- packages_to_update, llvm_variant, git_hash, svn_version, chroot_path,
- patch_metadata_file, failure_modes.FailureModes.FAIL, git_hash_source,
- extra_commit_msg)
+ update_chromeos_llvm_hash.UpdatePackages(packages_to_update, llvm_variant,
+ git_hash, svn_version,
+ chroot_path, patch_metadata_file,
+ failure_modes.FailureModes.FAIL,
+ git_hash_source,
+ extra_commit_msg)
self.assertEqual(str(err.exception), 'Failed to uprev the ebuild.')
@@ -639,8 +676,9 @@ class UpdateLLVMHashTest(unittest.TestCase):
mock_create_repo.assert_called_once_with(path_to_package_dir, branch)
- mock_update_llvm_next.assert_called_once_with(
- abs_path_to_package, llvm_variant, git_hash, svn_version)
+ mock_update_llvm_next.assert_called_once_with(abs_path_to_package,
+ llvm_variant, git_hash,
+ svn_version)
mock_uprev_symlink.assert_called_once_with(symlink_path_to_package)
@@ -648,6 +686,8 @@ class UpdateLLVMHashTest(unittest.TestCase):
mock_delete_repo.assert_called_once_with(path_to_package_dir, branch)
+ @mock.patch.object(update_chromeos_llvm_hash, 'EnsurePackageMaskContains')
+ @mock.patch.object(get_llvm_hash, 'GetLLVMMajorVersion')
@mock.patch.object(update_chromeos_llvm_hash,
'CreatePathDictionaryFromPackages')
@mock.patch.object(git, 'CreateBranch')
@@ -658,23 +698,22 @@ class UpdateLLVMHashTest(unittest.TestCase):
@mock.patch.object(llvm_patch_management, 'UpdatePackagesPatchMetadataFile')
@mock.patch.object(update_chromeos_llvm_hash,
'StagePatchMetadataFileForCommit')
- def testSuccessfullyUpdatedPackages(
- self, mock_stage_patch_file, mock_update_package_metadata_file,
- mock_delete_repo, mock_upload_changes, mock_uprev_symlink,
- mock_update_llvm_next, mock_create_repo, mock_create_path_dict):
-
- abs_path_to_package = '/some/path/to/chroot/src/path/to/package.ebuild'
-
- symlink_path_to_package = \
- '/some/path/to/chroot/src/path/to/package-r1.ebuild'
+ def testSuccessfullyUpdatedPackages(self, mock_stage_patch_file,
+ mock_update_package_metadata_file,
+ mock_delete_repo, mock_upload_changes,
+ mock_uprev_symlink, mock_update_llvm_next,
+ mock_create_repo, mock_create_path_dict,
+ mock_llvm_version, mock_mask_contains):
path_to_package_dir = '/some/path/to/chroot/src/path/to'
+ abs_path_to_package = os.path.join(path_to_package_dir, 'package.ebuild')
+ symlink_path_to_package = os.path.join(path_to_package_dir,
+ 'package-r1.ebuild')
# Test function to simulate 'CreateBranch' when successfully created the
# branch for the changes to be made to the ebuild files.
- def SuccessfullyCreateBranchForChanges(_repo_path, branch):
+ def SuccessfullyCreateBranchForChanges(_, branch):
self.assertEqual(branch, 'update-LLVM_NEXT_HASH-a123testhash5')
- return
    # Test function to simulate 'UploadChanges' after a successful update of
    # 'LLVM_NEXT_HASH' of the ebuild file.
@@ -683,7 +722,6 @@ class UpdateLLVMHashTest(unittest.TestCase):
'/some/path/to/chroot/src/path/to/package.ebuild')
self.assertEqual(git_hash, 'a123testhash5')
self.assertEqual(svn_version, 1000)
- return
# Test function to simulate 'UprevEbuildSymlink' when successfully
# incremented the revision number by 1.
@@ -691,8 +729,6 @@ class UpdateLLVMHashTest(unittest.TestCase):
self.assertEqual(symlink_path,
'/some/path/to/chroot/src/path/to/package-r1.ebuild')
- return
-
# Test function to simulate 'UpdatePackagesPatchMetadataFile()' when the
# patch results contains a disabled patch in 'disable_patches' mode.
def RetrievedPatchResults(chroot_path, svn_version, patch_metadata_file,
@@ -704,7 +740,7 @@ class UpdateLLVMHashTest(unittest.TestCase):
self.assertListEqual(packages, ['path/to'])
self.assertEqual(mode, failure_modes.FailureModes.DISABLE_PATCHES)
- PatchInfo = namedtuple('PatchInfo', [
+ PatchInfo = collections.namedtuple('PatchInfo', [
'applied_patches', 'failed_patches', 'non_applicable_patches',
'disabled_patches', 'removed_patches', 'modified_metadata'
])
@@ -727,9 +763,9 @@ class UpdateLLVMHashTest(unittest.TestCase):
# Test function to simulate 'UploadChanges()' when successfully created a
# commit for the changes made to the packages and their patches and
# retrieved the change list of the commit.
- def SuccessfullyUploadedChanges(_repo_path, _branch, _commit_messages):
+ def SuccessfullyUploadedChanges(*args):
+ assert len(args) == 3
commit_url = 'https://some_name/path/to/commit/+/12345'
-
return git.CommitContents(url=commit_url, cl_number=12345)
test_package_path_dict = {symlink_path_to_package: abs_path_to_package}
@@ -746,6 +782,8 @@ class UpdateLLVMHashTest(unittest.TestCase):
mock_uprev_symlink.side_effect = SuccessfullyUprevedEbuildSymlink
mock_update_package_metadata_file.side_effect = RetrievedPatchResults
mock_upload_changes.side_effect = SuccessfullyUploadedChanges
+ mock_llvm_version.return_value = '1234'
+    mock_mask_contains.return_value = None
packages_to_update = ['test-packages/package1']
llvm_variant = update_chromeos_llvm_hash.LLVMVariant.next
@@ -772,11 +810,14 @@ class UpdateLLVMHashTest(unittest.TestCase):
mock_create_repo.assert_called_once_with(path_to_package_dir, branch)
- mock_update_llvm_next.assert_called_once_with(
- abs_path_to_package, llvm_variant, git_hash, svn_version)
+ mock_update_llvm_next.assert_called_once_with(abs_path_to_package,
+ llvm_variant, git_hash,
+ svn_version)
mock_uprev_symlink.assert_called_once_with(symlink_path_to_package)
+ mock_mask_contains.assert_called_once_with(chroot_path, git_hash)
+
expected_commit_messages = [
'llvm-next/tot: upgrade to a123testhash5 (r1000)\n',
'The following packages have been updated:', 'path/to',
@@ -796,6 +837,48 @@ class UpdateLLVMHashTest(unittest.TestCase):
mock_delete_repo.assert_called_once_with(path_to_package_dir, branch)
+ @mock.patch.object(subprocess, 'check_output', return_value=None)
+ @mock.patch.object(get_llvm_hash, 'GetLLVMMajorVersion')
+ def testEnsurePackageMaskContainsExisting(self, mock_llvm_version,
+ mock_git_add):
+ chroot_path = 'absolute/path/to/chroot'
+ git_hash = 'badf00d'
+ mock_llvm_version.return_value = '1234'
+ with mock.patch(
+ 'update_chromeos_llvm_hash.open',
+ mock.mock_open(read_data='\n=sys-devel/llvm-1234.0_pre*\n'),
+ create=True) as mock_file:
+ update_chromeos_llvm_hash.EnsurePackageMaskContains(chroot_path, git_hash)
+ handle = mock_file()
+ handle.write.assert_not_called()
+ mock_llvm_version.assert_called_once_with(git_hash)
+
+ overlay_dir = 'absolute/path/to/chroot/src/third_party/chromiumos-overlay'
+ mask_path = overlay_dir + '/profiles/targets/chromeos/package.mask'
+ mock_git_add.assert_called_once_with(
+ ['git', '-C', overlay_dir, 'add', mask_path])
+
+ @mock.patch.object(subprocess, 'check_output', return_value=None)
+ @mock.patch.object(get_llvm_hash, 'GetLLVMMajorVersion')
+ def testEnsurePackageMaskContainsNotExisting(self, mock_llvm_version,
+ mock_git_add):
+ chroot_path = 'absolute/path/to/chroot'
+ git_hash = 'badf00d'
+ mock_llvm_version.return_value = '1234'
+ with mock.patch(
+ 'update_chromeos_llvm_hash.open',
+ mock.mock_open(read_data='nothing relevant'),
+ create=True) as mock_file:
+ update_chromeos_llvm_hash.EnsurePackageMaskContains(chroot_path, git_hash)
+ handle = mock_file()
+ handle.write.assert_called_once_with('=sys-devel/llvm-1234.0_pre*\n')
+ mock_llvm_version.assert_called_once_with(git_hash)
+
+ overlay_dir = 'absolute/path/to/chroot/src/third_party/chromiumos-overlay'
+ mask_path = overlay_dir + '/profiles/targets/chromeos/package.mask'
+ mock_git_add.assert_called_once_with(
+ ['git', '-C', overlay_dir, 'add', mask_path])
+
if __name__ == '__main__':
unittest.main()
diff --git a/llvm_tools/update_packages_and_run_tests.py b/llvm_tools/update_packages_and_run_tests.py
index b54ba651..2e4a9058 100755
--- a/llvm_tools/update_packages_and_run_tests.py
+++ b/llvm_tools/update_packages_and_run_tests.py
@@ -51,10 +51,9 @@ def GetCommandLineArgs():
'of updating the packages')
# Add argument for a specific chroot path.
- parser.add_argument(
- '--chroot_path',
- default=cros_root,
- help='the path to the chroot (default: %(default)s)')
+ parser.add_argument('--chroot_path',
+ default=cros_root,
+ help='the path to the chroot (default: %(default)s)')
# Add argument to choose between llvm and llvm-next.
parser.add_argument(
@@ -71,65 +70,58 @@ def GetCommandLineArgs():
'arguments.')
# Add argument for the LLVM version to use.
- parser.add_argument(
- '--llvm_version',
- type=get_llvm_hash.is_svn_option,
- required=True,
- help='which git hash of LLVM to find '
- '{google3, ToT, <svn_version>} '
- '(default: finds the git hash of the google3 LLVM '
- 'version)')
+ parser.add_argument('--llvm_version',
+ type=get_llvm_hash.IsSvnOption,
+ required=True,
+ help='which git hash of LLVM to find '
+ '{google3, ToT, <svn_version>} '
+ '(default: finds the git hash of the google3 LLVM '
+ 'version)')
# Add argument to add reviewers for the created CL.
- parser.add_argument(
- '--reviewers',
- nargs='+',
- default=[],
- help='The reviewers for the package update changelist')
+ parser.add_argument('--reviewers',
+ nargs='+',
+ default=[],
+ help='The reviewers for the package update changelist')
# Add argument for whether to display command contents to `stdout`.
- parser.add_argument(
- '--verbose',
- action='store_true',
- help='display contents of a command to the terminal '
- '(default: %(default)s)')
+ parser.add_argument('--verbose',
+ action='store_true',
+ help='display contents of a command to the terminal '
+ '(default: %(default)s)')
subparsers = parser.add_subparsers(dest='subparser_name')
subparser_names = []
# Testing with the tryjobs.
tryjob_subparser = subparsers.add_parser('tryjobs')
subparser_names.append('tryjobs')
- tryjob_subparser.add_argument(
- '--builders',
- required=True,
- nargs='+',
- default=[],
- help='builders to use for the tryjob testing')
+ tryjob_subparser.add_argument('--builders',
+ required=True,
+ nargs='+',
+ default=[],
+ help='builders to use for the tryjob testing')
# Add argument for custom options for the tryjob.
- tryjob_subparser.add_argument(
- '--options',
- required=False,
- nargs='+',
- default=[],
- help='options to use for the tryjob testing')
+ tryjob_subparser.add_argument('--options',
+ required=False,
+ nargs='+',
+ default=[],
+ help='options to use for the tryjob testing')
# Testing with the recipe builders
recipe_subparser = subparsers.add_parser('recipe')
subparser_names.append('recipe')
- recipe_subparser.add_argument(
- '--options',
- required=False,
- nargs='+',
- default=[],
- help='options passed to the recipe builders')
-
- recipe_subparser.add_argument(
- '--builders',
- required=True,
- nargs='+',
- default=[],
- help='recipe builders to launch')
+ recipe_subparser.add_argument('--options',
+ required=False,
+ nargs='+',
+ default=[],
+ help='options passed to the recipe builders')
+
+ recipe_subparser.add_argument('--builders',
+ required=True,
+ nargs='+',
+ default=[],
+ help='recipe builders to launch')
# Testing with CQ.
cq_subparser = subparsers.add_parser('cq')
@@ -276,10 +268,12 @@ def RunTryJobs(cl_number, extra_change_lists, options, builders, chroot_path):
test_output = json.loads(out)
+ buildbucket_id = int(test_output[0]['id'])
+
tests.append({
'launch_time': str(GetCurrentTimeInUTC()),
- 'link': str(test_output[0]['url']),
- 'buildbucket_id': int(test_output[0]['buildbucket_id']),
+ 'link': 'http://ci.chromium.org/b/%s' % buildbucket_id,
+ 'buildbucket_id': buildbucket_id,
'extra_cls': extra_change_lists,
'options': options,
'builder': [builder]
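The launcher's JSON now exposes the build under `id`, and the link is derived from that id rather than read from the payload. A sketch with a made-up id:

```
import json

out = '[{"id": "8845912310296261120"}]'  # made-up buildbucket id
test_output = json.loads(out)
buildbucket_id = int(test_output[0]['id'])
link = 'http://ci.chromium.org/b/%s' % buildbucket_id
assert link == 'http://ci.chromium.org/b/8845912310296261120'
```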
@@ -358,7 +352,8 @@ def GetCQDependString(dependent_cls):
return None
# Cq-Depend must start a new paragraph prefixed with "Cq-Depend".
- return '\nCq-Depend: ' + ', '.join(('chromium:%s' % i) for i in dependent_cls)
+ return '\nCq-Depend: ' + ', '.join(
+ ('chromium:%s' % i) for i in dependent_cls)
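For reference, the reflowed function produces a footer like the one below; this is a standalone copy of the logic above, with a lowercased name for the sketch:

```
def get_cq_depend_string(dependent_cls):
  # Mirrors GetCQDependString above.
  if not dependent_cls:
    return None
  return '\nCq-Depend: ' + ', '.join(
      ('chromium:%s' % i) for i in dependent_cls)

assert (get_cq_depend_string([123, 456]) ==
        '\nCq-Depend: chromium:123, chromium:456')
```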
def GetCQIncludeTrybotsString(trybot):
@@ -400,11 +395,6 @@ def main():
args_output = GetCommandLineArgs()
- update_packages = [
- 'sys-devel/llvm', 'sys-libs/compiler-rt', 'sys-libs/libcxx',
- 'sys-libs/libcxxabi', 'sys-libs/llvm-libunwind'
- ]
-
patch_metadata_file = 'PATCHES.json'
svn_option = args_output.llvm_version
@@ -418,8 +408,8 @@ def main():
# If --last_tested is specified, check if the current run has the same
# arguments last time --last_tested is used.
if args_output.last_tested:
- chroot_file_paths = chroot.GetChrootEbuildPaths(args_output.chroot_path,
- update_packages)
+ chroot_file_paths = chroot.GetChrootEbuildPaths(
+ args_output.chroot_path, update_chromeos_llvm_hash.DEFAULT_PACKAGES)
arg_dict = {
'svn_version': svn_version,
'ebuilds': chroot_file_paths,
@@ -447,7 +437,7 @@ def main():
extra_commit_msg += cq_trybot_msg
change_list = update_chromeos_llvm_hash.UpdatePackages(
- update_packages,
+ update_chromeos_llvm_hash.DEFAULT_PACKAGES,
llvm_variant,
git_hash,
svn_version,
@@ -472,9 +462,10 @@ def main():
for test in tests:
print(test)
elif args_output.subparser_name == 'recipe':
- tests = StartRecipeBuilders(
- change_list.cl_number, args_output.extra_change_lists,
- args_output.options, args_output.builders, args_output.chroot_path)
+ tests = StartRecipeBuilders(change_list.cl_number,
+ args_output.extra_change_lists,
+ args_output.options, args_output.builders,
+ args_output.chroot_path)
print('Tests:')
for test in tests:
print(test)
diff --git a/llvm_tools/update_packages_and_run_tests_unittest.py b/llvm_tools/update_packages_and_run_tests_unittest.py
index d852893a..11f2b7f8 100755
--- a/llvm_tools/update_packages_and_run_tests_unittest.py
+++ b/llvm_tools/update_packages_and_run_tests_unittest.py
@@ -46,8 +46,7 @@ class UpdatePackagesAndRunTryjobsTest(unittest.TestCase):
def testMatchedLastTestedFile(self):
with test_helpers.CreateTemporaryFile() as last_tested_file:
arg_dict = {
- 'svn_version':
- 1234,
+ 'svn_version': 1234,
'ebuilds': [
'/path/to/package1-r2.ebuild',
'/path/to/package2/package2-r3.ebuild'
@@ -104,8 +103,9 @@ class UpdatePackagesAndRunTryjobsTest(unittest.TestCase):
]
self.assertEqual(
- update_packages_and_run_tests.GetTryJobCommand(
- change_list, extra_cls, options, builder), expected_cmd)
+ update_packages_and_run_tests.GetTryJobCommand(change_list, extra_cls,
+ options, builder),
+ expected_cmd)
@mock.patch.object(
update_packages_and_run_tests,
@@ -123,9 +123,9 @@ class UpdatePackagesAndRunTryjobsTest(unittest.TestCase):
]
bb_id = '1234'
- url = 'https://some_tryjob_url.com'
+ url = 'http://ci.chromium.org/b/%s' % bb_id
- mock_cmd.return_value = json.dumps([{'buildbucket_id': bb_id, 'url': url}])
+ mock_cmd.return_value = json.dumps([{'id': bb_id, 'url': url}])
chroot_path = '/some/path/to/chroot'
cl = 900
diff --git a/llvm_tools/update_tryjob_status.py b/llvm_tools/update_tryjob_status.py
index f1500364..f25fadca 100755
--- a/llvm_tools/update_tryjob_status.py
+++ b/llvm_tools/update_tryjob_status.py
@@ -16,7 +16,6 @@ import subprocess
import sys
import chroot
-from subprocess_helpers import ChrootRunCommand
from test_helpers import CreateTemporaryJsonFile
@@ -32,17 +31,6 @@ class TryjobStatus(enum.Enum):
# determines the 'status' value of the tryjob).
CUSTOM_SCRIPT = 'custom_script'
- # Uses the result returned by 'cros buildresult'.
- AUTO = 'auto'
-
-
-class BuilderStatus(enum.Enum):
- """Actual values given via 'cros buildresult'."""
-
- PASS = 'pass'
- FAIL = 'fail'
- RUNNING = 'running'
-
class CustomScriptStatus(enum.Enum):
"""Exit code values of a custom script."""
@@ -66,12 +54,6 @@ custom_script_exit_value_mapping = {
CustomScriptStatus.SKIP.value: TryjobStatus.SKIP.value
}
-builder_status_mapping = {
- BuilderStatus.PASS.value: TryjobStatus.GOOD.value,
- BuilderStatus.FAIL.value: TryjobStatus.BAD.value,
- BuilderStatus.RUNNING.value: TryjobStatus.PENDING.value
-}
-
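With `auto` and its `builder_status_mapping` gone, `custom_script` is the only remaining programmatic status source. A minimal sketch of that contract, with hypothetical paths (the real exit-code constants are the `CustomScriptStatus` values defined above):

```
import subprocess

proc = subprocess.Popen(
    ['/abs/path/to/custom_script.py',   # hypothetical script
     '/abs/path/to/tryjob_copy.json'])  # copy of the tryjob's contents
proc.communicate()
# GetCustomScriptResult() looks the exit code up in
# custom_script_exit_value_mapping and raises ValueError if it is unmapped.
new_status = custom_script_exit_value_mapping.get(proc.returncode)
```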
def GetCommandLineArgs():
"""Parses the command line for the command line arguments."""
@@ -106,12 +88,6 @@ def GetCommandLineArgs():
type=int,
help='The revision to set its status.')
- # Add argument for a specific chroot path.
- parser.add_argument(
- '--chroot_path',
- default=cros_root,
- help='the path to the chroot (default: %(default)s)')
-
# Add argument for the custom script to execute for the 'custom_script'
# option in '--set_status'.
parser.add_argument(
@@ -123,13 +99,14 @@ def GetCommandLineArgs():
args_output = parser.parse_args()
- if not os.path.isfile(args_output.status_file) or \
- not args_output.status_file.endswith('.json'):
+  if (not os.path.isfile(args_output.status_file)
+      or not args_output.status_file.endswith('.json')):
    raise ValueError('File does not exist or does not end in ".json": '
                     '%s' % args_output.status_file)
- if args_output.set_status == TryjobStatus.CUSTOM_SCRIPT.value and \
- not args_output.custom_script:
+ if (args_output.set_status == TryjobStatus.CUSTOM_SCRIPT.value and
+ not args_output.custom_script):
raise ValueError('Please provide the absolute path to the script to '
'execute.')
@@ -165,35 +142,6 @@ def FindTryjobIndex(revision, tryjobs_list):
return None
-def GetStatusFromCrosBuildResult(chroot_path, buildbucket_id):
- """Retrieves the 'status' using 'cros buildresult'."""
-
- get_buildbucket_id_cmd = [
- 'cros', 'buildresult', '--buildbucket-id',
- str(buildbucket_id), '--report', 'json'
- ]
-
- tryjob_json = ChrootRunCommand(chroot_path, get_buildbucket_id_cmd)
-
- tryjob_contents = json.loads(tryjob_json)
-
- return str(tryjob_contents['%d' % buildbucket_id]['status'])
-
-
-def GetAutoResult(chroot_path, buildbucket_id):
- """Returns the conversion of the result of 'cros buildresult'."""
-
- # Calls 'cros buildresult' to get the status of the tryjob.
- build_result = GetStatusFromCrosBuildResult(chroot_path, buildbucket_id)
-
- # The string returned by 'cros buildresult' might not be in the mapping.
- if build_result not in builder_status_mapping:
- raise ValueError(
- '"cros buildresult" return value is invalid: %s' % build_result)
-
- return builder_status_mapping[build_result]
-
-
def GetCustomScriptResult(custom_script, status_file, tryjob_contents):
"""Returns the conversion of the exit code of the custom script.
@@ -245,18 +193,15 @@ def GetCustomScriptResult(custom_script, status_file, tryjob_contents):
return custom_script_exit_value_mapping[exec_script_cmd_obj.returncode]
-def UpdateTryjobStatus(revision, set_status, status_file, chroot_path,
- custom_script):
+def UpdateTryjobStatus(revision, set_status, status_file, custom_script):
"""Updates a tryjob's 'status' field based off of 'set_status'.
Args:
revision: The revision associated with the tryjob.
set_status: What to update the 'status' field to.
        Ex: TryjobStatus.GOOD, TryjobStatus.BAD, TryjobStatus.PENDING, or
- TryjobStatus.AUTO where TryjobStatus.AUTO uses the result of
- 'cros buildresult'.
+         TryjobStatus.SKIP.
status_file: The .JSON file that contains the tryjobs.
- chroot_path: The absolute path to the chroot (used by 'cros buildresult').
custom_script: The absolute path to a script that will be executed which
will determine the 'status' value of the tryjob.
"""
@@ -282,8 +227,8 @@ def UpdateTryjobStatus(revision, set_status, status_file, chroot_path,
# 'FindTryjobIndex()' returns None if the revision was not found.
if tryjob_index is None:
- raise ValueError(
- 'Unable to find tryjob for %d in %s' % (revision, status_file))
+ raise ValueError('Unable to find tryjob for %d in %s' %
+ (revision, status_file))
# Set 'status' depending on 'set_status' for the tryjob.
if set_status == TryjobStatus.GOOD:
@@ -292,9 +237,6 @@ def UpdateTryjobStatus(revision, set_status, status_file, chroot_path,
bisect_contents['jobs'][tryjob_index]['status'] = TryjobStatus.BAD.value
elif set_status == TryjobStatus.PENDING:
bisect_contents['jobs'][tryjob_index]['status'] = TryjobStatus.PENDING.value
- elif set_status == TryjobStatus.AUTO:
- bisect_contents['jobs'][tryjob_index]['status'] = GetAutoResult(
- chroot_path, bisect_contents['jobs'][tryjob_index]['buildbucket_id'])
elif set_status == TryjobStatus.SKIP:
bisect_contents['jobs'][tryjob_index]['status'] = TryjobStatus.SKIP.value
elif set_status == TryjobStatus.CUSTOM_SCRIPT:
@@ -315,8 +257,7 @@ def main():
args_output = GetCommandLineArgs()
UpdateTryjobStatus(args_output.revision, TryjobStatus(args_output.set_status),
- args_output.status_file, args_output.chroot_path,
- args_output.custom_script)
+ args_output.status_file, args_output.custom_script)
if __name__ == '__main__':
diff --git a/llvm_tools/update_tryjob_status_unittest.py b/llvm_tools/update_tryjob_status_unittest.py
index b5e6556c..c42c6718 100755
--- a/llvm_tools/update_tryjob_status_unittest.py
+++ b/llvm_tools/update_tryjob_status_unittest.py
@@ -31,14 +31,13 @@ class UpdateTryjobStatusTest(unittest.TestCase):
'cl': 'https://some_link_to_tryjob.com',
'status': 'good',
'buildbucket_id': 91835
- },
- {
- 'rev': 1000,
- 'url': 'https://some_url_to_CL.com',
- 'cl': 'https://some_link_to_tryjob.com',
- 'status': 'pending',
- 'buildbucket_id': 10931
- }]
+ }, {
+ 'rev': 1000,
+ 'url': 'https://some_url_to_CL.com',
+ 'cl': 'https://some_link_to_tryjob.com',
+ 'status': 'pending',
+ 'buildbucket_id': 10931
+ }]
expected_index = 0
@@ -55,87 +54,19 @@ class UpdateTryjobStatusTest(unittest.TestCase):
'cl': 'https://some_link_to_tryjob.com',
'status': 'bad',
'buildbucket_id': 390
- },
- {
- 'rev': 10,
- 'url': 'https://some_url_to_CL.com',
- 'cl': 'https://some_link_to_tryjob.com',
- 'status': 'skip',
- 'buildbucket_id': 10
- }]
+ }, {
+ 'rev': 10,
+ 'url': 'https://some_url_to_CL.com',
+ 'cl': 'https://some_link_to_tryjob.com',
+ 'status': 'skip',
+ 'buildbucket_id': 10
+ }]
revision_to_find = 250
self.assertIsNone(
update_tryjob_status.FindTryjobIndex(revision_to_find, test_tryjobs))
- # Simulate the behavior of `ChrootRunCommand()` when executing a command
- # inside the chroot.
- @mock.patch.object(update_tryjob_status, 'ChrootRunCommand')
- def testGetStatusFromCrosBuildResult(self, mock_chroot_command):
- tryjob_contents = {
- '192': {
- 'status': 'good',
- 'CleanUpChroot': 'pass',
- 'artifacts_url': None
- }
- }
-
- # Use the test function to simulate 'ChrootRunCommand()' behavior.
- mock_chroot_command.return_value = json.dumps(tryjob_contents)
-
- buildbucket_id = 192
-
- chroot_path = '/some/path/to/chroot'
-
- self.assertEqual(
- update_tryjob_status.GetStatusFromCrosBuildResult(
- chroot_path, buildbucket_id), 'good')
-
- expected_cmd = [
- 'cros', 'buildresult', '--buildbucket-id',
- str(buildbucket_id), '--report', 'json'
- ]
-
- mock_chroot_command.assert_called_once_with(chroot_path, expected_cmd)
-
- # Simulate the behavior of `GetStatusFromCrosBuildResult()` when `cros
- # buildresult` returned a string that is not in the mapping.
- @mock.patch.object(
- update_tryjob_status,
- 'GetStatusFromCrosBuildResult',
- return_value='querying')
- def testInvalidCrosBuildResultValue(self, mock_cros_buildresult):
- chroot_path = '/some/path/to/chroot'
- buildbucket_id = 50
-
- # Verify the exception is raised when the return value of `cros buildresult`
- # is not in the `builder_status_mapping`.
- with self.assertRaises(ValueError) as err:
- update_tryjob_status.GetAutoResult(chroot_path, buildbucket_id)
-
- self.assertEqual(
- str(err.exception),
- '"cros buildresult" return value is invalid: querying')
-
- mock_cros_buildresult.assert_called_once_with(chroot_path, buildbucket_id)
-
- # Simulate the behavior of `GetStatusFromCrosBuildResult()` when `cros
- # buildresult` returned a string that is in the mapping.
- @mock.patch.object(
- update_tryjob_status,
- 'GetStatusFromCrosBuildResult',
- return_value=update_tryjob_status.BuilderStatus.PASS.value)
- def testValidCrosBuildResultValue(self, mock_cros_buildresult):
- chroot_path = '/some/path/to/chroot'
- buildbucket_id = 100
-
- self.assertEqual(
- update_tryjob_status.GetAutoResult(chroot_path, buildbucket_id),
- TryjobStatus.GOOD.value)
-
- mock_cros_buildresult.assert_called_once_with(chroot_path, buildbucket_id)
-
@mock.patch.object(subprocess, 'Popen')
# Simulate the behavior of `os.rename()` when successfully renamed a file.
@mock.patch.object(os, 'rename', return_value=None)
@@ -186,8 +117,9 @@ class UpdateTryjobStatusTest(unittest.TestCase):
# does not match any of the exit codes in the mapping of
# `custom_script_exit_value_mapping`.
with self.assertRaises(ValueError) as err:
- update_tryjob_status.GetCustomScriptResult(
- custom_script_path, status_file_path, tryjob_contents)
+ update_tryjob_status.GetCustomScriptResult(custom_script_path,
+ status_file_path,
+ tryjob_contents)
self.assertEqual(str(err.exception), expected_error_message)
@@ -212,8 +144,8 @@ class UpdateTryjobStatusTest(unittest.TestCase):
# `Popen.communicate()` returns a tuple of `stdout` and `stderr`.
mock_exec_custom_script.return_value.communicate.return_value = (None, None)
- mock_exec_custom_script.return_value.returncode = \
- CustomScriptStatus.GOOD.value
+ mock_exec_custom_script.return_value.returncode = (
+ CustomScriptStatus.GOOD.value)
tryjob_contents = {
'status': 'good',
@@ -226,8 +158,9 @@ class UpdateTryjobStatusTest(unittest.TestCase):
status_file_path = '/abs/path/to/status_file.json'
self.assertEqual(
- update_tryjob_status.GetCustomScriptResult(
- custom_script_path, status_file_path, tryjob_contents),
+ update_tryjob_status.GetCustomScriptResult(custom_script_path,
+ status_file_path,
+ tryjob_contents),
TryjobStatus.GOOD.value)
mock_exec_custom_script.assert_called_once()
@@ -247,16 +180,14 @@ class UpdateTryjobStatusTest(unittest.TestCase):
revision_to_update = 369412
- chroot_path = '/abs/path/to/chroot'
-
custom_script = None
# Verify the exception is raised when the `status_file` does not have any
# `jobs` (empty).
with self.assertRaises(SystemExit) as err:
- update_tryjob_status.UpdateTryjobStatus(
- revision_to_update, TryjobStatus.GOOD, temp_json_file, chroot_path,
- custom_script)
+ update_tryjob_status.UpdateTryjobStatus(revision_to_update,
+ TryjobStatus.GOOD,
+ temp_json_file, custom_script)
self.assertEqual(str(err.exception), 'No tryjobs in %s' % temp_json_file)
@@ -283,16 +214,14 @@ class UpdateTryjobStatusTest(unittest.TestCase):
revision_to_update = 369416
- chroot_path = '/abs/path/to/chroot'
-
custom_script = None
    # Verify the exception is raised when the tryjob for the revision is not
    # found in the `status_file`.
with self.assertRaises(ValueError) as err:
- update_tryjob_status.UpdateTryjobStatus(
- revision_to_update, TryjobStatus.SKIP, temp_json_file, chroot_path,
- custom_script)
+ update_tryjob_status.UpdateTryjobStatus(revision_to_update,
+ TryjobStatus.SKIP,
+ temp_json_file, custom_script)
self.assertEqual(
str(err.exception), 'Unable to find tryjob for %d in %s' %
@@ -324,13 +253,11 @@ class UpdateTryjobStatusTest(unittest.TestCase):
# Index of the tryjob that is going to have its 'status' value updated.
tryjob_index = 0
- chroot_path = '/abs/path/to/chroot'
-
custom_script = None
update_tryjob_status.UpdateTryjobStatus(revision_to_update,
TryjobStatus.GOOD, temp_json_file,
- chroot_path, custom_script)
+ custom_script)
# Verify that the tryjob's 'status' has been updated in the status file.
with open(temp_json_file) as status_file:
@@ -365,13 +292,11 @@ class UpdateTryjobStatusTest(unittest.TestCase):
# Index of the tryjob that is going to have its 'status' value updated.
tryjob_index = 0
- chroot_path = '/abs/path/to/chroot'
-
custom_script = None
update_tryjob_status.UpdateTryjobStatus(revision_to_update,
TryjobStatus.BAD, temp_json_file,
- chroot_path, custom_script)
+ custom_script)
# Verify that the tryjob's 'status' has been updated in the status file.
with open(temp_json_file) as status_file:
@@ -407,13 +332,11 @@ class UpdateTryjobStatusTest(unittest.TestCase):
# Index of the tryjob that is going to have its 'status' value updated.
tryjob_index = 0
- chroot_path = '/abs/path/to/chroot'
-
custom_script = None
update_tryjob_status.UpdateTryjobStatus(
revision_to_update, update_tryjob_status.TryjobStatus.SKIP,
- temp_json_file, chroot_path, custom_script)
+ temp_json_file, custom_script)
# Verify that the tryjob's 'status' has been updated in the status file.
with open(temp_json_file) as status_file:
@@ -448,13 +371,11 @@ class UpdateTryjobStatusTest(unittest.TestCase):
# Index of the tryjob that is going to have its 'status' value updated.
tryjob_index = 0
- chroot_path = '/abs/path/to/chroot'
-
custom_script = None
update_tryjob_status.UpdateTryjobStatus(
revision_to_update, update_tryjob_status.TryjobStatus.PENDING,
- temp_json_file, chroot_path, custom_script)
+ temp_json_file, custom_script)
# Verify that the tryjob's 'status' has been updated in the status file.
with open(temp_json_file) as status_file:
@@ -465,69 +386,12 @@ class UpdateTryjobStatusTest(unittest.TestCase):
mock_find_tryjob_index.assert_called_once()
- # Simulate the behavior of `FindTryjobIndex()` when the tryjob exists in the
- # status file.
- @mock.patch.object(update_tryjob_status, 'FindTryjobIndex', return_value=0)
- # Simulate the behavior of `GetAutoResult()` when `cros buildresult` returns
- # a value that is in the mapping.
- @mock.patch.object(
- update_tryjob_status,
- 'GetAutoResult',
- return_value=TryjobStatus.GOOD.value)
- def testSuccessfullyUpdatedTryjobStatusToAuto(self, mock_get_auto_result,
- mock_find_tryjob_index):
- bisect_test_contents = {
- 'start': 369410,
- 'end': 369420,
- 'jobs': [{
- 'rev': 369411,
- 'status': 'pending',
- 'buildbucket_id': 1200
- }]
- }
-
- # Create a temporary .JSON file to simulate a .JSON file that has bisection
- # contents.
- with CreateTemporaryJsonFile() as temp_json_file:
- with open(temp_json_file, 'w') as f:
- WritePrettyJsonFile(bisect_test_contents, f)
-
- revision_to_update = 369411
-
- # Index of the tryjob that is going to have its 'status' value updated.
- tryjob_index = 0
-
- path_to_chroot = '/abs/path/to/chroot'
-
- custom_script = None
-
- update_tryjob_status.UpdateTryjobStatus(
- revision_to_update, update_tryjob_status.TryjobStatus.AUTO,
- temp_json_file, path_to_chroot, custom_script)
-
- # Verify that the tryjob's 'status' has been updated in the status file.
- with open(temp_json_file) as status_file:
- bisect_contents = json.load(status_file)
-
- self.assertEqual(bisect_contents['jobs'][tryjob_index]['status'],
- update_tryjob_status.TryjobStatus.GOOD.value)
-
- mock_get_auto_result.assert_called_once_with(
- path_to_chroot,
- bisect_test_contents['jobs'][tryjob_index]['buildbucket_id'])
-
- mock_find_tryjob_index.assert_called_once()
-
- # Simulate the behavior of `FindTryjobIndex()` when the tryjob exists in the
- # status file.
@mock.patch.object(update_tryjob_status, 'FindTryjobIndex', return_value=0)
- # Simulate the behavior of `GetCustomScriptResult()` when the custom script
- # exit code is in the mapping.
@mock.patch.object(
update_tryjob_status,
'GetCustomScriptResult',
return_value=TryjobStatus.SKIP.value)
- def testSuccessfullyUpdatedTryjobStatusToAuto(
+  def testSuccessfullyUpdatedTryjobStatusWithCustomScript(
self, mock_get_custom_script_result, mock_find_tryjob_index):
bisect_test_contents = {
'start': 369410,
@@ -550,13 +414,11 @@ class UpdateTryjobStatusTest(unittest.TestCase):
# Index of the tryjob that is going to have its 'status' value updated.
tryjob_index = 0
- path_to_chroot = '/abs/path/to/chroot'
-
custom_script_path = '/abs/path/to/custom_script.py'
update_tryjob_status.UpdateTryjobStatus(
revision_to_update, update_tryjob_status.TryjobStatus.CUSTOM_SCRIPT,
- temp_json_file, path_to_chroot, custom_script_path)
+ temp_json_file, custom_script_path)
# Verify that the tryjob's 'status' has been updated in the status file.
with open(temp_json_file) as status_file:
@@ -593,8 +455,6 @@ class UpdateTryjobStatusTest(unittest.TestCase):
revision_to_update = 369411
- path_to_chroot = '/abs/path/to/chroot'
-
nonexistent_update_status = 'revert_status'
custom_script = None
@@ -602,9 +462,9 @@ class UpdateTryjobStatusTest(unittest.TestCase):
# Verify the exception is raised when the `set_status` command line
# argument does not exist in the mapping.
with self.assertRaises(ValueError) as err:
- update_tryjob_status.UpdateTryjobStatus(
- revision_to_update, nonexistent_update_status, temp_json_file,
- path_to_chroot, custom_script)
+ update_tryjob_status.UpdateTryjobStatus(revision_to_update,
+ nonexistent_update_status,
+ temp_json_file, custom_script)
self.assertEqual(
str(err.exception),
diff --git a/llvm_tools/upload_lexan_crashes_to_forcey.py b/llvm_tools/upload_lexan_crashes_to_forcey.py
index b93f51a7..61bf6b7d 100755
--- a/llvm_tools/upload_lexan_crashes_to_forcey.py
+++ b/llvm_tools/upload_lexan_crashes_to_forcey.py
@@ -6,8 +6,6 @@
"""Fetches and submits the latest test-cases from Lexan's crash bucket."""
-# pylint: disable=cros-logging-import
-
import argparse
import contextlib
import datetime
@@ -101,21 +99,29 @@ def temp_dir() -> Generator[str, None, None]:
shutil.rmtree(loc)
+def download_and_unpack_test_case(gs_url: str, tempdir: str) -> None:
+ suffix = os.path.splitext(gs_url)[1]
+ target_name = 'test_case' + suffix
+ target = os.path.join(tempdir, target_name)
+ subprocess.run(['gsutil.py', 'cp', gs_url, target], check=True)
+ subprocess.run(['tar', 'xaf', target_name], check=True, cwd=tempdir)
+ os.unlink(target)
+
+
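The extracted helper composes with the existing `temp_dir()` context manager; a usage sketch with a made-up URL:

```
with temp_dir() as tempdir:
  download_and_unpack_test_case('gs://some-bucket/crash-123.tgz', tempdir)
  # tempdir now holds the unpacked repro files; the helper already
  # deleted the downloaded archive itself.
```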
def submit_test_case(gs_url: str, cr_tool: str) -> None:
logging.info('Submitting %s', gs_url)
- suffix = os.path.splitext(gs_url)[1]
with temp_dir() as tempdir:
- target_name = 'test_case' + suffix
- target = os.path.join(tempdir, target_name)
- subprocess.run(['gsutil.py', 'cp', gs_url, target], check=True)
- subprocess.run(['tar', 'xaf', target_name], check=True, cwd=tempdir)
- os.unlink(target)
+ download_and_unpack_test_case(gs_url, tempdir)
# Sometimes (e.g., in
# gs://chrome-clang-crash-reports/v1/2020/03/27/
# chromium.clang-ToTiOS-12754-GTXToolKit-2bfcde.tgz)
# we'll get `.crash` files. Unclear why, but let's filter them out anyway.
- repro_files = [x for x in os.listdir(tempdir) if not x.endswith('.crash')]
+ repro_files = [
+ os.path.join(tempdir, x)
+ for x in os.listdir(tempdir)
+ if not x.endswith('.crash')
+ ]
assert len(repro_files) == 2, repro_files
if repro_files[0].endswith('.sh'):
sh_file, src_file = repro_files
@@ -124,6 +130,13 @@ def submit_test_case(gs_url: str, cr_tool: str) -> None:
src_file, sh_file = repro_files
assert sh_file.endswith('.sh'), repro_files
+  # Peephole: Lexan got a crash upload from a very old clang. Ignore it.
+ with open(sh_file, encoding='utf-8') as f:
+ if 'Crash reproducer for clang version 9.0.0' in f.read():
+      logging.warning('Skipping upload for %s; it seems to use an old clang',
+ gs_url)
+ return
+
subprocess.run(
[
cr_tool,
diff --git a/llvm_tools/upload_lexan_crashes_to_forcey_test.py b/llvm_tools/upload_lexan_crashes_to_forcey_test.py
index 3c9c0d4b..937cbf8e 100755
--- a/llvm_tools/upload_lexan_crashes_to_forcey_test.py
+++ b/llvm_tools/upload_lexan_crashes_to_forcey_test.py
@@ -7,6 +7,7 @@
"""Tests for upload_lexan_crashes_to_forcey."""
import datetime
+import os
import unittest
import unittest.mock
@@ -117,6 +118,29 @@ class Test(unittest.TestCase):
),
])
+ @unittest.mock.patch(
+ 'upload_lexan_crashes_to_forcey.download_and_unpack_test_case')
+ @unittest.mock.patch('subprocess.run')
+ def test_test_case_submission_functions(self, subprocess_run_mock,
+ download_and_unpack_mock):
+ mock_gs_url = 'gs://foo/bar/baz'
+
+ def side_effect(gs_url: str, tempdir: str) -> None:
+ self.assertEqual(gs_url, mock_gs_url)
+
+ with open(os.path.join(tempdir, 'test_case.c'), 'w') as f:
+ # All we need is an empty file here.
+ pass
+
+ with open(
+ os.path.join(tempdir, 'test_case.sh'), 'w', encoding='utf-8') as f:
+ f.write('# Crash reproducer for clang version 9.0.0 (...)\n')
+ f.write('clang something or other\n')
+
+ download_and_unpack_mock.side_effect = side_effect
+ upload_lexan_crashes_to_forcey.submit_test_case(mock_gs_url, '4c')
+ subprocess_run_mock.assert_not_called()
+
if __name__ == '__main__':
unittest.main()