author    Jordan R Abrahams-Whitehead <ajordanr@google.com>  2022-03-04 01:01:06 +0000
committer Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>  2022-03-04 01:01:06 +0000
commit    178e686da38f18bbc35b0f1903c93effa94bc6b4 (patch)
tree      b0522edde1d3c5356c95eb1ee2eae3e87befa1f3
parent    9d023268dcf468f9d4bbd874545c9c230e7549b3 (diff)
parent    8a12172a247b7fee03c37d0b5ef5cfe818c8ee4d (diff)
Merging 25 commit(s) from Chromium's toolchain-utils am: 9090b1a17b am: c9cb1157f1 am: 8a12172a24
Original change: https://android-review.googlesource.com/c/platform/external/toolchain-utils/+/2006491

Change-Id: Iec01482f09cfb65d135808f846754e9aedeeca4f
-rw-r--r--  afdo_metadata/kernel_afdo.json                  8
-rw-r--r--  compiler_wrapper/ccache_flag.go                 7
-rw-r--r--  compiler_wrapper/ccache_flag_test.go           13
-rwxr-xr-x  cros_utils/command_executer.py                142
-rwxr-xr-x  llvm_tools/get_upstream_patch.py               11
-rwxr-xr-x  llvm_tools/nightly_revert_checker.py           21
-rwxr-xr-x  llvm_tools/patch_manager.py                     9
-rwxr-xr-x  llvm_tools/patch_manager_unittest.py          112
-rw-r--r--  llvm_tools/patch_sync/Cargo.lock                9
-rw-r--r--  llvm_tools/patch_sync/Cargo.toml                3
-rw-r--r--  llvm_tools/patch_sync/src/android_utils.rs     62
-rw-r--r--  llvm_tools/patch_sync/src/main.rs             253
-rw-r--r--  llvm_tools/patch_sync/src/patch_parsing.rs    161
-rw-r--r--  llvm_tools/patch_sync/src/version_control.rs  282
-rwxr-xr-x  rust_tools/rust_watch.py                        2
-rwxr-xr-x  rust_tools/rust_watch_test.py                   6
16 files changed, 865 insertions(+), 236 deletions(-)
diff --git a/afdo_metadata/kernel_afdo.json b/afdo_metadata/kernel_afdo.json
index a3275004..49e19277 100644
--- a/afdo_metadata/kernel_afdo.json
+++ b/afdo_metadata/kernel_afdo.json
@@ -1,14 +1,14 @@
{
"chromeos-kernel-4_4": {
- "name": "R99-14388.8-1640601492"
+ "name": "R100-14516.0-1645439511"
},
"chromeos-kernel-4_14": {
- "name": "R99-14407.0-1640601365"
+ "name": "R100-14516.0-1645439661"
},
"chromeos-kernel-4_19": {
- "name": "R99-14388.8-1640601108"
+ "name": "R100-14516.0-1645439606"
},
"chromeos-kernel-5_4": {
- "name": "R99-14397.0-1640601195"
+ "name": "R100-14516.0-1645439482"
}
}
diff --git a/compiler_wrapper/ccache_flag.go b/compiler_wrapper/ccache_flag.go
index 265b8fc2..02fb43ac 100644
--- a/compiler_wrapper/ccache_flag.go
+++ b/compiler_wrapper/ccache_flag.go
@@ -19,6 +19,13 @@ func processCCacheFlag(builder *commandBuilder) {
return arg.value
})
+ // Disable ccache during portage's src_configure phase. Using ccache here is generally a
+ // waste of time, since these files are very small. Experimentally, this speeds up
+ // configuring by ~13%.
+ if val, present := builder.env.getenv("EBUILD_PHASE"); present && val == "configure" {
+ useCCache = false
+ }
+
if builder.cfg.useCCache && useCCache {
// Note: we used to also set CCACHE_BASEDIR but don't do it
// anymore for reasons outlined in crrev.com/c/2103170.
diff --git a/compiler_wrapper/ccache_flag_test.go b/compiler_wrapper/ccache_flag_test.go
index 50205312..d6eeb926 100644
--- a/compiler_wrapper/ccache_flag_test.go
+++ b/compiler_wrapper/ccache_flag_test.go
@@ -174,3 +174,16 @@ func TestRusagePreventsCCache(t *testing.T) {
}
})
}
+
+func TestCcacheIsDisabledInSrcConfigure(t *testing.T) {
+ withCCacheEnabledTestContext(t, func(ctx *testContext) {
+ ctx.NoteTestWritesToUmask()
+
+ ctx.env = append(ctx.env, "EBUILD_PHASE=configure")
+ cmd := ctx.must(callCompiler(ctx, ctx.cfg,
+ ctx.newCommand(gccX86_64, mainCc)))
+ if err := verifyPath(cmd, gccX86_64+".real"); err != nil {
+ t.Error(err)
+ }
+ })
+}
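
The gate keys off portage's EBUILD_PHASE environment variable, and the test drives it by injecting EBUILD_PHASE=configure into the wrapper's environment and checking that the compiler resolves to the .real binary rather than ccache. A minimal sketch of the same decision, written in Rust for illustration only (the wrapper itself is Go; the names here are hypothetical):

    fn should_use_ccache(cfg_use_ccache: bool, flag_use_ccache: bool) -> bool {
        // Skip ccache while portage runs src_configure: configure-time
        // compiles are tiny, so caching them costs more than it saves.
        let in_configure = std::env::var("EBUILD_PHASE").as_deref() == Ok("configure");
        cfg_use_ccache && flag_use_ccache && !in_configure
    }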
diff --git a/cros_utils/command_executer.py b/cros_utils/command_executer.py
index aeedf3ea..cc0f3372 100755
--- a/cros_utils/command_executer.py
+++ b/cros_utils/command_executer.py
@@ -103,14 +103,13 @@ class CommandExecuter(object):
p = None
try:
# pylint: disable=bad-option-value, subprocess-popen-preexec-fn
- p = subprocess.Popen(
- cmd,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- shell=True,
- preexec_fn=os.setsid,
- executable='/bin/bash',
- env=env)
+ p = subprocess.Popen(cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ shell=True,
+ preexec_fn=os.setsid,
+ executable='/bin/bash',
+ env=env)
full_stdout = ''
full_stderr = ''
@@ -159,16 +158,17 @@ class CommandExecuter(object):
if p.poll() is not None:
if terminated_time is None:
terminated_time = time.time()
- elif (terminated_timeout is not None and
- time.time() - terminated_time > terminated_timeout):
+ elif (terminated_timeout is not None
+ and time.time() - terminated_time > terminated_timeout):
if self.logger:
self.logger.LogWarning(
'Timeout of %s seconds reached since '
- 'process termination.' % terminated_timeout, print_to_console)
+ 'process termination.' % terminated_timeout,
+ print_to_console)
break
- if (command_timeout is not None and
- time.time() - started_time > command_timeout):
+ if (command_timeout is not None
+ and time.time() - started_time > command_timeout):
os.killpg(os.getpgid(p.pid), signal.SIGTERM)
if self.logger:
self.logger.LogWarning(
@@ -242,9 +242,11 @@ class CommandExecuter(object):
return command
def WriteToTempShFile(self, contents):
- with tempfile.NamedTemporaryFile(
- 'w', encoding='utf-8', delete=False, prefix=os.uname()[1],
- suffix='.sh') as f:
+ with tempfile.NamedTemporaryFile('w',
+ encoding='utf-8',
+ delete=False,
+ prefix=os.uname()[1],
+ suffix='.sh') as f:
f.write('#!/bin/bash\n')
f.write(contents)
f.flush()
@@ -292,16 +294,15 @@ class CommandExecuter(object):
machine, port = machine.split(':')
# Write all commands to a file.
command_file = self.WriteToTempShFile(cmd)
- retval = self.CopyFiles(
- command_file,
- command_file,
- dest_machine=machine,
- dest_port=port,
- command_terminator=command_terminator,
- chromeos_root=chromeos_root,
- dest_cros=True,
- recursive=False,
- print_to_console=print_to_console)
+ retval = self.CopyFiles(command_file,
+ command_file,
+ dest_machine=machine,
+ dest_port=port,
+ command_terminator=command_terminator,
+ chromeos_root=chromeos_root,
+ dest_cros=True,
+ recursive=False,
+ print_to_console=print_to_console)
if retval:
if self.logger:
self.logger.LogError('Could not run remote command on machine.'
@@ -311,13 +312,12 @@ class CommandExecuter(object):
command = self.RemoteAccessInitCommand(chromeos_root, machine, port)
command += '\nremote_sh bash %s' % command_file
command += '\nl_retval=$?; echo "$REMOTE_OUT"; exit $l_retval'
- retval = self.RunCommandGeneric(
- command,
- return_output,
- command_terminator=command_terminator,
- command_timeout=command_timeout,
- terminated_timeout=terminated_timeout,
- print_to_console=print_to_console)
+ retval = self.RunCommandGeneric(command,
+ return_output,
+ command_terminator=command_terminator,
+ command_timeout=command_timeout,
+ terminated_timeout=terminated_timeout,
+ print_to_console=print_to_console)
if return_output:
connect_signature = ('Initiating first contact with remote host\n' +
'Connection OK\n')
@@ -372,13 +372,13 @@ class CommandExecuter(object):
if self.logger:
self.logger.LogCmd(command, print_to_console=print_to_console)
- with tempfile.NamedTemporaryFile(
- 'w',
- encoding='utf-8',
- delete=False,
- dir=os.path.join(chromeos_root, 'src/scripts'),
- suffix='.sh',
- prefix='in_chroot_cmd') as f:
+ with tempfile.NamedTemporaryFile('w',
+ encoding='utf-8',
+ delete=False,
+ dir=os.path.join(chromeos_root,
+ 'src/scripts'),
+ suffix='.sh',
+ prefix='in_chroot_cmd') as f:
f.write('#!/bin/bash\n')
f.write(command)
f.write('\n')
@@ -393,7 +393,11 @@ class CommandExecuter(object):
if return_output:
ret = self.RunCommand(
'cd %s; cros_sdk %s -- true' % (chromeos_root, cros_sdk_options),
- env=env)
+ env=env,
+ # Give this command a long time to execute; it might involve setting
+ # the chroot up, or running fstrim on its image file. Both of these
+ # operations can take well over the timeout default of 10 seconds.
+ terminated_timeout=5 * 60)
if ret:
return (ret, '', '')
@@ -402,14 +406,13 @@ class CommandExecuter(object):
command = ("cd %s; cros_sdk %s -- bash -c '%s/%s'" %
(chromeos_root, cros_sdk_options, CHROMEOS_SCRIPTS_DIR,
os.path.basename(command_file)))
- ret = self.RunCommandGeneric(
- command,
- return_output,
- command_terminator=command_terminator,
- command_timeout=command_timeout,
- terminated_timeout=terminated_timeout,
- print_to_console=print_to_console,
- env=env)
+ ret = self.RunCommandGeneric(command,
+ return_output,
+ command_terminator=command_terminator,
+ command_timeout=command_timeout,
+ terminated_timeout=terminated_timeout,
+ print_to_console=print_to_console,
+ env=env)
os.remove(command_file)
return ret
@@ -445,11 +448,10 @@ class CommandExecuter(object):
username=None,
command_terminator=None):
cmd = ' ;\n'.join(cmdlist)
- return self.RunCommand(
- cmd,
- machine=machine,
- username=username,
- command_terminator=command_terminator)
+ return self.RunCommand(cmd,
+ machine=machine,
+ username=username,
+ command_terminator=command_terminator)
def CopyFiles(self,
src,
@@ -505,12 +507,11 @@ class CommandExecuter(object):
else:
command += rsync_prefix + 'root@%s:%s %s' % (cros_machine, src, dest)
- return self.RunCommand(
- command,
- machine=host_machine,
- username=host_user,
- command_terminator=command_terminator,
- print_to_console=print_to_console)
+ return self.RunCommand(command,
+ machine=host_machine,
+ username=host_user,
+ command_terminator=command_terminator,
+ print_to_console=print_to_console)
if dest_machine == src_machine:
command = 'rsync -a %s %s' % (src, dest)
@@ -519,12 +520,11 @@ class CommandExecuter(object):
src_machine = os.uname()[1]
src_user = getpass.getuser()
command = 'rsync -a %s@%s:%s %s' % (src_user, src_machine, src, dest)
- return self.RunCommand(
- command,
- machine=dest_machine,
- username=dest_user,
- command_terminator=command_terminator,
- print_to_console=print_to_console)
+ return self.RunCommand(command,
+ machine=dest_machine,
+ username=dest_user,
+ command_terminator=command_terminator,
+ print_to_console=print_to_console)
def RunCommand2(self,
cmd,
@@ -593,8 +593,9 @@ class CommandExecuter(object):
def notify_line(self):
p = self._buf.find('\n')
while p >= 0:
- self._line_consumer(
- line=self._buf[:p + 1], output=self._name, pobject=self._pobject)
+ self._line_consumer(line=self._buf[:p + 1],
+ output=self._name,
+ pobject=self._pobject)
if p < len(self._buf) - 1:
self._buf = self._buf[p + 1:]
p = self._buf.find('\n')
@@ -606,8 +607,9 @@ class CommandExecuter(object):
def notify_eos(self):
# Notify end of stream. The last line may not end with a '\n'.
if self._buf != '':
- self._line_consumer(
- line=self._buf, output=self._name, pobject=self._pobject)
+ self._line_consumer(line=self._buf,
+ output=self._name,
+ pobject=self._pobject)
self._buf = ''
if self.log_level == 'verbose':
diff --git a/llvm_tools/get_upstream_patch.py b/llvm_tools/get_upstream_patch.py
index 7a4be3eb..5669b023 100755
--- a/llvm_tools/get_upstream_patch.py
+++ b/llvm_tools/get_upstream_patch.py
@@ -96,15 +96,18 @@ def add_patch(patches_json_path: str, patches_dir: str,
cwd=llvm_dir,
encoding='utf-8')
+ end_vers = rev.number if isinstance(rev, git_llvm_rev.Rev) else None
patch_props = {
'rel_patch_path': rel_patch_path,
- 'start_version': start_version.number,
'metadata': {
'title': commit_subject.strip(),
'info': [],
},
'platforms': sorted(platforms),
- 'end_version': rev.number if isinstance(rev, git_llvm_rev.Rev) else None,
+ 'version_range': {
+ 'from': start_version.number,
+ 'until': end_vers,
+ },
}
patches_json.append(patch_props)
@@ -346,8 +349,8 @@ def _convert_patch(llvm_config: git_llvm_rev.LLVMConfig,
is_differential=is_differential)
-def _get_duplicate_shas(
- patches: t.List[ParsedPatch]) -> t.List[t.Tuple[ParsedPatch, ParsedPatch]]:
+def _get_duplicate_shas(patches: t.List[ParsedPatch]
+ ) -> t.List[t.Tuple[ParsedPatch, ParsedPatch]]:
"""Return a list of Patches which have duplicate SHA's"""
return [(left, right) for i, left in enumerate(patches)
for right in patches[i + 1:] if left.sha == right.sha]
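
add_patch now nests the patch's applicable window under a version_range object instead of the old top-level start_version/end_version fields; until stays None/null when the patch has no upper bound. A minimal serde_json sketch of the resulting entry shape, mirroring the Rust schema this commit adds to patch_sync (the patch name and version numbers are made up):

    fn main() {
        let entry = serde_json::json!({
            "rel_patch_path": "cherry/example.patch",
            "metadata": { "title": "Example patch", "info": [] },
            "platforms": ["chromiumos"],
            // Half-open window; "until": null means no upper bound.
            "version_range": { "from": 433403, "until": null },
        });
        assert_eq!(entry["version_range"]["from"], 433403);
    }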
diff --git a/llvm_tools/nightly_revert_checker.py b/llvm_tools/nightly_revert_checker.py
index 5e878816..89485088 100755
--- a/llvm_tools/nightly_revert_checker.py
+++ b/llvm_tools/nightly_revert_checker.py
@@ -24,10 +24,11 @@ import typing as t
import cros_utils.email_sender as email_sender
import cros_utils.tiny_render as tiny_render
+
import get_llvm_hash
+import get_upstream_patch
import git_llvm_rev
import revert_checker
-import get_upstream_patch
State = t.Any
@@ -38,11 +39,19 @@ def _find_interesting_android_shas(android_llvm_toolchain_dir: str
'toolchain/llvm-project')
def get_llvm_merge_base(branch: str) -> str:
- return subprocess.check_output(
+ head_sha = subprocess.check_output(
+ ['git', 'rev-parse', branch],
+ cwd=llvm_project,
+ encoding='utf-8',
+ ).strip()
+ merge_base = subprocess.check_output(
['git', 'merge-base', branch, 'aosp/upstream-main'],
cwd=llvm_project,
encoding='utf-8',
).strip()
+ logging.info('Merge-base for %s (HEAD == %s) and upstream-main is %s',
+ branch, head_sha, merge_base)
+ return merge_base
main_legacy = get_llvm_merge_base('aosp/master-legacy') # nocheck
testing_upstream = get_llvm_merge_base('aosp/testing-upstream')
@@ -51,6 +60,9 @@ def _find_interesting_android_shas(android_llvm_toolchain_dir: str
# If these are the same SHA, there's no point in tracking both.
if main_legacy != testing_upstream:
result.append(('testing-upstream', testing_upstream))
+ else:
+ logging.info('main-legacy and testing-upstream are identical; ignoring '
+ 'the latter.')
return result
@@ -230,12 +242,15 @@ def do_cherrypick(chroot_path: str, llvm_dir: str,
seen.add(friendly_name)
for sha, reverted_sha in reverts:
try:
+ # We upload reverts for all platforms by default, since there's no
+ # real reason for them to be CrOS-specific.
get_upstream_patch.get_from_upstream(chroot_path=chroot_path,
create_cl=True,
start_sha=reverted_sha,
patches=[sha],
reviewers=reviewers,
- cc=cc)
+ cc=cc,
+ platforms=())
except get_upstream_patch.CherrypickError as e:
logging.info('%s, skipping...', str(e))
return new_state
diff --git a/llvm_tools/patch_manager.py b/llvm_tools/patch_manager.py
index 3c83fa96..f2d6b322 100755
--- a/llvm_tools/patch_manager.py
+++ b/llvm_tools/patch_manager.py
@@ -212,8 +212,13 @@ def GetPatchMetadata(patch_dict):
"""
# Get the metadata values of a patch if possible.
- start_version = patch_dict.get('start_version', 0)
- end_version = patch_dict.get('end_version', None)
+ # FIXME(b/221489531): Remove start_version & end_version
+ if 'version_range' in patch_dict:
+ start_version = patch_dict['version_range'].get('from', 0)
+ end_version = patch_dict['version_range'].get('until', None)
+ else:
+ start_version = patch_dict.get('start_version', 0)
+ end_version = patch_dict.get('end_version', None)
is_critical = patch_dict.get('is_critical', False)
return start_version, end_version, is_critical
diff --git a/llvm_tools/patch_manager_unittest.py b/llvm_tools/patch_manager_unittest.py
index 62947ed1..69bb683e 100755
--- a/llvm_tools/patch_manager_unittest.py
+++ b/llvm_tools/patch_manager_unittest.py
@@ -182,7 +182,9 @@ class PatchManagerTest(unittest.TestCase):
test_patch = {
'comment': 'Redirects output to stdout',
'rel_patch_path': 'cherry/fixes_stdout.patch',
- 'end_version': 1000
+ 'version_range': {
+ 'until': 1000,
+ }
}
self.assertEqual(
@@ -275,7 +277,9 @@ class PatchManagerTest(unittest.TestCase):
patch = [{
'comment': 'Redirects output to stdout',
'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 10
+ 'version_range': {
+ 'from': 10,
+ },
}]
abs_patch_path = '/abs/path/to/filesdir/PATCHES'
@@ -293,13 +297,17 @@ class PatchManagerTest(unittest.TestCase):
test_updated_patch_metadata = [{
'comment': 'Redirects output to stdout',
'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 10
+ 'version_range': {
+ 'from': 10,
+ }
}]
expected_patch_metadata = {
'comment': 'Redirects output to stdout',
'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 10
+ 'version_range': {
+ 'from': 10,
+ }
}
with CreateTemporaryJsonFile() as json_test_file:
@@ -335,7 +343,9 @@ class PatchManagerTest(unittest.TestCase):
test_patch_metadata = [{
'comment': 'Redirects output to stdout',
'rel_patch_path': rel_patch_path,
- 'start_version': 10
+ 'version_range': {
+ 'from': 10,
+ }
}]
with CreateTemporaryJsonFile() as json_test_file:
@@ -379,7 +389,9 @@ class PatchManagerTest(unittest.TestCase):
test_patch_metadata = [{
'comment': 'Redirects output to stdout',
'rel_patch_path': rel_patch_path,
- 'start_version': 1000
+ 'version_range': {
+ 'from': 1000,
+ },
}]
with CreateTemporaryJsonFile() as json_test_file:
@@ -415,28 +427,36 @@ class PatchManagerTest(unittest.TestCase):
test_patch_1 = {
'comment': 'Redirects output to stdout',
'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 1000,
- 'end_version': 1250
+ 'version_range': {
+ 'from': 1000,
+ 'until': 1250
+ }
}
test_patch_2 = {
'comment': 'Fixes input',
'rel_patch_path': 'cherry/fixes_input.patch',
- 'start_version': 1000
+ 'version_range': {
+ 'from': 1000
+ }
}
test_patch_3 = {
'comment': 'Adds a warning',
'rel_patch_path': 'add_warning.patch',
- 'start_version': 750,
- 'end_version': 1500
+ 'version_range': {
+ 'from': 750,
+ 'until': 1500
+ }
}
test_patch_4 = {
'comment': 'Adds a helper function',
'rel_patch_path': 'add_helper.patch',
- 'start_version': 20,
- 'end_version': 900
+ 'version_range': {
+ 'from': 20,
+ 'until': 900
+ }
}
test_patch_metadata = [
@@ -520,28 +540,36 @@ class PatchManagerTest(unittest.TestCase):
test_patch_1 = {
'comment': 'Redirects output to stdout',
'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 1000,
- 'end_version': 1190
+ 'version_range': {
+ 'from': 1000,
+ 'until': 1190
+ }
}
test_patch_2 = {
'comment': 'Fixes input',
'rel_patch_path': 'cherry/fixes_input.patch',
- 'start_version': 1000
+ 'version_range': {
+ 'from': 1000
+ }
}
test_patch_3 = {
'comment': 'Adds a warning',
'rel_patch_path': 'add_warning.patch',
- 'start_version': 750,
- 'end_version': 1500
+ 'version_range': {
+ 'from': 750,
+ 'until': 1500
+ }
}
test_patch_4 = {
'comment': 'Adds a helper function',
'rel_patch_path': 'add_helper.patch',
- 'start_version': 20,
- 'end_version': 2000
+ 'version_range': {
+ 'from': 20,
+ 'until': 2000
+ }
}
test_patch_metadata = [
@@ -654,8 +682,10 @@ class PatchManagerTest(unittest.TestCase):
test_patch_1 = {
'comment': 'Redirects output to stdout',
'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 1000,
- 'end_version': 1190
+ 'version_range': {
+ 'from': 1000,
+ 'until': 1190
+ }
}
# For the 'remove_patches' mode, this patch is expected to be in the
@@ -665,7 +695,9 @@ class PatchManagerTest(unittest.TestCase):
test_patch_2 = {
'comment': 'Fixes input',
'rel_patch_path': 'cherry/fixes_input.patch',
- 'start_version': 1000
+ 'version_range': {
+ 'from': 1000
+ }
}
# For the 'remove_patches' mode, this patch is expected to be in the
@@ -674,8 +706,10 @@ class PatchManagerTest(unittest.TestCase):
test_patch_3 = {
'comment': 'Adds a warning',
'rel_patch_path': 'add_warning.patch',
- 'start_version': 750,
- 'end_version': 1500
+ 'version_range': {
+ 'from': 750,
+ 'until': 1500
+ }
}
# For the 'remove_patches' mode, this patch is expected to be in the
@@ -684,8 +718,10 @@ class PatchManagerTest(unittest.TestCase):
test_patch_4 = {
'comment': 'Adds a helper function',
'rel_patch_path': 'add_helper.patch',
- 'start_version': 20,
- 'end_version': 1400
+ 'version_range': {
+ 'from': 20,
+ 'until': 1400
+ }
}
test_patch_metadata = [
@@ -786,8 +822,10 @@ class PatchManagerTest(unittest.TestCase):
test_patch_1 = {
'comment': 'Redirects output to stdout',
'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 1000,
- 'end_version': 1190
+ 'version_range': {
+ 'from': 1000,
+ 'until': 1190
+ }
}
# For the 'remove_patches' mode, this patch is expected to be in the
@@ -797,7 +835,9 @@ class PatchManagerTest(unittest.TestCase):
test_patch_2 = {
'comment': 'Fixes input',
'rel_patch_path': 'cherry/fixes_input.patch',
- 'start_version': 1000
+ 'version_range': {
+ 'from': 1000,
+ }
}
# For the 'remove_patches' mode, this patch is expected to be in the
@@ -806,8 +846,10 @@ class PatchManagerTest(unittest.TestCase):
test_patch_3 = {
'comment': 'Adds a warning',
'rel_patch_path': 'add_warning.patch',
- 'start_version': 750,
- 'end_version': 1500
+ 'version_range': {
+ 'from': 750,
+ 'until': 1500
+ }
}
# For the 'remove_patches' mode, this patch is expected to be in the
@@ -816,8 +858,10 @@ class PatchManagerTest(unittest.TestCase):
test_patch_4 = {
'comment': 'Adds a helper function',
'rel_patch_path': 'add_helper.patch',
- 'start_version': 1600,
- 'end_version': 2000
+ 'version_range': {
+ 'from': 1600,
+ 'until': 2000
+ }
}
test_patch_metadata = [
diff --git a/llvm_tools/patch_sync/Cargo.lock b/llvm_tools/patch_sync/Cargo.lock
index 63a9fcf8..1ad74a77 100644
--- a/llvm_tools/patch_sync/Cargo.lock
+++ b/llvm_tools/patch_sync/Cargo.lock
@@ -162,11 +162,12 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
[[package]]
name = "patch_sync"
-version = "0.1.0"
+version = "1.1.0"
dependencies = [
"anyhow",
"rand",
"regex",
+ "scopeguard",
"serde",
"serde_json",
"sha2",
@@ -286,6 +287,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f"
[[package]]
+name = "scopeguard"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
+
+[[package]]
name = "serde"
version = "1.0.132"
source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/llvm_tools/patch_sync/Cargo.toml b/llvm_tools/patch_sync/Cargo.toml
index 43082627..ed33d5ca 100644
--- a/llvm_tools/patch_sync/Cargo.toml
+++ b/llvm_tools/patch_sync/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "patch_sync"
-version = "0.1.0"
+version = "1.1.0"
authors = ["Jordan R Abrahams-Whitehead <ajordanr@google.com>"]
edition = "2018"
@@ -15,6 +15,7 @@ serde_json = "1.0"
sha2 = "0.9"
structopt = "0.3"
time = "0.3"
+scopeguard = "1.1.0"
[dev-dependencies]
rand = "0.8"
diff --git a/llvm_tools/patch_sync/src/android_utils.rs b/llvm_tools/patch_sync/src/android_utils.rs
new file mode 100644
index 00000000..77cb4b8a
--- /dev/null
+++ b/llvm_tools/patch_sync/src/android_utils.rs
@@ -0,0 +1,62 @@
+use std::path::Path;
+use std::process::Command;
+
+use anyhow::{bail, ensure, Result};
+
+const LLVM_ANDROID_REL_PATH: &str = "toolchain/llvm_android";
+
+/// Return the Android checkout's current llvm version.
+///
+/// This uses android_version.get_svn_revision_number, a python function
+/// that can't be executed directly, so we spawn a python3 process
+/// to run it and capture the result.
+pub fn get_android_llvm_version(android_checkout: &Path) -> Result<String> {
+ let mut command = new_android_cmd(android_checkout, "python3")?;
+ command.args([
+ "-c",
+ "import android_version; print(android_version.get_svn_revision_number(), end='')",
+ ]);
+ let output = command.output()?;
+ if !output.status.success() {
+ bail!(
+ "could not get android llvm version: {}",
+ String::from_utf8_lossy(&output.stderr)
+ );
+ }
+ let out_string = String::from_utf8(output.stdout)?.trim().to_string();
+ Ok(out_string)
+}
+
+/// Sort the Android patches using the cherrypick_cl.py Android utility.
+///
+/// This assumes that:
+/// 1. There exists a python script called cherrypick_cl.py
+/// 2. Calling it with the given arguments sorts the PATCHES.json file.
+/// 3. Calling it does nothing besides sorting the PATCHES.json file.
+///
+/// We aren't doing our own sorting because we shouldn't have to update patch_sync along
+/// with cherrypick_cl.py any time they change the __lt__ implementation.
+pub fn sort_android_patches(android_checkout: &Path) -> Result<()> {
+ let mut command = new_android_cmd(android_checkout, "python3")?;
+ command.args(["cherrypick_cl.py", "--reason", "patch_sync sorting"]);
+ let output = command.output()?;
+ if !output.status.success() {
+ bail!(
+ "could not sort: {}",
+ String::from_utf8_lossy(&output.stderr)
+ );
+ }
+ Ok(())
+}
+
+fn new_android_cmd(android_checkout: &Path, cmd: &str) -> Result<Command> {
+ let mut command = Command::new(cmd);
+ let llvm_android_dir = android_checkout.join(LLVM_ANDROID_REL_PATH);
+ ensure!(
+ llvm_android_dir.is_dir(),
+ "can't make android command; {} is not a directory",
+ llvm_android_dir.display()
+ );
+ command.current_dir(llvm_android_dir);
+ Ok(command)
+}
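
Both helpers shell out via new_android_cmd, which pins the working directory to toolchain/llvm_android inside the checkout. A sketch of how the rest of the crate composes them (the checkout path is hypothetical; main.rs parses the returned version string into a u64 the same way):

    use std::path::Path;
    use anyhow::Result;

    fn sketch() -> Result<()> {
        let android_checkout = Path::new("/work/aosp"); // hypothetical checkout root
        // Returned as a string, e.g. "433403", via get_svn_revision_number.
        let version: u64 = crate::android_utils::get_android_llvm_version(android_checkout)?
            .parse()?;
        println!("Android LLVM is at r{}", version);
        // Keep PATCHES.json in the canonical order cherrypick_cl.py defines.
        crate::android_utils::sort_android_patches(android_checkout)?;
        Ok(())
    }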
diff --git a/llvm_tools/patch_sync/src/main.rs b/llvm_tools/patch_sync/src/main.rs
index 081ce01a..c244f1c0 100644
--- a/llvm_tools/patch_sync/src/main.rs
+++ b/llvm_tools/patch_sync/src/main.rs
@@ -1,63 +1,110 @@
+mod android_utils;
mod patch_parsing;
mod version_control;
+use std::borrow::ToOwned;
+use std::collections::BTreeSet;
+use std::path::{Path, PathBuf};
+
use anyhow::{Context, Result};
-use std::path::PathBuf;
use structopt::StructOpt;
+use patch_parsing::{filter_patches_by_platform, PatchCollection, PatchDictSchema};
+use version_control::RepoSetupContext;
+
fn main() -> Result<()> {
match Opt::from_args() {
Opt::Show {
cros_checkout_path,
android_checkout_path,
sync,
- } => show_subcmd(cros_checkout_path, android_checkout_path, sync),
+ keep_unmerged,
+ } => show_subcmd(ShowOpt {
+ cros_checkout_path,
+ android_checkout_path,
+ sync,
+ keep_unmerged,
+ }),
Opt::Transpose {
cros_checkout_path,
+ cros_reviewers,
old_cros_ref,
android_checkout_path,
+ android_reviewers,
old_android_ref,
sync,
verbose,
dry_run,
no_commit,
+ wip,
+ disable_cq,
} => transpose_subcmd(TransposeOpt {
cros_checkout_path,
+ cros_reviewers: cros_reviewers
+ .map(|r| r.split(',').map(ToOwned::to_owned).collect())
+ .unwrap_or_default(),
old_cros_ref,
android_checkout_path,
+ android_reviewers: android_reviewers
+ .map(|r| r.split(',').map(ToOwned::to_owned).collect())
+ .unwrap_or_default(),
old_android_ref,
sync,
verbose,
dry_run,
no_commit,
+ wip,
+ disable_cq,
}),
}
}
-fn show_subcmd(
+struct ShowOpt {
cros_checkout_path: PathBuf,
android_checkout_path: PathBuf,
+ keep_unmerged: bool,
sync: bool,
-) -> Result<()> {
- let ctx = version_control::RepoSetupContext {
+}
+
+fn show_subcmd(args: ShowOpt) -> Result<()> {
+ let ShowOpt {
+ cros_checkout_path,
+ android_checkout_path,
+ keep_unmerged,
+ sync,
+ } = args;
+ let ctx = RepoSetupContext {
cros_checkout: cros_checkout_path,
android_checkout: android_checkout_path,
sync_before: sync,
+ wip_mode: true, // Has no effect, as we're not making changes
+ enable_cq: false, // Has no effect, as we're not uploading anything
};
ctx.setup()?;
- let cros_patches_path = ctx.cros_patches_path();
- let android_patches_path = ctx.android_patches_path();
- let cur_cros_collection = patch_parsing::PatchCollection::parse_from_file(&cros_patches_path)
- .context("could not parse cros PATCHES.json")?;
- let cur_android_collection =
- patch_parsing::PatchCollection::parse_from_file(&android_patches_path)
- .context("could not parse android PATCHES.json")?;
+ let make_collection = |platform: &str, patches_fp: &Path| -> Result<PatchCollection> {
+ let parsed_collection = PatchCollection::parse_from_file(patches_fp)
+ .with_context(|| format!("could not parse {} PATCHES.json", platform))?;
+ Ok(if keep_unmerged {
+ parsed_collection
+ } else {
+ filter_patches_by_platform(&parsed_collection, platform).map_patches(|p| {
+                    // Build the platforms set by hand; Rust 1.55 cannot use BTreeSet::from.
+ let mut platforms = BTreeSet::new();
+ platforms.insert(platform.to_string());
+ PatchDictSchema {
+ platforms,
+ ..p.clone()
+ }
+ })
+ })
+ };
+ let cur_cros_collection = make_collection("chromiumos", &ctx.cros_patches_path())?;
+ let cur_android_collection = make_collection("android", &ctx.android_patches_path())?;
let merged = cur_cros_collection.union(&cur_android_collection)?;
println!("{}", merged.serialize_patches()?);
Ok(())
}
-#[allow(dead_code)]
struct TransposeOpt {
cros_checkout_path: PathBuf,
old_cros_ref: String,
@@ -67,59 +114,147 @@ struct TransposeOpt {
verbose: bool,
dry_run: bool,
no_commit: bool,
+ cros_reviewers: Vec<String>,
+ android_reviewers: Vec<String>,
+ wip: bool,
+ disable_cq: bool,
}
fn transpose_subcmd(args: TransposeOpt) -> Result<()> {
- let ctx = version_control::RepoSetupContext {
+ let ctx = RepoSetupContext {
cros_checkout: args.cros_checkout_path,
android_checkout: args.android_checkout_path,
sync_before: args.sync,
+ wip_mode: args.wip,
+ enable_cq: !args.disable_cq,
};
ctx.setup()?;
let cros_patches_path = ctx.cros_patches_path();
let android_patches_path = ctx.android_patches_path();
- // Chromium OS Patches ----------------------------------------------------
- let mut cur_cros_collection =
- patch_parsing::PatchCollection::parse_from_file(&cros_patches_path)
- .context("parsing cros PATCHES.json")?;
- let new_cros_patches: patch_parsing::PatchCollection = {
- let cros_old_patches_json = ctx.old_cros_patch_contents(&args.old_cros_ref)?;
- let old_cros_collection = patch_parsing::PatchCollection::parse_from_str(
- cros_patches_path.parent().unwrap().to_path_buf(),
- &cros_old_patches_json,
- )?;
- cur_cros_collection.subtract(&old_cros_collection)?
- };
+ // Get new Patches -------------------------------------------------------
+ let (cur_cros_collection, new_cros_patches) = patch_parsing::new_patches(
+ &cros_patches_path,
+ &ctx.old_cros_patch_contents(&args.old_cros_ref)?,
+ "chromiumos",
+ )
+ .context("finding new patches for chromiumos")?;
+ let (cur_android_collection, new_android_patches) = patch_parsing::new_patches(
+ &android_patches_path,
+ &ctx.old_android_patch_contents(&args.old_android_ref)?,
+ "android",
+ )
+ .context("finding new patches for android")?;
+
+ // Have to ignore patches that are already at the destination, even if
+ // the patches are new.
+ let new_cros_patches = new_cros_patches.subtract(&cur_android_collection)?;
+ let new_android_patches = new_android_patches.subtract(&cur_cros_collection)?;
- // Android Patches -------------------------------------------------------
- let mut cur_android_collection =
- patch_parsing::PatchCollection::parse_from_file(&android_patches_path)
- .context("parsing android PATCHES.json")?;
- let new_android_patches: patch_parsing::PatchCollection = {
- let android_old_patches_json = ctx.old_android_patch_contents(&args.old_android_ref)?;
- let old_android_collection = patch_parsing::PatchCollection::parse_from_str(
- android_patches_path.parent().unwrap().to_path_buf(),
- &android_old_patches_json,
- )?;
- cur_android_collection.subtract(&old_android_collection)?
+ // Need to do an extra filtering step for Android, as AOSP doesn't
+ // want patches outside of the start/end bounds.
+ let android_llvm_version: u64 = {
+ let android_llvm_version_str =
+ android_utils::get_android_llvm_version(&ctx.android_checkout)?;
+ android_llvm_version_str.parse::<u64>().with_context(|| {
+ format!(
+ "converting llvm version to u64: '{}'",
+ android_llvm_version_str
+ )
+ })?
};
+ let new_android_patches = new_android_patches.filter_patches(|p| {
+ match (p.get_start_version(), p.get_end_version()) {
+ (Some(start), Some(end)) => start <= android_llvm_version && android_llvm_version < end,
+ (Some(start), None) => start <= android_llvm_version,
+ (None, Some(end)) => android_llvm_version < end,
+ (None, None) => true,
+ }
+ });
+
+ if args.verbose {
+ display_patches("New patches from Chromium OS", &new_cros_patches);
+ display_patches("New patches from Android", &new_android_patches);
+ }
+
+ if args.dry_run {
+ println!("--dry-run specified; skipping modifications");
+ return Ok(());
+ }
+
+ modify_repos(
+ &ctx,
+ args.no_commit,
+ ModifyOpt {
+ new_cros_patches,
+ cur_cros_collection,
+ cros_reviewers: args.cros_reviewers,
+ new_android_patches,
+ cur_android_collection,
+ android_reviewers: args.android_reviewers,
+ },
+ )
+}
+struct ModifyOpt {
+ new_cros_patches: PatchCollection,
+ cur_cros_collection: PatchCollection,
+ cros_reviewers: Vec<String>,
+ new_android_patches: PatchCollection,
+ cur_android_collection: PatchCollection,
+ android_reviewers: Vec<String>,
+}
+
+fn modify_repos(ctx: &RepoSetupContext, no_commit: bool, opt: ModifyOpt) -> Result<()> {
+ // Cleanup on scope exit.
+ scopeguard::defer! {
+ ctx.cleanup();
+ }
// Transpose Patches -----------------------------------------------------
- new_cros_patches.transpose_write(&mut cur_cros_collection)?;
- new_android_patches.transpose_write(&mut cur_android_collection)?;
+ let mut cur_android_collection = opt.cur_android_collection;
+ let mut cur_cros_collection = opt.cur_cros_collection;
+ if !opt.new_cros_patches.is_empty() {
+ opt.new_cros_patches
+ .transpose_write(&mut cur_android_collection)?;
+ }
+ if !opt.new_android_patches.is_empty() {
+ opt.new_android_patches
+ .transpose_write(&mut cur_cros_collection)?;
+ }
- if !args.no_commit {
+ if no_commit {
+ println!("--no-commit specified; not committing or uploading");
return Ok(());
}
// Commit and upload for review ------------------------------------------
- ctx.cros_repo_upload()
- .context("uploading chromiumos changes")?;
- ctx.android_repo_upload()
- .context("uploading android changes")?;
+ // Note we want to check if the android patches are empty for CrOS, and
+ // vice versa. This is a little counterintuitive.
+ if !opt.new_android_patches.is_empty() {
+ ctx.cros_repo_upload(&opt.cros_reviewers)
+ .context("uploading chromiumos changes")?;
+ }
+ if !opt.new_cros_patches.is_empty() {
+ if let Err(e) = android_utils::sort_android_patches(&ctx.android_checkout) {
+ eprintln!(
+ "Couldn't sort Android patches; continuing. Caused by: {}",
+ e
+ );
+ }
+ ctx.android_repo_upload(&opt.android_reviewers)
+ .context("uploading android changes")?;
+ }
Ok(())
}
+fn display_patches(prelude: &str, collection: &PatchCollection) {
+ println!("{}", prelude);
+ if collection.patches.is_empty() {
+ println!(" [No Patches]");
+ return;
+ }
+ println!("{}", collection);
+}
+
#[derive(Debug, structopt::StructOpt)]
#[structopt(name = "patch_sync", about = "A pipeline for syncing the patch code")]
enum Opt {
@@ -130,6 +265,12 @@ enum Opt {
cros_checkout_path: PathBuf,
#[structopt(parse(from_os_str))]
android_checkout_path: PathBuf,
+
+ /// Keep a patch's platform field even if it's not merged at that platform.
+ #[structopt(long)]
+ keep_unmerged: bool,
+
+ /// Run repo sync before transposing.
#[structopt(short, long)]
sync: bool,
},
@@ -140,6 +281,11 @@ enum Opt {
#[structopt(long = "cros-checkout", parse(from_os_str))]
cros_checkout_path: PathBuf,
+ /// Emails to send review requests to during Chromium OS upload.
+ /// Comma separated.
+ #[structopt(long = "cros-rev")]
+ cros_reviewers: Option<String>,
+
/// Git ref (e.g. hash) for the ChromiumOS overlay to use as the base.
#[structopt(long = "overlay-base-ref")]
old_cros_ref: String,
@@ -148,6 +294,11 @@ enum Opt {
#[structopt(long = "aosp-checkout", parse(from_os_str))]
android_checkout_path: PathBuf,
+ /// Emails to send review requests to during Android upload.
+ /// Comma separated.
+ #[structopt(long = "aosp-rev")]
+ android_reviewers: Option<String>,
+
/// Git ref (e.g. hash) for the llvm_android repo to use as the base.
#[structopt(long = "aosp-base-ref")]
old_android_ref: String,
@@ -161,13 +312,21 @@ enum Opt {
verbose: bool,
/// Do not change any files. Useful in combination with `--verbose`
- /// Implies `--no-commit` and `--no-upload`.
+ /// Implies `--no-commit`.
#[structopt(long)]
dry_run: bool,
- /// Do not commit any changes made.
- /// Implies `--no-upload`.
+ /// Do not commit or upload any changes made.
#[structopt(long)]
no_commit: bool,
+
+ /// Upload and send things for review, but mark as WIP and send no
+ /// emails.
+ #[structopt(long)]
+ wip: bool,
+
+ /// Don't run CQ if set. Only has an effect if uploading.
+ #[structopt(long)]
+ disable_cq: bool,
},
}
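
modify_repos wraps the repo mutations in scopeguard::defer!, so RepoSetupContext::cleanup runs whether the function returns normally or bails out early through `?`. A standalone sketch of that pattern (assuming the scopeguard crate, which this commit adds as a dependency):

    use anyhow::Result;

    fn sketch() -> Result<()> {
        scopeguard::defer! {
            // Runs when the scope exits, even on the early error return
            // below; this is how modify_repos guarantees cleanup.
            println!("cleanup");
        }
        let _version: u64 = "not-a-number".parse()?; // forces an early Err
        println!("never reached on the error path");
        Ok(())
    }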
diff --git a/llvm_tools/patch_sync/src/patch_parsing.rs b/llvm_tools/patch_sync/src/patch_parsing.rs
index 733451ae..124f0d6f 100644
--- a/llvm_tools/patch_sync/src/patch_parsing.rs
+++ b/llvm_tools/patch_sync/src/patch_parsing.rs
@@ -8,13 +8,43 @@ use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
/// JSON serde struct.
+// FIXME(b/221489531): Remove when we clear out start_version and
+// end_version.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PatchDictSchema {
- pub rel_patch_path: String,
- pub start_version: Option<u64>,
+ /// [deprecated(since = "1.1", note = "Use version_range")]
+ #[serde(skip_serializing_if = "Option::is_none")]
pub end_version: Option<u64>,
- pub platforms: BTreeSet<String>,
pub metadata: Option<BTreeMap<String, serde_json::Value>>,
+ #[serde(default, skip_serializing_if = "BTreeSet::is_empty")]
+ pub platforms: BTreeSet<String>,
+ pub rel_patch_path: String,
+ /// [deprecated(since = "1.1", note = "Use version_range")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub start_version: Option<u64>,
+ pub version_range: Option<VersionRange>,
+}
+
+#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
+pub struct VersionRange {
+ pub from: Option<u64>,
+ pub until: Option<u64>,
+}
+
+// FIXME(b/221489531): Remove when we clear out start_version and
+// end_version.
+impl PatchDictSchema {
+ pub fn get_start_version(&self) -> Option<u64> {
+ self.version_range
+ .map(|x| x.from)
+ .unwrap_or(self.start_version)
+ }
+
+ pub fn get_end_version(&self) -> Option<u64> {
+ self.version_range
+ .map(|x| x.until)
+ .unwrap_or(self.end_version)
+ }
}
/// Struct to keep track of patches and their relative paths.
@@ -44,14 +74,29 @@ impl PatchCollection {
})
}
- #[allow(dead_code)]
+ /// Copy this collection with patches filtered by given criterion.
+ pub fn filter_patches(&self, f: impl FnMut(&PatchDictSchema) -> bool) -> Self {
+ Self {
+ patches: self.patches.iter().cloned().filter(f).collect(),
+ workdir: self.workdir.clone(),
+ }
+ }
+
+ /// Map over the patches.
+ pub fn map_patches(&self, f: impl FnMut(&PatchDictSchema) -> PatchDictSchema) -> Self {
+ Self {
+ patches: self.patches.iter().map(f).collect(),
+ workdir: self.workdir.clone(),
+ }
+ }
+
/// Return true if the collection is tracking any patches.
pub fn is_empty(&self) -> bool {
self.patches.is_empty()
}
/// Compute the set-set subtraction, returning a new `PatchCollection` which
- /// keeps the minuend's wordir.
+ /// keeps the minuend's workdir.
pub fn subtract(&self, subtrahend: &Self) -> Result<Self> {
let mut new_patches = Vec::new();
// This is O(n^2) when it could be much faster, but n is always going to be less
@@ -121,6 +166,7 @@ impl PatchCollection {
end_version: p.end_version,
platforms: new_platforms,
metadata: p.metadata.clone(),
+ version_range: p.version_range,
});
// iii.
*merged = true;
@@ -187,11 +233,77 @@ impl PatchCollection {
Ok(std::str::from_utf8(&serialization_buffer)?.to_string())
}
+ /// Return whether a given patch actually exists on the file system.
+ pub fn patch_exists(&self, patch: &PatchDictSchema) -> bool {
+ self.workdir.join(&patch.rel_patch_path).exists()
+ }
+
fn hash_from_rel_patch(&self, patch: &PatchDictSchema) -> Result<String> {
hash_from_patch_path(&self.workdir.join(&patch.rel_patch_path))
}
}
+impl std::fmt::Display for PatchCollection {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ for (i, p) in self.patches.iter().enumerate() {
+ let title = p
+ .metadata
+ .as_ref()
+ .and_then(|x| x.get("title"))
+ .and_then(serde_json::Value::as_str)
+ .unwrap_or("[No Title]");
+ let path = self.workdir.join(&p.rel_patch_path);
+ writeln!(f, "* {}", title)?;
+ if i == self.patches.len() - 1 {
+ write!(f, " {}", path.display())?;
+ } else {
+ writeln!(f, " {}", path.display())?;
+ }
+ }
+ Ok(())
+ }
+}
+
+/// Generate a PatchCollection incorporating only the diff between current patches and old patch
+/// contents.
+pub fn new_patches(
+ patches_path: &Path,
+ old_patch_contents: &str,
+ platform: &str,
+) -> Result<(PatchCollection, PatchCollection)> {
+ let cur_collection = PatchCollection::parse_from_file(patches_path)
+ .with_context(|| format!("parsing {} PATCHES.json", platform))?;
+ let cur_collection = filter_patches_by_platform(&cur_collection, platform);
+ let cur_collection = cur_collection.filter_patches(|p| cur_collection.patch_exists(p));
+ let new_patches: PatchCollection = {
+ let old_collection = PatchCollection::parse_from_str(
+ patches_path.parent().unwrap().to_path_buf(),
+ old_patch_contents,
+ )?;
+ let old_collection = old_collection.filter_patches(|p| old_collection.patch_exists(p));
+ cur_collection.subtract(&old_collection)?
+ };
+ let new_patches = new_patches.map_patches(|p| {
+ let mut platforms = BTreeSet::new();
+ platforms.extend(["android".to_string(), "chromiumos".to_string()]);
+ PatchDictSchema {
+ platforms: platforms.union(&p.platforms).cloned().collect(),
+ ..p.to_owned()
+ }
+ });
+ Ok((cur_collection, new_patches))
+}
+
+/// Create a new collection with only the patches that apply to the
+/// given platform.
+///
+/// If there's no platform listed, the patch should still apply if the patch file exists.
+pub fn filter_patches_by_platform(collection: &PatchCollection, platform: &str) -> PatchCollection {
+ collection.filter_patches(|p| {
+ p.platforms.contains(platform) || (p.platforms.is_empty() && collection.patch_exists(p))
+ })
+}
+
/// Get the hash from the patch file contents.
///
/// Not every patch file actually contains its own hash,
@@ -219,7 +331,7 @@ fn hash_from_patch(patch_contents: impl Read) -> Result<String> {
}
fn hash_from_patch_path(patch: &Path) -> Result<String> {
- let f = File::open(patch)?;
+ let f = File::open(patch).with_context(|| format!("opening patch file {}", patch.display()))?;
hash_from_patch(f)
}
@@ -240,6 +352,7 @@ fn copy_create_parents(from: &Path, to: &Path) -> Result<()> {
#[cfg(test)]
mod test {
+
use super::*;
/// Test we can extract the hash from patch files.
@@ -275,6 +388,10 @@ mod test {
rel_patch_path: "a".into(),
metadata: None,
platforms: BTreeSet::from(["x".into()]),
+ version_range: Some(VersionRange {
+ from: Some(0),
+ until: Some(1),
+ }),
};
let patch2 = PatchDictSchema {
rel_patch_path: "b".into(),
@@ -310,4 +427,36 @@ mod test {
vec!["x", "y"]
);
}
+
+ #[test]
+ fn test_union_empties() {
+ let patch1 = PatchDictSchema {
+ start_version: Some(0),
+ end_version: Some(1),
+ rel_patch_path: "a".into(),
+ metadata: None,
+ platforms: Default::default(),
+ version_range: Some(VersionRange {
+ from: Some(0),
+ until: Some(1),
+ }),
+ };
+ let collection1 = PatchCollection {
+ workdir: PathBuf::new(),
+ patches: vec![patch1.clone()],
+ };
+ let collection2 = PatchCollection {
+ workdir: PathBuf::new(),
+ patches: vec![patch1],
+ };
+ let union = collection1
+ .union_helper(
+ &collection2,
+ |p| Ok(p.rel_patch_path.to_string()),
+ |p| Ok(p.rel_patch_path.to_string()),
+ )
+ .expect("could not create union");
+ assert_eq!(union.patches.len(), 1);
+ assert_eq!(union.patches[0].platforms.len(), 0);
+ }
}
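
get_start_version and get_end_version prefer the new version_range and fall back to the deprecated flat fields, and the range is half-open: from is inclusive, until is exclusive (main.rs filters Android patches with start <= v && v < end). A small in-crate sketch with made-up version numbers:

    use std::collections::BTreeSet;

    fn sketch() {
        let patch = PatchDictSchema {
            rel_patch_path: "example.patch".into(),
            start_version: None, // deprecated fields left empty
            end_version: None,
            metadata: None,
            platforms: BTreeSet::new(),
            version_range: Some(VersionRange {
                from: Some(1000),
                until: Some(1190),
            }),
        };
        // The accessors read version_range first, then the deprecated fields.
        assert_eq!(patch.get_start_version(), Some(1000));
        assert_eq!(patch.get_end_version(), Some(1190));
        // Half-open window: 1189 is the last version the patch applies to.
    }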
diff --git a/llvm_tools/patch_sync/src/version_control.rs b/llvm_tools/patch_sync/src/version_control.rs
index 3dc5aae9..e07d39d6 100644
--- a/llvm_tools/patch_sync/src/version_control.rs
+++ b/llvm_tools/patch_sync/src/version_control.rs
@@ -8,6 +8,10 @@ use std::process::{Command, Output};
const CHROMIUMOS_OVERLAY_REL_PATH: &str = "src/third_party/chromiumos-overlay";
const ANDROID_LLVM_REL_PATH: &str = "toolchain/llvm_android";
+const CROS_MAIN_BRANCH: &str = "main";
+const ANDROID_MAIN_BRANCH: &str = "master"; // nocheck
+const WORK_BRANCH_NAME: &str = "__patch_sync_tmp";
+
/// Context struct to keep track of both Chromium OS and Android checkouts.
#[derive(Debug)]
pub struct RepoSetupContext {
@@ -15,18 +19,30 @@ pub struct RepoSetupContext {
pub android_checkout: PathBuf,
/// Run `repo sync` before doing any comparisons.
pub sync_before: bool,
+ pub wip_mode: bool,
+ pub enable_cq: bool,
}
impl RepoSetupContext {
pub fn setup(&self) -> Result<()> {
if self.sync_before {
+ {
+ let crpp = self.cros_patches_path();
+ let cros_git = crpp.parent().unwrap();
+ git_cd_cmd(cros_git, ["checkout", CROS_MAIN_BRANCH])?;
+ }
+ {
+ let anpp = self.android_patches_path();
+ let android_git = anpp.parent().unwrap();
+ git_cd_cmd(android_git, ["checkout", ANDROID_MAIN_BRANCH])?;
+ }
repo_cd_cmd(&self.cros_checkout, &["sync", CHROMIUMOS_OVERLAY_REL_PATH])?;
repo_cd_cmd(&self.android_checkout, &["sync", ANDROID_LLVM_REL_PATH])?;
}
Ok(())
}
- pub fn cros_repo_upload(&self) -> Result<()> {
+ pub fn cros_repo_upload<S: AsRef<str>>(&self, reviewers: &[S]) -> Result<()> {
let llvm_dir = self
.cros_checkout
.join(&CHROMIUMOS_OVERLAY_REL_PATH)
@@ -37,48 +53,154 @@ impl RepoSetupContext {
llvm_dir.display()
);
Self::rev_bump_llvm(&llvm_dir)?;
+ let mut extra_args = Vec::new();
+ for reviewer in reviewers {
+ extra_args.push("--re");
+ extra_args.push(reviewer.as_ref());
+ }
+ if self.wip_mode {
+ extra_args.push("--wip");
+ extra_args.push("--no-emails");
+ }
+ if self.enable_cq {
+ extra_args.push("--label=Commit-Queue+1");
+ }
Self::repo_upload(
&self.cros_checkout,
CHROMIUMOS_OVERLAY_REL_PATH,
- &Self::build_commit_msg("android", "chromiumos", "BUG=None\nTEST=CQ"),
+ &Self::build_commit_msg(
+ "llvm: Synchronize patches from android",
+ "android",
+ "chromiumos",
+ "BUG=None\nTEST=CQ",
+ ),
+ extra_args,
)
}
- pub fn android_repo_upload(&self) -> Result<()> {
+ pub fn android_repo_upload<S: AsRef<str>>(&self, reviewers: &[S]) -> Result<()> {
+ let mut extra_args = Vec::new();
+ for reviewer in reviewers {
+ extra_args.push("--re");
+ extra_args.push(reviewer.as_ref());
+ }
+ if self.wip_mode {
+ extra_args.push("--wip");
+ extra_args.push("--no-emails");
+ }
+ if self.enable_cq {
+ extra_args.push("--label=Presubmit-Ready+1");
+ }
Self::repo_upload(
&self.android_checkout,
ANDROID_LLVM_REL_PATH,
- &Self::build_commit_msg("chromiumos", "android", "Test: N/A"),
+ &Self::build_commit_msg(
+ "Synchronize patches from chromiumos",
+ "chromiumos",
+ "android",
+ "Test: N/A",
+ ),
+ extra_args,
)
}
- fn repo_upload(path: &Path, git_wd: &str, commit_msg: &str) -> Result<()> {
- // TODO(ajordanr): Need to clean up if there's any failures during upload.
- let git_path = &path.join(&git_wd);
- ensure!(
- git_path.is_dir(),
- "git_path {} is not a directory",
- git_path.display()
- );
- repo_cd_cmd(path, &["start", "patch_sync_branch", git_wd])?;
- git_cd_cmd(git_path, &["add", "."])?;
- git_cd_cmd(git_path, &["commit", "-m", commit_msg])?;
- repo_cd_cmd(path, &["upload", "-y", "--verify", git_wd])?;
+ fn cros_cleanup(&self) -> Result<()> {
+ let git_path = self.cros_checkout.join(CHROMIUMOS_OVERLAY_REL_PATH);
+ Self::cleanup_branch(&git_path, CROS_MAIN_BRANCH, WORK_BRANCH_NAME)
+ .with_context(|| format!("cleaning up branch {}", WORK_BRANCH_NAME))?;
+ Ok(())
+ }
+
+ fn android_cleanup(&self) -> Result<()> {
+ let git_path = self.android_checkout.join(ANDROID_LLVM_REL_PATH);
+ Self::cleanup_branch(&git_path, ANDROID_MAIN_BRANCH, WORK_BRANCH_NAME)
+ .with_context(|| format!("cleaning up branch {}", WORK_BRANCH_NAME))?;
Ok(())
}
+ /// Wrapper around cleanups to ensure both get run, even if errors appear.
+ pub fn cleanup(&self) {
+ if let Err(e) = self.cros_cleanup() {
+ eprintln!("Failed to clean up chromiumos, continuing: {}", e);
+ }
+ if let Err(e) = self.android_cleanup() {
+ eprintln!("Failed to clean up android, continuing: {}", e);
+ }
+ }
+
+ /// Get the Android path to the PATCHES.json file
pub fn android_patches_path(&self) -> PathBuf {
self.android_checkout
.join(&ANDROID_LLVM_REL_PATH)
.join("patches/PATCHES.json")
}
+ /// Get the Chromium OS path to the PATCHES.json file
pub fn cros_patches_path(&self) -> PathBuf {
self.cros_checkout
.join(&CHROMIUMOS_OVERLAY_REL_PATH)
.join("sys-devel/llvm/files/PATCHES.json")
}
+ /// Return the contents of the old PATCHES.json from Chromium OS
+ pub fn old_cros_patch_contents(&self, hash: &str) -> Result<String> {
+ Self::old_file_contents(
+ hash,
+ &self.cros_checkout.join(CHROMIUMOS_OVERLAY_REL_PATH),
+ Path::new("sys-devel/llvm/files/PATCHES.json"),
+ )
+ }
+
+ /// Return the contents of the old PATCHES.json from android
+ pub fn old_android_patch_contents(&self, hash: &str) -> Result<String> {
+ Self::old_file_contents(
+ hash,
+ &self.android_checkout.join(ANDROID_LLVM_REL_PATH),
+ Path::new("patches/PATCHES.json"),
+ )
+ }
+
+ fn repo_upload<'a, I: IntoIterator<Item = &'a str>>(
+ checkout_path: &Path,
+ subproject_git_wd: &'a str,
+ commit_msg: &str,
+ extra_flags: I,
+ ) -> Result<()> {
+ let git_path = &checkout_path.join(&subproject_git_wd);
+ ensure!(
+ git_path.is_dir(),
+ "git_path {} is not a directory",
+ git_path.display()
+ );
+ repo_cd_cmd(
+ checkout_path,
+ &["start", WORK_BRANCH_NAME, subproject_git_wd],
+ )?;
+ let base_args = ["upload", "--br", WORK_BRANCH_NAME, "-y", "--verify"];
+ let new_args = base_args
+ .iter()
+ .copied()
+ .chain(extra_flags)
+ .chain(["--", subproject_git_wd]);
+ git_cd_cmd(git_path, &["add", "."])
+ .and_then(|_| git_cd_cmd(git_path, &["commit", "-m", commit_msg]))
+ .and_then(|_| repo_cd_cmd(checkout_path, new_args))?;
+ Ok(())
+ }
+
+ /// Clean up the git repo after we're done with it.
+ fn cleanup_branch(git_path: &Path, base_branch: &str, rm_branch: &str) -> Result<()> {
+ git_cd_cmd(git_path, ["restore", "."])?;
+ git_cd_cmd(git_path, ["clean", "-fd"])?;
+ git_cd_cmd(git_path, ["checkout", base_branch])?;
+        // It's acceptable for the branch deletion to fail; most commonly the
+        // branch simply does not exist, which is an expected result. Since
+        // that case is very common, we don't report the failure, as doing so
+        // would pollute the stderr logs.
+ let _ = git_cd_cmd(git_path, ["branch", "-D", rm_branch]);
+ Ok(())
+ }
+
/// Increment LLVM's revision number
fn rev_bump_llvm(llvm_dir: &Path) -> Result<PathBuf> {
let ebuild = find_ebuild(llvm_dir)
@@ -108,28 +230,7 @@ impl RepoSetupContext {
Ok(new_path)
}
- /// Return the contents of the old PATCHES.json from Chromium OS
- #[allow(dead_code)]
- pub fn old_cros_patch_contents(&self, hash: &str) -> Result<String> {
- Self::old_file_contents(
- hash,
- &self.cros_checkout.join(CHROMIUMOS_OVERLAY_REL_PATH),
- Path::new("sys-devel/llvm/files/PATCHES.json"),
- )
- }
-
- /// Return the contents of the old PATCHES.json from android
- #[allow(dead_code)]
- pub fn old_android_patch_contents(&self, hash: &str) -> Result<String> {
- Self::old_file_contents(
- hash,
- &self.android_checkout.join(ANDROID_LLVM_REL_PATH),
- Path::new("patches/PATCHES.json"),
- )
- }
-
/// Return the contents of an old file in git
- #[allow(dead_code)]
fn old_file_contents(hash: &str, pwd: &Path, file: &Path) -> Result<String> {
let git_ref = format!(
"{}:{}",
@@ -146,31 +247,57 @@ impl RepoSetupContext {
}
/// Create the commit message
- fn build_commit_msg(from: &str, to: &str, footer: &str) -> String {
+ fn build_commit_msg(subj: &str, from: &str, to: &str, footer: &str) -> String {
format!(
- "[patch_sync] Synchronize patches from {}\n\n\
- Copies new PATCHES.json changes from {} to {}\n\n{}",
- from, from, to, footer
+ "[patch_sync] {}\n\n\
+Copies new PATCHES.json changes from {} to {}.\n
+For questions about this job, contact chromeos-toolchain@google.com\n\n
+{}",
+ subj, from, to, footer
)
}
}
/// Return the path of an ebuild located within the given directory.
fn find_ebuild(dir: &Path) -> Result<PathBuf> {
- // TODO(ajordanr): Maybe use OnceCell for this regex?
- let ebuild_matcher = Regex::new(r"(-r[0-9]+)?\.ebuild").unwrap();
- for entry in fs::read_dir(dir)? {
- let path = entry?.path();
- if let Some(name) = path.file_name() {
- if ebuild_matcher.is_match(
- name.to_str()
- .ok_or_else(|| anyhow!("converting filepath to UTF-8"))?,
- ) {
- return Ok(path);
- }
+ // The logic here is that we create an iterator over all file paths to ebuilds
+ // with _pre in the name. Then we sort those ebuilds based on their revision numbers.
+ // Then we return the highest revisioned one.
+
+ let ebuild_rev_matcher = Regex::new(r"-r([0-9]+)\.ebuild").unwrap();
+ // For LLVM ebuilds, we only want to check for ebuilds that have this in their file name.
+ let per_heuristic = "_pre";
+    // Get an iterator over all ebuilds with a _pre in the file name.
+ let ebuild_candidates = fs::read_dir(dir)?.filter_map(|entry| {
+ let entry = entry.ok()?;
+ let path = entry.path();
+ if path.extension()? != "ebuild" {
+ // Not an ebuild, ignore.
+ return None;
}
- }
- bail!("could not find ebuild")
+ let stem = path.file_stem()?.to_str()?;
+ if stem.contains(per_heuristic) {
+ return Some(path);
+ }
+ None
+ });
+ let try_parse_ebuild_rev = |path: PathBuf| -> Option<(u64, PathBuf)> {
+ let name = path.file_name()?;
+ if let Some(rev_match) = ebuild_rev_matcher.captures(name.to_str()?) {
+ let rev_str = rev_match.get(1)?;
+ let rev_num = rev_str.as_str().parse::<u64>().ok()?;
+ return Some((rev_num, path));
+ }
+ // If it doesn't have a revision, then it's revision 0.
+ Some((0, path))
+ };
+ let mut sorted_candidates: Vec<_> =
+ ebuild_candidates.filter_map(try_parse_ebuild_rev).collect();
+ sorted_candidates.sort_unstable_by_key(|x| x.0);
+ let highest_rev_ebuild = sorted_candidates
+ .pop()
+ .ok_or_else(|| anyhow!("could not find ebuild"))?;
+ Ok(highest_rev_ebuild.1)
}
/// Run a given git command from inside a specified git dir.
@@ -179,9 +306,16 @@ where
I: IntoIterator<Item = S>,
S: AsRef<OsStr>,
{
- let output = Command::new("git").current_dir(&pwd).args(args).output()?;
+ let mut command = Command::new("git");
+ command.current_dir(&pwd).args(args);
+ let output = command.output()?;
if !output.status.success() {
- bail!("git command failed")
+ bail!(
+ "git command failed:\n {:?}\nstdout --\n{}\nstderr --\n{}",
+ command,
+ String::from_utf8_lossy(&output.stdout),
+ String::from_utf8_lossy(&output.stderr),
+ );
}
Ok(output)
}
@@ -191,9 +325,11 @@ where
I: IntoIterator<Item = S>,
S: AsRef<OsStr>,
{
- let status = Command::new("repo").current_dir(&pwd).args(args).status()?;
+ let mut command = Command::new("repo");
+ command.current_dir(&pwd).args(args);
+ let status = command.status()?;
if !status.success() {
- bail!("repo command failed")
+ bail!("repo command failed:\n {:?} \n", command)
}
Ok(())
}
@@ -219,7 +355,11 @@ mod test {
File::create(&ebuild_path).expect("creating test ebuild file");
let new_ebuild_path =
RepoSetupContext::rev_bump_llvm(&llvm_dir).expect("rev bumping the ebuild");
- assert!(new_ebuild_path.ends_with("llvm-13.0_pre433403_p20211019-r11.ebuild"));
+ assert!(
+ new_ebuild_path.ends_with("llvm-13.0_pre433403_p20211019-r11.ebuild"),
+ "{}",
+ new_ebuild_path.display()
+ );
fs::remove_file(new_ebuild_path).expect("removing renamed ebuild file");
}
{
@@ -229,9 +369,31 @@ mod test {
File::create(&ebuild_path).expect("creating test ebuild file");
let new_ebuild_path =
RepoSetupContext::rev_bump_llvm(&llvm_dir).expect("rev bumping the ebuild");
- assert!(new_ebuild_path.ends_with("llvm-13.0_pre433403_p20211019-r1.ebuild"));
+ assert!(
+ new_ebuild_path.ends_with("llvm-13.0_pre433403_p20211019-r1.ebuild"),
+ "{}",
+ new_ebuild_path.display()
+ );
fs::remove_file(new_ebuild_path).expect("removing renamed ebuild file");
}
+ {
+ // With both
+ let ebuild_name = "llvm-13.0_pre433403_p20211019.ebuild";
+ let ebuild_path = llvm_dir.join(ebuild_name);
+ File::create(&ebuild_path).expect("creating test ebuild file");
+ let ebuild_link_name = "llvm-13.0_pre433403_p20211019-r2.ebuild";
+ let ebuild_link_path = llvm_dir.join(ebuild_link_name);
+ File::create(&ebuild_link_path).expect("creating test ebuild link file");
+ let new_ebuild_path =
+ RepoSetupContext::rev_bump_llvm(&llvm_dir).expect("rev bumping the ebuild");
+ assert!(
+ new_ebuild_path.ends_with("llvm-13.0_pre433403_p20211019-r3.ebuild"),
+ "{}",
+ new_ebuild_path.display()
+ );
+ fs::remove_file(new_ebuild_path).expect("removing renamed ebuild link file");
+ fs::remove_file(ebuild_path).expect("removing renamed ebuild file");
+ }
fs::remove_dir(&llvm_dir).expect("removing temp test dir");
}
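
find_ebuild now selects the highest-revision _pre ebuild, counting a name without an -rN suffix as revision 0; that is why the last test case above, with a bare ebuild plus an -r2 copy, bumps to -r3. A sketch of the revision key the sort relies on (assuming the regex crate, which the module already uses):

    use regex::Regex;

    fn sketch() {
        let ebuild_rev = Regex::new(r"-r([0-9]+)\.ebuild").unwrap();
        let rev_of = |name: &str| -> u64 {
            ebuild_rev
                .captures(name)
                .and_then(|c| c.get(1))
                .and_then(|m| m.as_str().parse().ok())
                .unwrap_or(0) // no "-rN" suffix counts as revision 0
        };
        assert_eq!(rev_of("llvm-13.0_pre433403_p20211019.ebuild"), 0);
        assert_eq!(rev_of("llvm-13.0_pre433403_p20211019-r2.ebuild"), 2);
    }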
diff --git a/rust_tools/rust_watch.py b/rust_tools/rust_watch.py
index db6ae71b..c347d2c6 100755
--- a/rust_tools/rust_watch.py
+++ b/rust_tools/rust_watch.py
@@ -241,7 +241,7 @@ def maybe_compose_bug(
if newest_release == old_state.last_seen_release:
return None
- title = f'New rustc release detected: v{newest_release}'
+ title = f'[Rust] Update to {newest_release}'
body = ('A new release has been detected; we should probably roll to it. '
"Please see go/crostc-rust-rotation for who's turn it is.")
return title, body
diff --git a/rust_tools/rust_watch_test.py b/rust_tools/rust_watch_test.py
index 30bacbb9..583a9125 100755
--- a/rust_tools/rust_watch_test.py
+++ b/rust_tools/rust_watch_test.py
@@ -129,7 +129,7 @@ class Test(unittest.TestCase):
),
newest_release=rust_watch.RustReleaseVersion(1, 0, 1),
)
- self.assertEqual(title, 'New rustc release detected: v1.0.1')
+ self.assertEqual(title, '[Rust] Update to 1.0.1')
self.assertTrue(body.startswith('A new release has been detected;'))
title, body = rust_watch.maybe_compose_bug(
@@ -139,7 +139,7 @@ class Test(unittest.TestCase):
),
newest_release=rust_watch.RustReleaseVersion(1, 1, 0),
)
- self.assertEqual(title, 'New rustc release detected: v1.1.0')
+ self.assertEqual(title, '[Rust] Update to 1.1.0')
self.assertTrue(body.startswith('A new release has been detected;'))
title, body = rust_watch.maybe_compose_bug(
@@ -149,7 +149,7 @@ class Test(unittest.TestCase):
),
newest_release=rust_watch.RustReleaseVersion(2, 0, 0),
)
- self.assertEqual(title, 'New rustc release detected: v2.0.0')
+ self.assertEqual(title, '[Rust] Update to 2.0.0')
self.assertTrue(body.startswith('A new release has been detected;'))
def test_compose_bug_does_nothing_when_no_new_updates_exist(self):