author    Stephen Hines <srhines@google.com>  2022-01-06 08:57:50 +0000
committer Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>  2022-01-06 08:57:50 +0000
commit    9d023268dcf468f9d4bbd874545c9c230e7549b3 (patch)
tree      cf4cdad04e1c146844e2a790910cf9cd1b97082a /llvm_tools/patch_sync/src
parent    b36814ef2f8c03cd7d60fd39b1fef69c2910469d (diff)
parent    cfc899b23937f6aec9ea2de4967dd8d299f3dc6d (diff)
download  toolchain-utils-9d023268dcf468f9d4bbd874545c9c230e7549b3.tar.gz
Merging 32 commit(s) from Chromium's toolchain-utils am: 7bc245bad9 am: 6323407a0d am: 38574f5e98 am: cfc899b239
Original change: https://android-review.googlesource.com/c/platform/external/toolchain-utils/+/1937961
Change-Id: I1a093ad26060edd9fd26476d0d5d3afff274306a
Diffstat (limited to 'llvm_tools/patch_sync/src')
-rw-r--r--  llvm_tools/patch_sync/src/main.rs             173
-rw-r--r--  llvm_tools/patch_sync/src/patch_parsing.rs    313
-rw-r--r--  llvm_tools/patch_sync/src/version_control.rs  238
3 files changed, 724 insertions(+), 0 deletions(-)
diff --git a/llvm_tools/patch_sync/src/main.rs b/llvm_tools/patch_sync/src/main.rs
new file mode 100644
index 00000000..081ce01a
--- /dev/null
+++ b/llvm_tools/patch_sync/src/main.rs
@@ -0,0 +1,173 @@
+mod patch_parsing;
+mod version_control;
+
+use anyhow::{Context, Result};
+use std::path::PathBuf;
+use structopt::StructOpt;
+
+fn main() -> Result<()> {
+ match Opt::from_args() {
+ Opt::Show {
+ cros_checkout_path,
+ android_checkout_path,
+ sync,
+ } => show_subcmd(cros_checkout_path, android_checkout_path, sync),
+ Opt::Transpose {
+ cros_checkout_path,
+ old_cros_ref,
+ android_checkout_path,
+ old_android_ref,
+ sync,
+ verbose,
+ dry_run,
+ no_commit,
+ } => transpose_subcmd(TransposeOpt {
+ cros_checkout_path,
+ old_cros_ref,
+ android_checkout_path,
+ old_android_ref,
+ sync,
+ verbose,
+ dry_run,
+ no_commit,
+ }),
+ }
+}
+
+fn show_subcmd(
+ cros_checkout_path: PathBuf,
+ android_checkout_path: PathBuf,
+ sync: bool,
+) -> Result<()> {
+ let ctx = version_control::RepoSetupContext {
+ cros_checkout: cros_checkout_path,
+ android_checkout: android_checkout_path,
+ sync_before: sync,
+ };
+ ctx.setup()?;
+ let cros_patches_path = ctx.cros_patches_path();
+ let android_patches_path = ctx.android_patches_path();
+ let cur_cros_collection = patch_parsing::PatchCollection::parse_from_file(&cros_patches_path)
+ .context("could not parse cros PATCHES.json")?;
+ let cur_android_collection =
+ patch_parsing::PatchCollection::parse_from_file(&android_patches_path)
+ .context("could not parse android PATCHES.json")?;
+ let merged = cur_cros_collection.union(&cur_android_collection)?;
+ println!("{}", merged.serialize_patches()?);
+ Ok(())
+}
+
+#[allow(dead_code)]
+struct TransposeOpt {
+ cros_checkout_path: PathBuf,
+ old_cros_ref: String,
+ android_checkout_path: PathBuf,
+ old_android_ref: String,
+ sync: bool,
+ verbose: bool,
+ dry_run: bool,
+ no_commit: bool,
+}
+
+fn transpose_subcmd(args: TransposeOpt) -> Result<()> {
+ let ctx = version_control::RepoSetupContext {
+ cros_checkout: args.cros_checkout_path,
+ android_checkout: args.android_checkout_path,
+ sync_before: args.sync,
+ };
+ ctx.setup()?;
+ let cros_patches_path = ctx.cros_patches_path();
+ let android_patches_path = ctx.android_patches_path();
+
+ // Chromium OS Patches ----------------------------------------------------
+ let mut cur_cros_collection =
+ patch_parsing::PatchCollection::parse_from_file(&cros_patches_path)
+ .context("parsing cros PATCHES.json")?;
+ let new_cros_patches: patch_parsing::PatchCollection = {
+ let cros_old_patches_json = ctx.old_cros_patch_contents(&args.old_cros_ref)?;
+ let old_cros_collection = patch_parsing::PatchCollection::parse_from_str(
+ cros_patches_path.parent().unwrap().to_path_buf(),
+ &cros_old_patches_json,
+ )?;
+ cur_cros_collection.subtract(&old_cros_collection)?
+ };
+
+ // Android Patches -------------------------------------------------------
+ let mut cur_android_collection =
+ patch_parsing::PatchCollection::parse_from_file(&android_patches_path)
+ .context("parsing android PATCHES.json")?;
+ let new_android_patches: patch_parsing::PatchCollection = {
+ let android_old_patches_json = ctx.old_android_patch_contents(&args.old_android_ref)?;
+ let old_android_collection = patch_parsing::PatchCollection::parse_from_str(
+ android_patches_path.parent().unwrap().to_path_buf(),
+ &android_old_patches_json,
+ )?;
+ cur_android_collection.subtract(&old_android_collection)?
+ };
+
+ // Transpose Patches -----------------------------------------------------
+ new_cros_patches.transpose_write(&mut cur_cros_collection)?;
+ new_android_patches.transpose_write(&mut cur_android_collection)?;
+
+ if args.no_commit {
+ return Ok(());
+ }
+ // Commit and upload for review ------------------------------------------
+ ctx.cros_repo_upload()
+ .context("uploading chromiumos changes")?;
+ ctx.android_repo_upload()
+ .context("uploading android changes")?;
+ Ok(())
+}
+
+#[derive(Debug, structopt::StructOpt)]
+#[structopt(name = "patch_sync", about = "A pipeline for syncing the patch code")]
+enum Opt {
+ /// Show a combined view of the two PATCHES.json files, without making any changes.
+ #[allow(dead_code)]
+ Show {
+ #[structopt(parse(from_os_str))]
+ cros_checkout_path: PathBuf,
+ #[structopt(parse(from_os_str))]
+ android_checkout_path: PathBuf,
+ #[structopt(short, long)]
+ sync: bool,
+ },
+ /// Transpose patches between two PATCHES.json files.
+ Transpose {
+ /// Path to the ChromiumOS source repo checkout.
+ #[structopt(long = "cros-checkout", parse(from_os_str))]
+ cros_checkout_path: PathBuf,
+
+ /// Git ref (e.g. hash) for the ChromiumOS overlay to use as the base.
+ #[structopt(long = "overlay-base-ref")]
+ old_cros_ref: String,
+
+ /// Path to the Android Open Source Project source repo checkout.
+ #[structopt(long = "aosp-checkout", parse(from_os_str))]
+ android_checkout_path: PathBuf,
+
+ /// Git ref (e.g. hash) for the llvm_android repo to use as the base.
+ #[structopt(long = "aosp-base-ref")]
+ old_android_ref: String,
+
+ /// Run repo sync before transposing.
+ #[structopt(short, long)]
+ sync: bool,
+
+ /// Print information to stdout.
+ #[structopt(short, long)]
+ verbose: bool,
+
+ /// Do not change any files. Useful in combination with `--verbose`.
+ /// Implies `--no-commit` and `--no-upload`.
+ #[structopt(long)]
+ dry_run: bool,
+
+ /// Do not commit any changes made.
+ /// Implies `--no-upload`.
+ #[structopt(long)]
+ no_commit: bool,
+ },
+}
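
For reference, the subcommand flags above compose into an invocation like the following (a hypothetical sketch: the checkout paths and git refs are placeholders, while the binary name and flag names come directly from the `structopt` attributes):

    patch_sync transpose \
        --cros-checkout ~/chromiumos \
        --overlay-base-ref abc123def \
        --aosp-checkout ~/android \
        --aosp-base-ref 456fedcba \
        --sync --verbose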
diff --git a/llvm_tools/patch_sync/src/patch_parsing.rs b/llvm_tools/patch_sync/src/patch_parsing.rs
new file mode 100644
index 00000000..733451ae
--- /dev/null
+++ b/llvm_tools/patch_sync/src/patch_parsing.rs
@@ -0,0 +1,313 @@
+use std::collections::{BTreeMap, BTreeSet};
+use std::fs::{copy, File};
+use std::io::{BufRead, BufReader, Read, Write};
+use std::path::{Path, PathBuf};
+
+use anyhow::{anyhow, Context, Result};
+use serde::{Deserialize, Serialize};
+use sha2::{Digest, Sha256};
+
+/// JSON serde struct.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct PatchDictSchema {
+ pub rel_patch_path: String,
+ pub start_version: Option<u64>,
+ pub end_version: Option<u64>,
+ pub platforms: BTreeSet<String>,
+ pub metadata: Option<BTreeMap<String, serde_json::Value>>,
+}
+
+/// Struct to keep track of patches and their relative paths.
+#[derive(Debug, Clone)]
+pub struct PatchCollection {
+ pub patches: Vec<PatchDictSchema>,
+ pub workdir: PathBuf,
+}
+
+impl PatchCollection {
+ /// Create a `PatchCollection` from a PATCHES.json file.
+ pub fn parse_from_file(json_file: &Path) -> Result<Self> {
+ Ok(Self {
+ patches: serde_json::from_reader(File::open(json_file)?)?,
+ workdir: json_file
+ .parent()
+ .ok_or_else(|| anyhow!("failed to get json_file parent"))?
+ .to_path_buf(),
+ })
+ }
+
+ /// Create a `PatchCollection` from a string literal and a workdir.
+ pub fn parse_from_str(workdir: PathBuf, contents: &str) -> Result<Self> {
+ Ok(Self {
+ patches: serde_json::from_str(contents).context("parsing from str")?,
+ workdir,
+ })
+ }
+
+ #[allow(dead_code)]
+ /// Return true if the collection is not tracking any patches.
+ pub fn is_empty(&self) -> bool {
+ self.patches.is_empty()
+ }
+
+ /// Compute the set subtraction, returning a new `PatchCollection` which
+ /// keeps the minuend's workdir.
+ pub fn subtract(&self, subtrahend: &Self) -> Result<Self> {
+ let mut new_patches = Vec::new();
+ // This is O(n^2) when it could be much faster, but n is always going to be less
+ // than 1k and speed is not important here.
+ for our_patch in &self.patches {
+ let found_in_sub = subtrahend.patches.iter().any(|sub_patch| {
+ let hash1 = subtrahend
+ .hash_from_rel_patch(sub_patch)
+ .expect("getting hash from subtrahend patch");
+ let hash2 = self
+ .hash_from_rel_patch(our_patch)
+ .expect("getting hash from our patch");
+ hash1 == hash2
+ });
+ if !found_in_sub {
+ new_patches.push(our_patch.clone());
+ }
+ }
+ Ok(Self {
+ patches: new_patches,
+ workdir: self.workdir.clone(),
+ })
+ }
+
+ pub fn union(&self, other: &Self) -> Result<Self> {
+ self.union_helper(
+ other,
+ |p| self.hash_from_rel_patch(p),
+ |p| other.hash_from_rel_patch(p),
+ )
+ }
+
+ fn union_helper(
+ &self,
+ other: &Self,
+ our_hash_f: impl Fn(&PatchDictSchema) -> Result<String>,
+ their_hash_f: impl Fn(&PatchDictSchema) -> Result<String>,
+ ) -> Result<Self> {
+ // 1. For all our patches:
+ // a. If there exists a matching patch hash from `other`:
+ // i. Create a new patch with merged platform info,
+ // ii. add the new patch to our new collection.
+ // iii. Mark the other patch as "merged"
+ // b. Otherwise, copy our patch to the new collection
+ // 2. For all unmerged patches from the `other`
+ // a. Copy their patch into the new collection
+ let mut combined_patches = Vec::new();
+ let mut other_merged = vec![false; other.patches.len()];
+
+ // 1.
+ for p in &self.patches {
+ let our_hash = our_hash_f(p)?;
+ let mut found = false;
+ // a.
+ for (idx, merged) in other_merged.iter_mut().enumerate() {
+ if !*merged {
+ let other_p = &other.patches[idx];
+ let their_hash = their_hash_f(other_p)?;
+ if our_hash == their_hash {
+ // i.
+ let new_platforms =
+ p.platforms.union(&other_p.platforms).cloned().collect();
+ // ii.
+ combined_patches.push(PatchDictSchema {
+ rel_patch_path: p.rel_patch_path.clone(),
+ start_version: p.start_version,
+ end_version: p.end_version,
+ platforms: new_platforms,
+ metadata: p.metadata.clone(),
+ });
+ // iii.
+ *merged = true;
+ found = true;
+ break;
+ }
+ }
+ }
+ // b.
+ if !found {
+ combined_patches.push(p.clone());
+ }
+ }
+ // 2.
+ // Add any remaining, other-only patches.
+ for (idx, merged) in other_merged.iter().enumerate() {
+ if !*merged {
+ combined_patches.push(other.patches[idx].clone());
+ }
+ }
+
+ Ok(Self {
+ workdir: self.workdir.clone(),
+ patches: combined_patches,
+ })
+ }
+
+ /// Copy all patches from this collection into another existing collection, and write that
+ /// to the existing collection's file.
+ pub fn transpose_write(&self, existing_collection: &mut Self) -> Result<()> {
+ for p in &self.patches {
+ let original_file_path = self.workdir.join(&p.rel_patch_path);
+ let copy_file_path = existing_collection.workdir.join(&p.rel_patch_path);
+ copy_create_parents(&original_file_path, &copy_file_path)?;
+ existing_collection.patches.push(p.clone());
+ }
+ existing_collection.write_patches_json("PATCHES.json")
+ }
+
+ /// Write out the patch collection contents to a PATCHES.json file.
+ fn write_patches_json(&self, filename: &str) -> Result<()> {
+ let write_path = self.workdir.join(filename);
+ let mut new_patches_file = File::create(&write_path)
+ .with_context(|| format!("writing to {}", write_path.display()))?;
+ new_patches_file.write_all(self.serialize_patches()?.as_bytes())?;
+ Ok(())
+ }
+
+ pub fn serialize_patches(&self) -> Result<String> {
+ let mut serialization_buffer = Vec::<u8>::new();
+ // Four spaces to indent json serialization.
+ let mut serializer = serde_json::Serializer::with_formatter(
+ &mut serialization_buffer,
+ serde_json::ser::PrettyFormatter::with_indent(b"    "),
+ );
+ self.patches
+ .serialize(&mut serializer)
+ .context("serializing patches to JSON")?;
+ // Append a newline at the end if not present. This is necessary to get
+ // past some pre-upload hooks.
+ if serialization_buffer.last() != Some(&b'\n') {
+ serialization_buffer.push(b'\n');
+ }
+ Ok(std::str::from_utf8(&serialization_buffer)?.to_string())
+ }
+
+ fn hash_from_rel_patch(&self, patch: &PatchDictSchema) -> Result<String> {
+ hash_from_patch_path(&self.workdir.join(&patch.rel_patch_path))
+ }
+}
+
+/// Get the hash from the patch file contents.
+///
+ /// Not every patch file actually contains its own hash, so we must
+ /// compute the hash ourselves when it's not found.
+fn hash_from_patch(patch_contents: impl Read) -> Result<String> {
+ let mut reader = BufReader::new(patch_contents);
+ let mut buf = String::new();
+ reader.read_line(&mut buf)?;
+ let mut first_line_iter = buf.trim().split(' ').fuse();
+ let (fst_word, snd_word) = (first_line_iter.next(), first_line_iter.next());
+ if let (Some("commit" | "From"), Some(hash_str)) = (fst_word, snd_word) {
+ // If the first line starts with either "commit" or "From", the following
+ // text is almost certainly a commit hash.
+ Ok(hash_str.to_string())
+ } else {
+ // This is an annoying case where the patch isn't actually a commit.
+ // So we'll hash the entire file, and hope that's sufficient.
+ let mut hasher = Sha256::new();
+ hasher.update(&buf); // Hash the first line, which was already read.
+ buf.clear(); // read_to_string appends, so clear to avoid hashing the first line twice.
+ reader.read_to_string(&mut buf)?;
+ hasher.update(buf); // Hash the rest of the file.
+ let sha = hasher.finalize();
+ Ok(format!("{:x}", &sha))
+ }
+}
+
+fn hash_from_patch_path(patch: &Path) -> Result<String> {
+ let f = File::open(patch)?;
+ hash_from_patch(f)
+}
+
+/// Copy a file from one path to another, and create any parent
+/// directories along the way.
+fn copy_create_parents(from: &Path, to: &Path) -> Result<()> {
+ let to_parent = to
+ .parent()
+ .with_context(|| format!("getting parent of {}", to.display()))?;
+ if !to_parent.exists() {
+ std::fs::create_dir_all(to_parent)?;
+ }
+
+ copy(&from, &to)
+ .with_context(|| format!("copying file from {} to {}", &from.display(), &to.display()))?;
+ Ok(())
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+
+ /// Test we can extract the hash from patch files.
+ #[test]
+ fn test_hash_from_patch() {
+ // Example git patch from Gerrit
+ let desired_hash = "004be4037e1e9c6092323c5c9268acb3ecf9176c";
+ let test_file_contents = "commit 004be4037e1e9c6092323c5c9268acb3ecf9176c\n\
+ Author: An Author <some_email>\n\
+ Date: Thu Aug 6 12:34:16 2020 -0700";
+ assert_eq!(
+ &hash_from_patch(test_file_contents.as_bytes()).unwrap(),
+ desired_hash
+ );
+
+ // Example git patch from upstream
+ let desired_hash = "6f85225ef3791357f9b1aa097b575b0a2b0dff48";
+ let test_file_contents = "From 6f85225ef3791357f9b1aa097b575b0a2b0dff48\n\
+ Mon Sep 17 00:00:00 2001\n\
+ From: Another Author <another_email>\n\
+ Date: Wed, 18 Aug 2021 15:03:03 -0700";
+ assert_eq!(
+ &hash_from_patch(test_file_contents.as_bytes()).unwrap(),
+ desired_hash
+ );
+ }
+
+ #[test]
+ fn test_union() {
+ let patch1 = PatchDictSchema {
+ start_version: Some(0),
+ end_version: Some(1),
+ rel_patch_path: "a".into(),
+ metadata: None,
+ platforms: BTreeSet::from(["x".into()]),
+ };
+ let patch2 = PatchDictSchema {
+ rel_patch_path: "b".into(),
+ platforms: BTreeSet::from(["x".into(), "y".into()]),
+ ..patch1.clone()
+ };
+ let patch3 = PatchDictSchema {
+ platforms: BTreeSet::from(["z".into(), "x".into()]),
+ ..patch1.clone()
+ };
+ let collection1 = PatchCollection {
+ workdir: PathBuf::new(),
+ patches: vec![patch1, patch2],
+ };
+ let collection2 = PatchCollection {
+ workdir: PathBuf::new(),
+ patches: vec![patch3],
+ };
+ let union = collection1
+ .union_helper(
+ &collection2,
+ |p| Ok(p.rel_patch_path.to_string()),
+ |p| Ok(p.rel_patch_path.to_string()),
+ )
+ .expect("could not create union");
+ assert_eq!(union.patches.len(), 2);
+ assert_eq!(
+ union.patches[0].platforms.iter().collect::<Vec<&String>>(),
+ vec!["x", "z"]
+ );
+ assert_eq!(
+ union.patches[1].platforms.iter().collect::<Vec<&String>>(),
+ vec!["x", "y"]
+ );
+ }
+}
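
To make the data flow above concrete, here is a minimal sketch (not part of this change) of how `PatchCollection` might be driven from inside this crate; the `example` function, the two manifest paths, and the assumption that every `rel_patch_path` resolves to a real file under each workdir are all hypothetical:

    use std::path::Path;
    use crate::patch_parsing::PatchCollection;

    fn example() -> anyhow::Result<()> {
        // Each collection anchors its rel_patch_paths at the manifest's parent dir.
        let cros = PatchCollection::parse_from_file(Path::new("cros/PATCHES.json"))?;
        let android = PatchCollection::parse_from_file(Path::new("android/PATCHES.json"))?;
        // `union` merges platform sets for patches whose file contents hash equal;
        // `subtract` keeps only patches absent (by content hash) from the other side.
        let merged = cros.union(&android)?;
        let cros_only = cros.subtract(&android)?;
        println!("{}", merged.serialize_patches()?);
        println!("{} patch(es) unique to CrOS", cros_only.patches.len());
        Ok(())
    }

Note that both `union` and `subtract` hash the referenced patch files on disk, not just the JSON entries, which is why the workdirs must contain the actual patches.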
diff --git a/llvm_tools/patch_sync/src/version_control.rs b/llvm_tools/patch_sync/src/version_control.rs
new file mode 100644
index 00000000..3dc5aae9
--- /dev/null
+++ b/llvm_tools/patch_sync/src/version_control.rs
@@ -0,0 +1,238 @@
+use anyhow::{anyhow, bail, ensure, Context, Result};
+use regex::Regex;
+use std::ffi::OsStr;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::process::{Command, Output};
+
+const CHROMIUMOS_OVERLAY_REL_PATH: &str = "src/third_party/chromiumos-overlay";
+const ANDROID_LLVM_REL_PATH: &str = "toolchain/llvm_android";
+
+/// Context struct to keep track of both Chromium OS and Android checkouts.
+#[derive(Debug)]
+pub struct RepoSetupContext {
+ pub cros_checkout: PathBuf,
+ pub android_checkout: PathBuf,
+ /// Run `repo sync` before doing any comparisons.
+ pub sync_before: bool,
+}
+
+impl RepoSetupContext {
+ pub fn setup(&self) -> Result<()> {
+ if self.sync_before {
+ repo_cd_cmd(&self.cros_checkout, &["sync", CHROMIUMOS_OVERLAY_REL_PATH])?;
+ repo_cd_cmd(&self.android_checkout, &["sync", ANDROID_LLVM_REL_PATH])?;
+ }
+ Ok(())
+ }
+
+ pub fn cros_repo_upload(&self) -> Result<()> {
+ let llvm_dir = self
+ .cros_checkout
+ .join(&CHROMIUMOS_OVERLAY_REL_PATH)
+ .join("sys-devel/llvm");
+ ensure!(
+ llvm_dir.is_dir(),
+ "CrOS LLVM dir {} is not a directory",
+ llvm_dir.display()
+ );
+ Self::rev_bump_llvm(&llvm_dir)?;
+ Self::repo_upload(
+ &self.cros_checkout,
+ CHROMIUMOS_OVERLAY_REL_PATH,
+ &Self::build_commit_msg("android", "chromiumos", "BUG=None\nTEST=CQ"),
+ )
+ }
+
+ pub fn android_repo_upload(&self) -> Result<()> {
+ Self::repo_upload(
+ &self.android_checkout,
+ ANDROID_LLVM_REL_PATH,
+ &Self::build_commit_msg("chromiumos", "android", "Test: N/A"),
+ )
+ }
+
+ fn repo_upload(path: &Path, git_wd: &str, commit_msg: &str) -> Result<()> {
+ // TODO(ajordanr): Need to clean up if there are any failures during upload.
+ let git_path = &path.join(&git_wd);
+ ensure!(
+ git_path.is_dir(),
+ "git_path {} is not a directory",
+ git_path.display()
+ );
+ repo_cd_cmd(path, &["start", "patch_sync_branch", git_wd])?;
+ git_cd_cmd(git_path, &["add", "."])?;
+ git_cd_cmd(git_path, &["commit", "-m", commit_msg])?;
+ repo_cd_cmd(path, &["upload", "-y", "--verify", git_wd])?;
+ Ok(())
+ }
+
+ pub fn android_patches_path(&self) -> PathBuf {
+ self.android_checkout
+ .join(&ANDROID_LLVM_REL_PATH)
+ .join("patches/PATCHES.json")
+ }
+
+ pub fn cros_patches_path(&self) -> PathBuf {
+ self.cros_checkout
+ .join(&CHROMIUMOS_OVERLAY_REL_PATH)
+ .join("sys-devel/llvm/files/PATCHES.json")
+ }
+
+ /// Increment LLVM's revision number
+ fn rev_bump_llvm(llvm_dir: &Path) -> Result<PathBuf> {
+ let ebuild = find_ebuild(llvm_dir)
+ .with_context(|| format!("finding ebuild in {} to rev bump", llvm_dir.display()))?;
+ let ebuild_dir = ebuild.parent().unwrap();
+ let suffix_matcher = Regex::new(r"-r([0-9]+)\.ebuild").unwrap();
+ let ebuild_name = ebuild
+ .file_name()
+ .unwrap()
+ .to_str()
+ .ok_or_else(|| anyhow!("converting ebuild filename to utf-8"))?;
+ let new_path = if let Some(captures) = suffix_matcher.captures(ebuild_name) {
+ let full_suffix = captures.get(0).unwrap().as_str();
+ let cur_version = captures.get(1).unwrap().as_str().parse::<u32>().unwrap();
+ let new_filename =
+ ebuild_name.replace(full_suffix, &format!("-r{}.ebuild", cur_version + 1_u32));
+ let new_path = ebuild_dir.join(new_filename);
+ fs::rename(&ebuild, &new_path)?;
+ new_path
+ } else {
+ // File did not end in a revision. We should append -r1 to the end.
+ let new_filename = ebuild.file_stem().unwrap().to_string_lossy() + "-r1.ebuild";
+ let new_path = ebuild_dir.join(new_filename.as_ref());
+ fs::rename(&ebuild, &new_path)?;
+ new_path
+ };
+ Ok(new_path)
+ }
+
+ /// Return the contents of the old PATCHES.json from Chromium OS
+ #[allow(dead_code)]
+ pub fn old_cros_patch_contents(&self, hash: &str) -> Result<String> {
+ Self::old_file_contents(
+ hash,
+ &self.cros_checkout.join(CHROMIUMOS_OVERLAY_REL_PATH),
+ Path::new("sys-devel/llvm/files/PATCHES.json"),
+ )
+ }
+
+ /// Return the contents of the old PATCHES.json from android
+ #[allow(dead_code)]
+ pub fn old_android_patch_contents(&self, hash: &str) -> Result<String> {
+ Self::old_file_contents(
+ hash,
+ &self.android_checkout.join(ANDROID_LLVM_REL_PATH),
+ Path::new("patches/PATCHES.json"),
+ )
+ }
+
+ /// Return the contents of an old file in git
+ #[allow(dead_code)]
+ fn old_file_contents(hash: &str, pwd: &Path, file: &Path) -> Result<String> {
+ let git_ref = format!(
+ "{}:{}",
+ hash,
+ file.to_str()
+ .ok_or_else(|| anyhow!("failed to convert filepath to str"))?
+ );
+ let output = git_cd_cmd(pwd, &["show", &git_ref])?;
+ if !output.status.success() {
+ bail!("could not get old file contents for {}", &git_ref)
+ }
+ String::from_utf8(output.stdout)
+ .with_context(|| format!("converting {} file contents to UTF-8", &git_ref))
+ }
+
+ /// Create the commit message
+ fn build_commit_msg(from: &str, to: &str, footer: &str) -> String {
+ format!(
+ "[patch_sync] Synchronize patches from {}\n\n\
+ Copies new PATCHES.json changes from {} to {}\n\n{}",
+ from, from, to, footer
+ )
+ }
+}
+
+/// Return the path of an ebuild located within the given directory.
+fn find_ebuild(dir: &Path) -> Result<PathBuf> {
+ // TODO(ajordanr): Maybe use OnceCell for this regex?
+ let ebuild_matcher = Regex::new(r"(-r[0-9]+)?\.ebuild").unwrap();
+ for entry in fs::read_dir(dir)? {
+ let path = entry?.path();
+ if let Some(name) = path.file_name() {
+ if ebuild_matcher.is_match(
+ name.to_str()
+ .ok_or_else(|| anyhow!("converting filepath to UTF-8"))?,
+ ) {
+ return Ok(path);
+ }
+ }
+ }
+ bail!("could not find ebuild")
+}
+
+/// Run a given git command from inside a specified git dir.
+pub fn git_cd_cmd<I, S>(pwd: &Path, args: I) -> Result<Output>
+where
+ I: IntoIterator<Item = S>,
+ S: AsRef<OsStr>,
+{
+ let output = Command::new("git").current_dir(&pwd).args(args).output()?;
+ if !output.status.success() {
+ bail!("git command failed")
+ }
+ Ok(output)
+}
+
+pub fn repo_cd_cmd<I, S>(pwd: &Path, args: I) -> Result<()>
+where
+ I: IntoIterator<Item = S>,
+ S: AsRef<OsStr>,
+{
+ let status = Command::new("repo").current_dir(&pwd).args(args).status()?;
+ if !status.success() {
+ bail!("repo command failed")
+ }
+ Ok(())
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+ use rand::prelude::Rng;
+ use std::env;
+ use std::fs::File;
+
+ #[test]
+ fn test_revbump_ebuild() {
+ // Random number to append at the end of the test folder to prevent conflicts.
+ let rng: u32 = rand::thread_rng().gen();
+ let llvm_dir = env::temp_dir().join(format!("patch_sync_test_{}", rng));
+ fs::create_dir(&llvm_dir).expect("creating llvm dir in temp directory");
+
+ {
+ // With revision
+ let ebuild_name = "llvm-13.0_pre433403_p20211019-r10.ebuild";
+ let ebuild_path = llvm_dir.join(ebuild_name);
+ File::create(&ebuild_path).expect("creating test ebuild file");
+ let new_ebuild_path =
+ RepoSetupContext::rev_bump_llvm(&llvm_dir).expect("rev bumping the ebuild");
+ assert!(new_ebuild_path.ends_with("llvm-13.0_pre433403_p20211019-r11.ebuild"));
+ fs::remove_file(new_ebuild_path).expect("removing renamed ebuild file");
+ }
+ {
+ // Without revision
+ let ebuild_name = "llvm-13.0_pre433403_p20211019.ebuild";
+ let ebuild_path = llvm_dir.join(ebuild_name);
+ File::create(&ebuild_path).expect("creating test ebuild file");
+ let new_ebuild_path =
+ RepoSetupContext::rev_bump_llvm(&llvm_dir).expect("rev bumping the ebuild");
+ assert!(new_ebuild_path.ends_with("llvm-13.0_pre433403_p20211019-r1.ebuild"));
+ fs::remove_file(new_ebuild_path).expect("removing renamed ebuild file");
+ }
+
+ fs::remove_dir(&llvm_dir).expect("removing temp test dir");
+ }
+}
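
Tying it together, a minimal sketch of driving `RepoSetupContext` by hand; the two checkout paths and the `HEAD~1` ref are placeholders, while the field and method names come from the module above:

    use std::path::PathBuf;
    use crate::version_control::RepoSetupContext;

    fn example() -> anyhow::Result<()> {
        let ctx = RepoSetupContext {
            cros_checkout: PathBuf::from("/work/chromiumos"),
            android_checkout: PathBuf::from("/work/android"),
            sync_before: false, // set true to `repo sync` both checkouts in setup()
        };
        ctx.setup()?;
        // PATCHES.json as it existed at an older ref, via `git show <ref>:<path>`.
        let old_json = ctx.old_cros_patch_contents("HEAD~1")?;
        println!("current manifest: {}", ctx.cros_patches_path().display());
        println!("old manifest was {} bytes", old_json.len());
        Ok(())
    }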