author    Yifan Hong <elsk@google.com>    2024-02-28 12:02:17 -0800
committer Yifan Hong <elsk@google.com>    2024-03-06 01:13:22 +0000
commit    85e8d4762cb46442c4ba145c022f9cb0c96b02ce (patch)
tree      adb641ab787f0a1227488d79506b20b2f6c7aa12
parent    dd37d255392cf7a7fd1bb6aaa94c855315614267 (diff)
download  build-85e8d4762cb46442c4ba145c022f9cb0c96b02ce.tar.gz
kleaf: Update kernel_prebuilt_repo to support local prebuilts.
This is to prepare for DDKv2 development, where GKI prebuilts are
downloaded via extensions. In addition, DDKv2 supports building against
local prebuilts, so support that.

This is a complete re-write of kernel_prebuilt_repo while still
supporting the original API for legacy WORKSPACE usage.

Caveat: This change disables parallel download. However:
- The list of release blockers for Bazel 7.1.0 indicates that it may
  take some time for the official release.
- There are no known users of --use_prebuilt_gki on android15 branches.
- Even if there are, the downsides are small; it only makes a one-time
  download take longer. (Downloads are subject to bandwidth constraints
  in the first place.)

Test: TH
Test: bazel build @gki_prebuilts//... --config=internet --use_prebuilt_gki=11508345
Bug: 291918087
Change-Id: Ib2672e36f79709a2a0e0f035c0530aba52d736c0
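For orientation, below is a minimal sketch of how the rewritten rule can be
instantiated. The first call mirrors the kleaf/workspace.bzl hunk in this
change; the second, local-prebuilts call is only illustrative, and the load
label, directory, and build number in it are assumptions rather than part of
this change.

    # Sketch only: attribute names come from kernel_prebuilt_repo.bzl below,
    # but the load label and concrete values are hypothetical.
    load("//build/kernel/kleaf/impl:kernel_prebuilt_repo.bzl", "kernel_prebuilt_repo")

    # Legacy WORKSPACE usage, downloading from Android CI (see workspace.bzl below):
    kernel_prebuilt_repo(
        name = "gki_prebuilts",
        apparent_name = "gki_prebuilts",
        auto_download_config = True,
        target = "kernel_aarch64",
        build_number = "11508345",
    )

    # DDKv2-style usage against local prebuilts; artifact_url_fmt is ignored
    # and files are symlinked instead of downloaded:
    kernel_prebuilt_repo(
        name = "gki_prebuilts",
        apparent_name = "gki_prebuilts",
        auto_download_config = True,
        target = "kernel_aarch64",
        local_artifact_path = "out/kernel_aarch64/dist",  # hypothetical, relative to workspace root
    )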
-rw-r--r--  kleaf/impl/kernel_prebuilt_repo.bzl  408
-rw-r--r--  kleaf/workspace.bzl                    3
2 files changed, 225 insertions, 186 deletions
diff --git a/kleaf/impl/kernel_prebuilt_repo.bzl b/kleaf/impl/kernel_prebuilt_repo.bzl
index ec81c52..21f21bb 100644
--- a/kleaf/impl/kernel_prebuilt_repo.bzl
+++ b/kleaf/impl/kernel_prebuilt_repo.bzl
@@ -23,18 +23,23 @@ load(
visibility("//build/kernel/kleaf/...")
_BUILD_NUM_ENV_VAR = "KLEAF_DOWNLOAD_BUILD_NUMBER_MAP"
+ARTIFACT_URL_FMT = "https://androidbuildinternal.googleapis.com/android/internal/build/v3/builds/{build_number}/{target}/attempts/latest/artifacts/{filename}/url?redirect=true"
-def _sanitize_repo_name(x):
- """Sanitize x so it can be used as a repository name.
+def _bool_to_str(b):
+ """Turns boolean to string."""
- Replacing invalid characters (those not in `[A-Za-z0-9-_.]`) with `_`.
- """
- ret = ""
- for c in x.elems():
- if not c.isalnum() and not c in "-_.":
- c = "_"
- ret += c
- return ret
+ # We can't use str() because bool(str(False)) != False
+ return "True" if b else ""
+
+def _str_to_bool(s):
+ """Turns string to boolean."""
+
+ # We can't use bool() because bool(str(False)) != False
+ if s == "True":
+ return True
+ if not s:
+ return False
+ fail("Invalid value {}".format(s))
def _parse_env(repository_ctx, var_name, expected_key):
"""
@@ -68,131 +73,233 @@ def _parse_env(repository_ctx, var_name, expected_key):
return value
return None
-_ARTIFACT_URL_FMT = "https://androidbuildinternal.googleapis.com/android/internal/build/v3/builds/{build_number}/{target}/attempts/latest/artifacts/{filename}/url?redirect=true"
-
-def _download_artifact_repo_impl(repository_ctx):
- workspace_file = """workspace(name = "{}")
-""".format(repository_ctx.name)
- repository_ctx.file("WORKSPACE.bazel", workspace_file, executable = False)
-
- build_number = _get_build_number(repository_ctx)
- if not build_number:
- _handle_no_build_number(repository_ctx)
- else:
- _download_from_build_number(repository_ctx, build_number)
-
def _get_build_number(repository_ctx):
"""Gets the value of build number, setting defaults if necessary."""
- build_number = _parse_env(repository_ctx, _BUILD_NUM_ENV_VAR, repository_ctx.attr.parent_repo)
+ build_number = _parse_env(repository_ctx, _BUILD_NUM_ENV_VAR, repository_ctx.attr.apparent_name)
if not build_number:
build_number = repository_ctx.attr.build_number
return build_number
-def _handle_no_build_number(repository_ctx):
- """Handles the case where the build number cannot be found."""
-
- SAMPLE_BUILD_NUMBER = "8077484"
- if repository_ctx.attr.parent_repo == "gki_prebuilts":
- msg = """
-ERROR: {parent_repo}: No build_number specified. Fix by specifying `--use_prebuilt_gki=<build_number>"`, e.g.
- bazel build --use_prebuilt_gki={build_number} @{parent_repo}//{filename}
-""".format(
- filename = repository_ctx.attr.filename,
- parent_repo = repository_ctx.attr.parent_repo,
- build_number = SAMPLE_BUILD_NUMBER,
- )
+def _infer_download_config(target):
+ """Returns inferred `download_config` and `mandatory` from target."""
+ chosen_mapping = None
+ for mapping in CI_TARGET_MAPPING.values():
+ if mapping["target"] == target:
+ chosen_mapping = mapping
+ if not chosen_mapping:
+ fail("auto_download_config with {} is not supported yet.".format(target))
- else:
- msg = """
-ERROR: {parent_repo}: No build_number specified.
-
-Fix by one of the following:
-- Specify `build_number` attribute in {parent_repo}
-- Specify `--action_env={build_num_var}="{parent_repo}=<build_number>"`, e.g.
- bazel build \\
- --action_env={build_num_var}="{parent_repo}={build_number}" \\
- @{parent_repo}//{filename}
-""".format(
- filename = repository_ctx.attr.filename,
- parent_repo = repository_ctx.attr.parent_repo,
- build_number = SAMPLE_BUILD_NUMBER,
- build_num_var = _BUILD_NUM_ENV_VAR,
- )
- build_file = """
-load("{fail_bzl}", "fail_rule")
+ download_config = {}
+ mandatory = {}
-fail_rule(
- name = "file",
- message = \"\"\"{msg}\"\"\"
-)
-""".format(
- fail_bzl = Label(":fail.bzl"),
- msg = msg,
- )
+ for out in chosen_mapping["outs"]:
+ download_config[out] = out
+ mandatory[out] = True
- repository_ctx.file("file/BUILD.bazel", build_file, executable = False)
+ protected_modules = chosen_mapping["protected_modules"]
+ download_config[protected_modules] = protected_modules
+ mandatory[protected_modules] = False
-def _download_from_build_number(repository_ctx, build_number):
- local_filename = repository_ctx.attr.local_filename
- remote_filename = repository_ctx.attr.remote_filename_fmt.format(
+ for config in GKI_DOWNLOAD_CONFIGS:
+ config_mandatory = config.get("mandatory", True)
+ for out in config.get("outs", []):
+ download_config[out] = out
+ mandatory[out] = config_mandatory
+ for out, remote_filename_fmt in config.get("outs_mapping", {}).items():
+ download_config[out] = remote_filename_fmt
+ mandatory[out] = config_mandatory
+
+ mandatory = {key: _bool_to_str(value) for key, value in mandatory.items()}
+
+ return download_config, mandatory
+
+_true_future = struct(wait = lambda: struct(success = True))
+_false_future = struct(wait = lambda: struct(success = False))
+
+def _symlink_local_file(repository_ctx, local_path, remote_filename, file_mandatory):
+ """Creates symlink in local_path that points to remote_filename.
+
+ Returns:
+ a future object, with `wait()` function that returns a struct containing:
+
+ - Either a boolean, `success`, indicating whether the file exists or not.
+ If the file does not exist and `file_mandatory == True`,
+ either this function or `wait()` throws build error.
+ - Or a string, `fail_later`, an error message for an error that should
+ be postponed to the analysis phase when the target is requested.
+ """
+ artifact_path = repository_ctx.workspace_root.get_child(repository_ctx.attr.local_artifact_path).get_child(remote_filename)
+ if artifact_path.exists:
+ repository_ctx.symlink(artifact_path, local_path)
+ return _true_future
+ if file_mandatory:
+ fail("{}: {} does not exist".format(repository_ctx.attr.name, artifact_path))
+ return _false_future
+
+def _download_remote_file(repository_ctx, local_path, remote_filename, file_mandatory):
+ """Download `remote_filename` to `local_path`.
+
+ Returns:
+ a future object, with `wait()` function that returns a struct containing:
+
+ - Either a boolean, `success`, indicating whether the file is downloaded
+ successfully.
+ If the file fails to download and `file_mandatory == True`,
+ either this function or `wait()` throws build error.
+ - Or a string, `fail_later`, an error message for an error that should
+ be postponed to the analysis phase when the target is requested.
+ """
+ build_number = _get_build_number(repository_ctx)
+ artifact_url = repository_ctx.attr.artifact_url_fmt.format(
build_number = build_number,
+ target = repository_ctx.attr.target,
+ filename = remote_filename,
)
- # If there's a "/" in the remote filename, escape
- remote_filename = remote_filename.replace("/", "%2F")
-
- # Download the requested file
- urls = [repository_ctx.attr.artifact_url_fmt.format(
- build_number = build_number,
+ url_with_fake_build_number = repository_ctx.attr.artifact_url_fmt.format(
+ build_number = "__FAKE_BUILD_NUMBER_PLACEHOLDER__",
target = repository_ctx.attr.target,
filename = remote_filename,
- )]
- download_path = repository_ctx.path("file/{}".format(local_filename))
- download_info = repository_ctx.download(
- url = urls,
- output = download_path,
- allow_fail = repository_ctx.attr.allow_fail,
)
+ if not build_number and artifact_url != url_with_fake_build_number:
+ return struct(wait = lambda: struct(
+ fail_later = repr("ERROR: No build_number specified for @@{}".format(repository_ctx.attr.name)),
+ ))
+
+ # TODO(b/325494748): With bazel 7.1.0, use parallel download
+ download_status = repository_ctx.download(
+ url = artifact_url,
+ output = local_path,
+ allow_fail = not file_mandatory,
+ # block = False,
+ )
+ return _true_future if download_status.success else _false_future
+
+def _kernel_prebuilt_repo_impl(repository_ctx):
+ download_config = repository_ctx.attr.download_config
+ mandatory = repository_ctx.attr.mandatory
+ if repository_ctx.attr.auto_download_config:
+ if download_config:
+ fail("{}: download_config should not be set when auto_download_config is True".format(
+ repository_ctx.attr.name,
+ ))
+ if mandatory:
+ fail("{}: mandatory should not be set when auto_download_config is True".format(
+ repository_ctx.attr.name,
+ ))
+ download_config, mandatory = _infer_download_config(repository_ctx.attr.target)
+
+ futures = {}
+ for local_filename, remote_filename_fmt in download_config.items():
+ local_path = repository_ctx.path(_join(local_filename, _basename(local_filename)))
+ remote_filename = remote_filename_fmt.format(
+ build_number = repository_ctx.attr.build_number,
+ target = repository_ctx.attr.target,
+ )
+ file_mandatory = _str_to_bool(mandatory.get(local_filename, _bool_to_str(True)))
+
+ if repository_ctx.attr.local_artifact_path:
+ download = _symlink_local_file
+ else:
+ download = _download_remote_file
+
+ futures[local_filename] = download(
+ repository_ctx = repository_ctx,
+ local_path = local_path,
+ remote_filename = remote_filename,
+ file_mandatory = file_mandatory,
+ )
+
+ download_statuses = {}
+ for local_filename, future in futures.items():
+ download_statuses[local_filename] = future.wait()
- # Define the filegroup to contain the file.
- # If failing and it is allowed, set filegroup to empty
- if not download_info.success and repository_ctx.attr.allow_fail:
- srcs = ""
- else:
- srcs = '"{}"'.format(local_filename)
-
- build_file = """filegroup(
- name="file",
- srcs=[{srcs}],
- visibility=["@{parent_repo}//{local_filename}:__pkg__"],
+ for local_filename, download_status in download_statuses.items():
+ msg_repr = getattr(download_status, "fail_later", None)
+ if msg_repr:
+ fmt = """\
+load("{fail_bzl}", "fail_rule")
+
+fail_rule(
+ name = {local_filename_repr},
+ message = {msg_repr},
)
-""".format(
- srcs = srcs,
- local_filename = local_filename,
- parent_repo = repository_ctx.attr.parent_repo,
- )
- repository_ctx.file("file/BUILD.bazel", build_file, executable = False)
+"""
+ elif download_status.success:
+ fmt = """\
+exports_files(
+ [{local_filename_repr}],
+ visibility = ["//visibility:public"],
+)
+"""
+ else:
+ fmt = """\
+filegroup(
+ name = {local_filename_repr},
+ srcs = [],
+ visibility = ["//visibility:public"],
+)
+"""
+ content = fmt.format(
+ local_filename_repr = repr(_basename(local_filename)),
+ fail_bzl = Label("//build/kernel/kleaf:fail.bzl"),
+ msg_repr = msg_repr,
+ )
+ repository_ctx.file(_join(local_filename, "BUILD.bazel"), content)
+
+ repository_ctx.file("""WORKSPACE.bazel""", """\
+workspace({})
+""".format(repr(repository_ctx.attr.name)))
-_download_artifact_repo = repository_rule(
- implementation = _download_artifact_repo_impl,
+kernel_prebuilt_repo = repository_rule(
+ implementation = _kernel_prebuilt_repo_impl,
attrs = {
+ "local_artifact_path": attr.string(
+ doc = """Directory to local artifacts.
+
+ If set, `artifact_url_fmt` is ignored.
+
+ Only the root module may call `declare()` with this attribute set.
+
+ If relative, it is interpreted against workspace root.
+
+ If absolute, this is similar to setting `artifact_url_fmt` to
+ `file://<absolute local_artifact_path>/{filename}`, but avoids
+ using `download()`. Files are symlinked not copied, and
+ `--config=internet` is not necessary.
+ """,
+ ),
"build_number": attr.string(
doc = "the default build number to use if the environment variable is not set.",
),
- "parent_repo": attr.string(doc = "Name of the parent `download_artifacts_repo`"),
- "local_filename": attr.string(
- doc = "Filename and target name used locally to refer to the file.",
+ "apparent_name": attr.string(doc = "apparant repo name", mandatory = True),
+ "auto_download_config": attr.bool(
+ doc = """If `True`, infer `download_config` and `mandatory`
+ from `target`.""",
),
- "remote_filename_fmt": attr.string(
- doc = """Format string of the filename on the download location..
+ "download_config": attr.string_dict(
+ doc = """Configure the list of files to download.
- The filename is determined by `remote_filename_fmt.format(...)`, with the following keys:
+ Key: local file name.
- - `build_number`: the environment variable or the `build_number` attribute
+ Value: remote file name format string, with the following anchors:
+ * {build_number}
+ * {target}
""",
),
"target": attr.string(doc = "Name of target on the download location, e.g. `kernel_aarch64`"),
- "allow_fail": attr.bool(),
+ "mandatory": attr.string_dict(
+ doc = """Configure whether files are mandatory.
+
+ Key: local file name.
+
+ Value: Whether the file is mandatory.
+
+ If a file name is not found in the dictionary, default
+ value is `True`. If mandatory, failure to download the
+ file results in a build failure.
+ """,
+ ),
"artifact_url_fmt": attr.string(
doc = """API endpoint for Android CI artifacts.
@@ -203,7 +310,7 @@ _download_artifact_repo = repository_rule(
Its default value is the API endpoint for http://ci.android.com.
""",
- default = _ARTIFACT_URL_FMT,
+ default = ARTIFACT_URL_FMT,
),
},
environ = [
@@ -212,87 +319,16 @@ _download_artifact_repo = repository_rule(
)
# Avoid dependency to paths, since we do not necessary have skylib loaded yet.
+# TODO(b/276493276): Use paths once we migrate to bzlmod completely.
def _basename(s):
return s.split("/")[-1]
-def _alias_repo_impl(repository_ctx):
- workspace_file = """workspace(name = "{}")
-""".format(repository_ctx.name)
- repository_ctx.file("WORKSPACE.bazel", workspace_file, executable = False)
-
- for local_filename, actual in repository_ctx.attr.aliases.items():
- build_file = """\
-alias(
- name="{local_file_basename}",
- actual="{actual}",
- visibility=["//visibility:public"]
-)
-""".format(local_file_basename = _basename(local_filename), actual = actual)
- repository_ctx.file("{}/BUILD.bazel".format(local_filename), build_file, executable = False)
+def _join(path, *others):
+ ret = path
-_alias_repo = repository_rule(
- implementation = _alias_repo_impl,
- attrs = {
- "aliases": attr.string_dict(doc = """
- - Keys: local filename.
- - Value: label to the actual target.
- """),
- },
- environ = [
- _BUILD_NUM_ENV_VAR,
- ],
-)
-
-def kernel_prebuilt_repo(
- name,
- artifact_url_fmt,
- build_number = None):
- """Define a repository that downloads kernel prebuilts.
-
- Args:
- name: name of repository
- artifact_url_fmt: see [`define_kleaf_workspace.artifact_url_fmt`](#define_kleaf_workspace-artifact_url_fmt)
- build_number: build number on [ci.android.com](http://ci.android.com)
- """
- mapping = CI_TARGET_MAPPING[name]
- target = mapping["target"]
-
- files = {out: {} for out in mapping["outs"]}
- optional_files = {mapping["protected_modules"]: {}}
- for config in GKI_DOWNLOAD_CONFIGS:
- if config.get("mandatory", True):
- files_dict = files
- else:
- files_dict = optional_files
+ for other in others:
+ if not ret.endswith("/"):
+ ret += "/"
+ ret += other
- files_dict.update({out: {} for out in config.get("outs", [])})
-
- for out, remote_filename_fmt in config.get("outs_mapping", {}).items():
- file_metadata = {"remote_filename_fmt": remote_filename_fmt}
- files_dict.update({out: file_metadata})
-
- for files_dict, allow_fail in ((files, False), (optional_files, True)):
- for local_filename, file_metadata in files_dict.items():
- # Need a repo for each file because repository_ctx.download is blocking. Defining multiple
- # repos allows downloading in parallel.
- # e.g. @gki_prebuilts_vmlinux
- _download_artifact_repo(
- name = name + "_" + _sanitize_repo_name(local_filename),
- parent_repo = name,
- local_filename = local_filename,
- build_number = build_number,
- target = target,
- remote_filename_fmt = file_metadata.get("remote_filename_fmt", local_filename),
- allow_fail = allow_fail,
- artifact_url_fmt = artifact_url_fmt,
- )
-
- # Define a repo named @gki_prebuilts that contains aliases to individual files, e.g.
- # @gki_prebuilts//vmlinux
- _alias_repo(
- name = name,
- aliases = {
- local_filename: "@" + name + "_" + _sanitize_repo_name(local_filename) + "//file"
- for local_filename in (list(files.keys()) + list(optional_files.keys()))
- },
- )
+ return ret
diff --git a/kleaf/workspace.bzl b/kleaf/workspace.bzl
index 00e50a4..f215cf0 100644
--- a/kleaf/workspace.bzl
+++ b/kleaf/workspace.bzl
@@ -168,7 +168,10 @@ WARNING: define_kleaf_workspace() should be called with common_kernel_package={}
for repo_name in CI_TARGET_MAPPING:
kernel_prebuilt_repo(
name = repo_name,
+ apparent_name = repo_name,
artifact_url_fmt = artifact_url_fmt,
+ auto_download_config = True,
+ target = CI_TARGET_MAPPING[repo_name]["target"],
)
maybe(
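As a closing illustration (not part of this change), the new download_config
and mandatory attributes are plain string dicts, so booleans are carried as
the strings produced by _bool_to_str and read back with _str_to_bool. A small
sketch with hypothetical file names:

    # Keys are local file names; values are remote filename format strings
    # that may use the {build_number} and {target} anchors.
    download_config = {
        "vmlinux": "vmlinux",
        "gki-info.txt": "{target}-{build_number}-gki-info.txt",  # hypothetical mapping
    }

    # Values are stringified booleans. str()/bool() cannot be used directly
    # because bool("False") is True; an empty string stands for False instead.
    mandatory = {
        "vmlinux": "True",   # _bool_to_str(True)
        "gki-info.txt": "",  # _bool_to_str(False)
    }
    # Round trip: _str_to_bool(_bool_to_str(x)) == x for both True and False.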