author     Android Build Coastguard Worker <android-build-coastguard-worker@google.com>  2022-05-11 05:22:39 +0000
committer  Android Build Coastguard Worker <android-build-coastguard-worker@google.com>  2022-05-11 05:22:39 +0000
commit     6df3aebcdf09d861aa0df1d66908c097dbd95d7c (patch)
tree       fb0c6ce129ee56d3cf68dcc9c812c9959822597d
parent     dc4c38f9f2586ec3dc0ccd611caa589b6452603e (diff)
parent     cbb2e714a2890890991d72b3d8ce86f210c34e5c (diff)
download   treble-android13-mainline-networking-release.tar.gz
Change-Id: Icc6a053b740b1dd00e40e0eb6861629e8b013d8a
-rw-r--r--  build/sandbox/build_android_sandboxed.py  |  57
-rwxr-xr-x  build/sandbox/build_android_target.sh     |  12
-rw-r--r--  build/sandbox/config.py                    |  64
-rw-r--r--  build/sandbox/config_test.py               |  64
-rw-r--r--  build/sandbox/nsjail.py                    |  12
-rw-r--r--  build/sandbox/nsjail_test.py               |   2
-rw-r--r--  build/sandbox/rbe.py                       |   4
-rw-r--r--  build/sandbox/sample_config.xml            |  14
-rw-r--r--  fetcher/fetcher_lib.py                     |   3
-rw-r--r--  gki/Android.bp                             |  50
-rw-r--r--  gki/repack_gki.py                          | 141
-rw-r--r--  gki/repack_gki_lib.py                      | 164
-rw-r--r--  split/Android.bp                           |   3
-rw-r--r--  split/manifest_split.py                    |  25
-rw-r--r--  split/manifest_split_test.py               |  25
-rwxr-xr-x  vf/merge.sh                                |  34
16 files changed, 553 insertions(+), 121 deletions(-)
diff --git a/build/sandbox/build_android_sandboxed.py b/build/sandbox/build_android_sandboxed.py
index f6a1b57..8518074 100644
--- a/build/sandbox/build_android_sandboxed.py
+++ b/build/sandbox/build_android_sandboxed.py
@@ -23,10 +23,19 @@ _DEFAULT_COMMAND_WRAPPER = \
'/src/tools/treble/build/sandbox/build_android_target.sh'
-def build(build_target, variant, nsjail_bin, chroot, dist_dir, build_id,
- max_cpus, build_goals, config_file=None,
- command_wrapper=_DEFAULT_COMMAND_WRAPPER, use_rbe=False,
- readonly_bind_mount=None, env=[]):
+def build(build_target,
+ variant,
+ nsjail_bin,
+ chroot,
+ dist_dir,
+ build_id,
+ max_cpus,
+ build_goals,
+ config_file=None,
+ command_wrapper=_DEFAULT_COMMAND_WRAPPER,
+ use_rbe=False,
+ readonly_bind_mounts=[],
+ env=[]):
"""Builds an Android target in a secure sandbox.
Args:
@@ -42,9 +51,9 @@ def build(build_target, variant, nsjail_bin, chroot, dist_dir, build_id,
config_file: A string path to an overlay configuration file.
command_wrapper: A string path to the command wrapper.
use_rbe: If true, will attempt to use RBE for the build.
- readonly_bind_mount: A string path to a path to be mounted as read-only.
- env: An array of environment variables to define in the NsJail sandbox in the
- `var=val` syntax.
+ readonly_bind_mounts: A list of string paths to be mounted as read-only.
+ env: An array of environment variables to define in the NsJail sandbox in
+ the `var=val` syntax.
Returns:
A list of commands that were executed. Each command is a list of strings.
@@ -53,7 +62,8 @@ def build(build_target, variant, nsjail_bin, chroot, dist_dir, build_id,
cfg = config.Config(config_file)
android_target = cfg.get_build_config_android_target(build_target)
if cfg.has_tag(build_target, 'skip'):
- print('Warning: skipping build_target "{}" due to tag being set'.format(build_target))
+ print('Warning: skipping build_target "{}" due to tag being set'.format(
+ build_target))
return []
else:
android_target = build_target
@@ -69,10 +79,6 @@ def build(build_target, variant, nsjail_bin, chroot, dist_dir, build_id,
'-j',
] + build_goals
- readonly_bind_mounts = []
- if readonly_bind_mount:
- readonly_bind_mounts = [readonly_bind_mount]
-
extra_nsjail_args = []
cleanup = lambda: None
nsjail_wrapper = []
@@ -108,15 +114,11 @@ def arg_parser():
# Use the top level module docstring for the help description
parser = argparse.ArgumentParser(
description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
- parser.add_argument(
- '--build_target',
- help='The build target.')
+ parser.add_argument('--build_target', help='The build target.')
parser.add_argument(
'--variant', default='userdebug', help='The Android build variant.')
parser.add_argument(
- '--nsjail_bin',
- required=True,
- help='Path to NsJail binary.')
+ '--nsjail_bin', required=True, help='Path to NsJail binary.')
parser.add_argument(
'--chroot',
required=True,
@@ -131,13 +133,17 @@ def arg_parser():
'--command_wrapper',
default=_DEFAULT_COMMAND_WRAPPER,
help='Path to the command wrapper. '
- 'Defaults to \'%s\'.' % _DEFAULT_COMMAND_WRAPPER)
+ 'Defaults to \'%s\'.' % _DEFAULT_COMMAND_WRAPPER)
parser.add_argument(
'--readonly_bind_mount',
+ type=str,
+ default=[],
+ action='append',
help='Path to the a path to be mounted as readonly inside the secure '
- 'build sandbox.')
+ 'build sandbox. Can be specified multiple times')
parser.add_argument(
- '--env', '-e',
+ '--env',
+ '-e',
type=str,
default=[],
action='append',
@@ -163,9 +169,7 @@ def arg_parser():
help='One or more contexts used to select build goals from the '
'configuration.')
parser.add_argument(
- '--use_rbe',
- action='store_true',
- help='Executes the build on RBE')
+ '--use_rbe', action='store_true', help='Executes the build on RBE')
return parser
@@ -191,6 +195,7 @@ def main():
cfg = config.Config(args['config_file'])
build_goals = cfg.get_build_goals(args['build_target'], set(args['context']))
+ build_flags = cfg.get_build_flags(args['build_target'], set(args['context']))
build(
build_target=args['build_target'],
@@ -199,13 +204,13 @@ def main():
chroot=args['chroot'],
config_file=args['config_file'],
command_wrapper=args['command_wrapper'],
- readonly_bind_mount=args['readonly_bind_mount'],
+ readonly_bind_mounts=args['readonly_bind_mount'],
env=args['env'],
dist_dir=args['dist_dir'],
build_id=args['build_id'],
max_cpus=args['max_cpus'],
use_rbe=args['use_rbe'],
- build_goals=build_goals)
+ build_goals=build_goals + build_flags)
if __name__ == '__main__':
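
For reference, a minimal sketch of how the repeatable --readonly_bind_mount flag behaves after this change (the mount paths below are illustrative, not taken from the patch):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument(
    '--readonly_bind_mount',
    type=str,
    default=[],
    action='append',
    help='Path to be mounted read-only; may be given multiple times.')

# Each occurrence is appended to one list, which main() now forwards to
# build(readonly_bind_mounts=...).
args = parser.parse_args(
    ['--readonly_bind_mount', '/ro/tools',
     '--readonly_bind_mount', '/ro/prebuilts'])
assert args.readonly_bind_mount == ['/ro/tools', '/ro/prebuilts']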
diff --git a/build/sandbox/build_android_target.sh b/build/sandbox/build_android_target.sh
index 23e05ea..266bb71 100755
--- a/build/sandbox/build_android_target.sh
+++ b/build/sandbox/build_android_target.sh
@@ -40,6 +40,16 @@ fi
set -e
+BUILD_COMMAND_ARRAY=($BUILD_COMMAND)
+for i in ${BUILD_COMMAND_ARRAY[@]};
+do
+ if [[ $i =~ ^[A-Z_][A-Z0-9_]*= ]];
+ then
+ echo "build_android_target.sh: export $i";
+ export $i;
+ fi;
+done;
+
echo "build_android_target.sh: source build/envsetup.sh"
source build/envsetup.sh
echo "build_android_target.sh: lunch $ANDROID_TARGET"
@@ -54,7 +64,7 @@ cd "$BUILD_DIR"
set +e
echo "build_android_target.sh: $BUILD_COMMAND"
-$BUILD_COMMAND
+eval $BUILD_COMMAND
BUILD_COMMAND_EXIT_VALUE=$?
# Collect RBE metrics if enabled
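
The new export loop above hinges on a NAME=value prefix check; a minimal Python sketch of the same recognition rule follows (the DIST_DIR example token is illustrative):

import re

# Same pattern as the shell test [[ $i =~ ^[A-Z_][A-Z0-9_]*= ]]: the loop
# exports matching tokens (e.g. flags emitted by get_build_flags) so they are
# visible to the build, and leaves the remaining tokens as ordinary make goals.
_ASSIGNMENT = re.compile(r'^[A-Z_][A-Z0-9_]*=')

assert _ASSIGNMENT.match('VAR=a')
assert _ASSIGNMENT.match('DIST_DIR=/dist')   # illustrative
assert not _ASSIGNMENT.match('droid')
assert not _ASSIGNMENT.match('dist')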
diff --git a/build/sandbox/config.py b/build/sandbox/config.py
index 26bccbd..a1be52a 100644
--- a/build/sandbox/config.py
+++ b/build/sandbox/config.py
@@ -11,7 +11,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
"""Parses config file and provides various ways of using it."""
import xml.etree.ElementTree as ET
@@ -107,6 +106,7 @@ import collections
Overlay = collections.namedtuple('Overlay', ['name', 'replacement_paths'])
+
class BuildConfig(object):
"""Represents configuration of a build_target.
@@ -116,8 +116,8 @@ class BuildConfig(object):
tags: List of tags associated with the build target config
build_goals: List of goals to be used while building the target.
overlays: List of overlays to be mounted.
- views: A list of (source, destination) string path tuple to be mounted.
- See view nodes in XML.
+ views: A list of (source, destination) string path tuple to be mounted. See
+ view nodes in XML.
allow_readwrite_all: If true, mount source tree as rw.
allow_readwrite: List of directories to be mounted as rw.
allowed_projects_file: a string path name of a file with a containing
@@ -130,6 +130,7 @@ class BuildConfig(object):
android_target,
tags=frozenset(),
build_goals=(),
+ build_flags=(),
overlays=(),
views=(),
allow_readwrite_all=False,
@@ -141,6 +142,7 @@ class BuildConfig(object):
self.android_target = android_target
self.tags = tags
self.build_goals = list(build_goals)
+ self.build_flags = list(build_flags)
self.overlays = list(overlays)
self.views = list(views)
self.allow_readwrite_all = allow_readwrite_all
@@ -161,8 +163,7 @@ class BuildConfig(object):
@classmethod
def from_config(cls, config_elem, fs_view_map, base_config=None):
- """Creates a BuildConfig from a config XML element and an optional
- base_config.
+ """Creates a BuildConfig from a config XML element and an optional base_config.
Args:
config_elem: the config XML node element to build the configuration
@@ -188,6 +189,8 @@ class BuildConfig(object):
'allowed_projects_file', base_config.allowed_projects_file),
build_goals=_get_build_config_goals(config_elem,
base_config.build_goals),
+ build_flags=_get_build_config_flags(config_elem,
+ base_config.build_flags),
tags=_get_config_tags(config_elem, base_config.tags),
overlays=_get_overlays(config_elem, base_config.overlays),
allow_readwrite=_get_allow_readwrite(config_elem,
@@ -196,8 +199,7 @@ class BuildConfig(object):
allow_readwrite_all=_get_allowed_readwrite_all(
config_elem, base_config.allow_readwrite_all),
configurations=_get_configurations(config_elem,
- base_config.configurations)
- )
+ base_config.configurations))
def _get_configurations(config_elem, base):
@@ -226,6 +228,13 @@ def _get_build_config_goals(config_elem, base=None):
for goal in config_elem.findall('goal')]
+def _get_build_config_flags(config_elem, base=None):
+ """See _get_build_config_goals. Gets 'flag' instead of 'goal'."""
+ return base + [(goal.get('name'), set(goal.get('contexts').split(','))
+ if goal.get('contexts') else None)
+ for goal in config_elem.findall('flag')]
+
+
def _get_config_tags(config_elem, base=frozenset()):
"""Retrieves tags from build_config or target.
@@ -241,13 +250,12 @@ def _get_config_tags(config_elem, base=frozenset()):
def _get_allowed_readwrite_all(config_elem, default=False):
- """Determines if build_config or target is set to allow readwrite for all
- source paths.
+ """Determines if build_config or target is set to allow readwrite for all source paths.
Args:
config_elem: A build_config or target xml element.
- default: Value to use if element doesn't contain the
- allow_readwrite_all attribute.
+ default: Value to use if element doesn't contain the allow_readwrite_all
+ attribute.
Returns:
True if build config is set to allow readwrite for all sorce paths
@@ -264,7 +272,8 @@ def _get_overlays(config_elem, base=None):
base: Initial list of overlays to prepend to the list
Returns:
- A list of tuples of overlays and replacement paths to mount for a build_config or target.
+ A list of tuples of overlays and replacement paths to mount for a
+ build_config or target.
"""
overlays = []
for overlay in config_elem.findall('overlay'):
@@ -276,6 +285,7 @@ def _get_overlays(config_elem, base=None):
])))
return base + overlays
+
def _get_views(config_elem, fs_view_map, base=None):
"""Retrieves list of views from build_config or target.
@@ -287,13 +297,14 @@ def _get_views(config_elem, fs_view_map, base=None):
A list of (source, destination) string path tuple to be mounted. See view
nodes in XML.
"""
- return base + [fs for o in config_elem.findall('view')
- for fs in fs_view_map[o.get('name')]]
+ return base + [
+ fs for o in config_elem.findall('view')
+ for fs in fs_view_map[o.get('name')]
+ ]
def _get_allow_readwrite(config_elem, base=None):
- """Retrieves list of directories to be mounted rw from build_config or
- target.
+ """Retrieves list of directories to be mounted rw from build_config or target.
Args:
config_elem: A build_config or target xml element.
@@ -450,6 +461,18 @@ class Config:
return build_goals
+ def get_build_flags(self, build_target, contexts=frozenset()):
+ """See get_build_goals. Gets flags instead of goals."""
+ build_flags = []
+ for flag, build_contexts in self._build_config_map[
+ build_target].build_flags:
+ if not build_contexts:
+ build_flags.append(flag)
+ elif build_contexts.intersection(contexts):
+ build_flags.append(flag)
+
+ return build_flags
+
def get_rw_allowlist_map(self):
"""Return read-write allowlist map.
@@ -478,19 +501,18 @@ class Config:
overlay names corresponding to the target.
"""
return {
- b.name : [o.name for o in b.overlays
- ] for b in self._build_config_map.values()
+ b.name: [o.name for o in b.overlays
+ ] for b in self._build_config_map.values()
}
-
def get_fs_view_map(self):
"""Return the filesystem view map.
+
Returns:
A dict of filesystem views keyed by target name. A filesystem view is a
list of (source, destination) string path tuples.
"""
- return {b.name : b.views for b in self._build_config_map.values()}
-
+ return {b.name: b.views for b in self._build_config_map.values()}
def get_build_config(self, build_target):
return self._build_config_map[build_target]
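
To make the new goal/flag split concrete, here is a small standalone sketch of the selection rule shared by get_build_goals and get_build_flags (select_flags and the inline XML are illustrative; the element names follow sample_config.xml):

import xml.etree.ElementTree as ET

_EXAMPLE = """<build_config>
  <goal name="droid"/>
  <flag name="dist" contexts="ci"/>
  <flag name="VAR=a"/>
</build_config>"""

def select_flags(build_config_xml, contexts=frozenset()):
    # A <flag> with no contexts attribute is always selected; one with
    # contexts is selected only when a requested context matches.
    flags = []
    for elem in ET.fromstring(build_config_xml).findall('flag'):
        flag_contexts = (set(elem.get('contexts').split(','))
                         if elem.get('contexts') else None)
        if not flag_contexts or flag_contexts.intersection(contexts):
            flags.append(elem.get('name'))
    return flags

assert select_flags(_EXAMPLE) == ['VAR=a']
assert select_flags(_EXAMPLE, {'ci'}) == ['dist', 'VAR=a']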
diff --git a/build/sandbox/config_test.py b/build/sandbox/config_test.py
index 002c625..139b5f4 100644
--- a/build/sandbox/config_test.py
+++ b/build/sandbox/config_test.py
@@ -21,7 +21,7 @@ _TEST_CONFIG_XML = """<config>
<target name="android_target_1">
<build_config>
<goal name="droid"/>
- <goal name="dist"/>
+ <flag name="dist"/>
</build_config>
</target>
<target name="android_target_2" tags="cool,hot">
@@ -30,14 +30,14 @@ _TEST_CONFIG_XML = """<config>
<goal name="common_goal"/>
<build_config tags="warm">
<goal name="droid"/>
- <goal name="dist"/>
+ <flag name="dist"/>
<goal name="goal_for_android_target_2"/>
</build_config>
<build_config name="build_target_2" tags="dry">
<config name="fmc_framework_images" value="bt1,bt2"/>
<config name="fmc_misc_info_keys" value="misc_info_keys_2.txt"/>
<goal name="droid"/>
- <goal name="VAR=a"/>
+ <flag name="VAR=a"/>
</build_config>
</target>
<target name="android_target_3" tags="">
@@ -67,10 +67,10 @@ _TEST_CONTEXTS_CONFIG_XML = """<config>
<goal name="always" contexts=""/>
<!-- selected if ci context requested -->
- <goal name="dist" contexts="ci"/>
+ <flag name="dist" contexts="ci"/>
<!-- selected if x context requested -->
- <goal name="VAR=value" contexts="x"/>
+ <flag name="VAR=value" contexts="x"/>
<!-- selected if ci or x context requested -->
<goal name="extra_goal" contexts="ci,x"/>
@@ -80,6 +80,7 @@ _TEST_CONTEXTS_CONFIG_XML = """<config>
</config>
"""
+
class ConfigTest(unittest.TestCase):
"""unittest for Config."""
@@ -159,32 +160,28 @@ class ConfigTest(unittest.TestCase):
cfg.get_build_config_android_target('some_target'),
'android_target_4')
- def testBuildTargetToBuildGoals(self):
+ def testBuildTargetToBuildGoalsAndFlags(self):
with tempfile.NamedTemporaryFile('w+t') as test_config:
test_config.write(_TEST_CONFIG_XML)
test_config.flush()
cfg = config.factory(test_config.name)
- # Test that build_target android_target_1 has goals droid and dist.
- self.assertEqual(
- cfg.get_build_goals('android_target_1'),
- ['droid', 'dist'])
+ self.assertEqual(cfg.get_build_goals('android_target_1'), ['droid'])
+ self.assertEqual(cfg.get_build_flags('android_target_1'), ['dist'])
- # Test that build_target android_target_2 has goals droid, dist, and
- # goal_for_android_target_2.
self.assertEqual(
cfg.get_build_goals('android_target_2'),
- ['common_goal', 'droid', 'dist', 'goal_for_android_target_2'])
+ ['common_goal', 'droid', 'goal_for_android_target_2'])
+ self.assertEqual(cfg.get_build_flags('android_target_2'), ['dist'])
- # Test that build_target build_target_2 has goals droid and VAR=a.
self.assertEqual(
- cfg.get_build_goals('build_target_2'),
- ['common_goal', 'droid', 'VAR=a'])
+ cfg.get_build_goals('build_target_2'), ['common_goal', 'droid'])
+ self.assertEqual(cfg.get_build_flags('build_target_2'), ['VAR=a'])
# Test empty goals
- self.assertEqual(cfg.get_build_goals('no_goals_target'),[])
+ self.assertEqual(cfg.get_build_goals('no_goals_target'), [])
- def testBuildTargetToBuildGoalsWithContexts(self):
+ def testBuildTargetToBuildGoalsAndFlagsWithContexts(self):
with tempfile.NamedTemporaryFile('w+t') as test_config:
test_config.write(_TEST_CONTEXTS_CONFIG_XML)
test_config.flush()
@@ -212,19 +209,19 @@ class ConfigTest(unittest.TestCase):
# the x goals.
build_goals = cfg.get_build_goals('test_target', set(['x']))
+ build_flags = cfg.get_build_flags('test_target', set(['x']))
- self.assertEqual(
- build_goals,
- ['droid', 'always', 'VAR=value', 'extra_goal'])
+ self.assertEqual(build_goals, ['droid', 'always', 'extra_goal'])
+ self.assertEqual(build_flags, ['VAR=value'])
# Test that when requested_contexts is set(['ci', 'x']), we select the
# "always" goals, the ci goals, and the x goals.
build_goals = cfg.get_build_goals('test_target', set(['ci', 'x']))
+ build_flags = cfg.get_build_flags('test_target', set(['ci', 'x']))
- self.assertEqual(
- build_goals,
- ['droid', 'always', 'dist', 'VAR=value', 'extra_goal'])
+ self.assertEqual(build_goals, ['droid', 'always', 'extra_goal'])
+ self.assertEqual(build_flags, ['dist', 'VAR=value'])
def testAllowReadWriteAll(self):
with tempfile.NamedTemporaryFile('w+t') as test_config:
@@ -292,16 +289,19 @@ class ConfigTest(unittest.TestCase):
cfg = config.factory(test_config.name)
bc_at2 = cfg.get_build_config('android_target_2')
- self.assertDictEqual(bc_at2.configurations, {
- 'fmc_framework_images': 'image1,image2',
- 'fmc_misc_info_keys': 'misc_info_keys.txt'
- })
+ self.assertDictEqual(
+ bc_at2.configurations, {
+ 'fmc_framework_images': 'image1,image2',
+ 'fmc_misc_info_keys': 'misc_info_keys.txt'
+ })
bc_bt2 = cfg.get_build_config('build_target_2')
- self.assertDictEqual(bc_bt2.configurations, {
- 'fmc_framework_images': 'bt1,bt2',
- 'fmc_misc_info_keys': 'misc_info_keys_2.txt'
- })
+ self.assertDictEqual(
+ bc_bt2.configurations, {
+ 'fmc_framework_images': 'bt1,bt2',
+ 'fmc_misc_info_keys': 'misc_info_keys_2.txt'
+ })
+
if __name__ == '__main__':
unittest.main()
diff --git a/build/sandbox/nsjail.py b/build/sandbox/nsjail.py
index c388d0b..4d23040 100644
--- a/build/sandbox/nsjail.py
+++ b/build/sandbox/nsjail.py
@@ -351,7 +351,17 @@ def run_command(nsjail_command,
print(' '.join(nsjail_command), file=stdout)
if not dry_run:
- subprocess.check_call(nsjail_command, stdout=stdout, stderr=stderr)
+ try:
+ subprocess.check_call(nsjail_command, stdout=stdout, stderr=stderr)
+ except subprocess.CalledProcessError as error:
+ if len(error.cmd) > 13:
+ cmd = error.cmd[:6] + ['...elided...'] + error.cmd[-6:]
+ else:
+ cmd = error.cmd
+ msg = 'nsjail command %s failed with return code %d' % (cmd, error.returncode)
+ # Raise from None to avoid exception chaining.
+ raise RuntimeError(msg) from None
+
def parse_args():
"""Parse command line arguments.
diff --git a/build/sandbox/nsjail_test.py b/build/sandbox/nsjail_test.py
index a73bbdb..8ea93ef 100644
--- a/build/sandbox/nsjail_test.py
+++ b/build/sandbox/nsjail_test.py
@@ -75,7 +75,7 @@ class NsjailTest(unittest.TestCase):
self.assertEqual(stdout, expected)
def testFailingJailedCommand(self):
- with self.assertRaises(subprocess.CalledProcessError):
+ with self.assertRaises(RuntimeError):
nsjail.run(
nsjail_bin='/bin/false',
chroot='/chroot',
diff --git a/build/sandbox/rbe.py b/build/sandbox/rbe.py
index fba368f..6d959b8 100644
--- a/build/sandbox/rbe.py
+++ b/build/sandbox/rbe.py
@@ -40,6 +40,10 @@ _RBE_ENV = {
'RBE_JAVAC': 'true',
'RBE_D8': 'true',
'RBE_R8': 'true',
+ 'RBE_CXX_EXEC_STRATEGY' : 'racing',
+ 'RBE_JAVAC_EXEC_STRATEGY' : 'racing',
+ 'RBE_R8_EXEC_STRATEGY' : 'racing',
+ 'RBE_D8_EXEC_STRATEGY' : 'racing',
}
diff --git a/build/sandbox/sample_config.xml b/build/sandbox/sample_config.xml
index dbbd412..3c75217 100644
--- a/build/sandbox/sample_config.xml
+++ b/build/sandbox/sample_config.xml
@@ -3,19 +3,19 @@
Defines sample build configuration file.
-->
<config>
- <target name="aosp_cf_x86_phone_default" android_target="aosp_cf_x86_phone"
+ <target name="aosp_cf_x86_64_phone_default" android_target="aosp_cf_x86_64_phone"
allow_readwrite_all="true">
<!-- Target elements can define elements and attributes that are inherited
by build_config child elements. -->
<goal name="droid"/>
<build_config>
- <!-- build_config name will inherit the name aosp_cf_x86_phone_default
- and append dist to the goal list. -->
- <goal name="dist"/>
+ <!-- build_config name will inherit the name aosp_cf_x86_64_phone_default
+ and append dist to the flag list. -->
+ <flag name="dist"/>
</build_config>
- <build_config name="aosp_cf_x86_phone_no_dist" tags="skip">
+ <build_config name="aosp_cf_x86_64_phone_no_dist" tags="skip">
</build_config>
- <build_config name="aosp_cf_x86_phone_ro" allow_readwrite_all="false">
+ <build_config name="aosp_cf_x86_64_phone_ro" allow_readwrite_all="false">
<!-- This build_config will override allow_readwrite_all attribute. -->
</build_config>
<build_config name="aosp_cf_x86_tv" android_target="aosp_cf_x86_tv">
@@ -26,7 +26,7 @@ Defines sample build configuration file.
<!-- If android_target isn't provided target will use name as default
android_target. -->
<goal name="droid"/>
- <goal name="dist"/>
+ <flag name="dist"/>
<build_config>
<!-- build_config will inherit the name and android_target:
aosp_car_arm64. -->
diff --git a/fetcher/fetcher_lib.py b/fetcher/fetcher_lib.py
index 0ec0173..9701494 100644
--- a/fetcher/fetcher_lib.py
+++ b/fetcher/fetcher_lib.py
@@ -103,7 +103,8 @@ def create_client(http):
Returns:
An authorized android build api client.
"""
- return build(serviceName='androidbuildinternal', version='v2beta1', http=http)
+ return build(serviceName='androidbuildinternal', version='v2beta1', http=http,
+ static_discovery=False)
def create_client_from_json_keyfile(json_keyfile_name=None):
diff --git a/gki/Android.bp b/gki/Android.bp
new file mode 100644
index 0000000..d5b886d
--- /dev/null
+++ b/gki/Android.bp
@@ -0,0 +1,50 @@
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "tools_treble_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: ["tools_treble_license"],
+}
+
+python_defaults {
+ name: "repack_gki_defaults",
+ version: {
+ py2: {
+ enabled: false,
+ embedded_launcher: false,
+ },
+ py3: {
+ enabled: true,
+ embedded_launcher: false,
+ },
+ },
+}
+
+python_library_host {
+ name: "repack_gki_lib",
+ defaults: ["repack_gki_defaults"],
+ srcs: [
+ "repack_gki_lib.py",
+ ],
+ libs: [
+ "fetcher-lib",
+ ],
+ pkg_path: "treble/gki",
+}
+
+python_binary_host {
+ name: "repack_gki",
+ main: "repack_gki.py",
+ defaults: ["repack_gki_defaults"],
+ srcs: [
+ "repack_gki.py",
+ ],
+ libs: [
+ "repack_gki_lib",
+ ],
+ required: [
+ "mkbootimg",
+ "unpack_bootimg",
+ ],
+}
diff --git a/gki/repack_gki.py b/gki/repack_gki.py
new file mode 100644
index 0000000..90b632e
--- /dev/null
+++ b/gki/repack_gki.py
@@ -0,0 +1,141 @@
+"""Repacks GKI boot images with the given kernel images."""
+import argparse
+import json
+import os
+import shutil
+import tempfile
+
+from treble.fetcher import fetcher_lib
+from treble.gki import repack_gki_lib
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+ parser.add_argument(
+ '--json_keyfile',
+ help='JSON keyfile containing credentials. '
+ '(Default: Use default credential file)')
+ parser.add_argument(
+ '--ramdisk_build_id',
+ required=True,
+ help='Download from the specified build.')
+ parser.add_argument(
+ '--ramdisk_target',
+ required=True,
+ help='Name of the ramdisk target from the ramdisk branch.')
+ parser.add_argument(
+ '--kernel_build_id',
+ required=True,
+ help='Download from the specified build.')
+ parser.add_argument(
+ '--kernel_target',
+ required=True,
+ help='Name of the kernel target from the kernel branch.')
+ parser.add_argument(
+ '--kernel_debug_target',
+ required=True,
+ help='Name of the kernel debug target from the kernel branch.')
+ parser.add_argument(
+ '--kernel_version',
+ required=True,
+ help='The Kernel version to use when repacking.')
+ parser.add_argument(
+ '--out_dir', required=True, help='Save output to this directory.')
+
+ args = parser.parse_args()
+ client = fetcher_lib.create_client_from_json_keyfile(
+ json_keyfile_name=args.json_keyfile)
+
+ if not os.path.exists(args.out_dir):
+ os.makedirs(args.out_dir)
+
+ with tempfile.TemporaryDirectory() as tmp_bootimg_dir, \
+ tempfile.TemporaryDirectory() as tmp_kernel_dir:
+ # Fetch boot images.
+ repack_gki_lib.fetch_bootimg(
+ client=client,
+ out_dir=tmp_bootimg_dir,
+ build_id=args.ramdisk_build_id,
+ kernel_version=args.kernel_version,
+ target=args.ramdisk_target,
+ )
+
+ # Fetch kernel artifacts.
+ kernel_dir, kernel_debug_dir = repack_gki_lib.fetch_kernel(
+ client=client,
+ out_dir=tmp_kernel_dir,
+ build_id=args.kernel_build_id,
+ kernel_target=args.kernel_target,
+ kernel_debug_target=args.kernel_debug_target,
+ )
+
+ # Save kernel artifacts to the out dir.
+ kernel_out_dir = os.path.join(args.out_dir, 'kernel', args.kernel_version)
+ if not os.path.exists(kernel_out_dir):
+ os.makedirs(kernel_out_dir)
+
+ def copy_kernel_file(in_dir, filename, outname=None):
+ if not outname:
+ outname = filename
+ shutil.copy(
+ os.path.join(in_dir, filename), os.path.join(kernel_out_dir, outname))
+
+ copy_kernel_file(kernel_dir, 'System.map')
+ copy_kernel_file(kernel_dir, 'abi_symbollist')
+ copy_kernel_file(kernel_dir, 'vmlinux')
+ copy_kernel_file(kernel_dir, 'Image',
+ 'kernel-{}'.format(args.kernel_version))
+ copy_kernel_file(kernel_dir, 'Image.lz4',
+ 'kernel-{}-lz4'.format(args.kernel_version))
+ copy_kernel_file(kernel_dir, 'Image.gz',
+ 'kernel-{}-gz'.format(args.kernel_version))
+ copy_kernel_file(kernel_debug_dir, 'System.map', 'System.map-allsyms')
+ copy_kernel_file(kernel_debug_dir, 'abi-generated.xml')
+ copy_kernel_file(kernel_debug_dir, 'abi-full-generated.xml')
+ copy_kernel_file(kernel_debug_dir, 'Image',
+ 'kernel-{}-allsyms'.format(args.kernel_version))
+ copy_kernel_file(kernel_debug_dir, 'Image.lz4',
+ 'kernel-{}-lz4-allsyms'.format(args.kernel_version))
+ copy_kernel_file(kernel_debug_dir, 'Image.gz',
+ 'kernel-{}-gz-allsyms'.format(args.kernel_version))
+
+ # Repack individual boot images using the fetched kernel artifacts,
+ # then save to the out dir.
+ repack_gki_lib.repack_bootimgs(tmp_bootimg_dir, kernel_dir,
+ kernel_debug_dir)
+ shutil.copytree(tmp_bootimg_dir, args.out_dir, dirs_exist_ok=True)
+
+ # Repack boot images inside the img.zip and save to the out dir.
+ img_zip_name = [f for f in os.listdir(tmp_bootimg_dir) if '-img-' in f][0]
+ img_zip_path = os.path.join(tmp_bootimg_dir, img_zip_name)
+ repack_gki_lib.repack_img_zip(img_zip_path, kernel_dir, kernel_debug_dir,
+ args.kernel_version)
+ shutil.copy(img_zip_path, args.out_dir)
+
+ # Replace kernels within the target_files.zip and save to the out dir.
+ # TODO(b/209035444): GSI target_files does not yet include a 5.15 boot.img.
+ if args.kernel_version != '5.15':
+ target_files_zip_name = [
+ f for f in os.listdir(tmp_bootimg_dir) if '-target_files-' in f
+ ][0]
+ target_files_zip_path = os.path.join(tmp_bootimg_dir, target_files_zip_name)
+ repack_gki_lib.replace_target_files_zip_kernels(target_files_zip_path,
+ kernel_out_dir,
+ args.kernel_version)
+ shutil.copy(target_files_zip_path, args.out_dir)
+
+ # Copy otatools.zip from the ramdisk build, used for GKI signing.
+ shutil.copy(os.path.join(tmp_bootimg_dir, 'otatools.zip'), args.out_dir)
+
+ # Write prebuilt-info.txt using the prebuilt artifact build IDs.
+ data = {
+ 'ramdisk-build-id': int(args.ramdisk_build_id),
+ 'kernel-build-id': int(args.kernel_build_id),
+ }
+ with open(os.path.join(kernel_out_dir, 'prebuilt-info.txt'), 'w') as f:
+ json.dump(data, f, indent=4)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/gki/repack_gki_lib.py b/gki/repack_gki_lib.py
new file mode 100644
index 0000000..9051a65
--- /dev/null
+++ b/gki/repack_gki_lib.py
@@ -0,0 +1,164 @@
+"""Helper library for repacking GKI boot images."""
+import os
+import shutil
+import subprocess
+import tempfile
+
+from treble.fetcher import fetcher_lib
+
+
+def fetch_bootimg(client, out_dir, build_id, kernel_version, target):
+ """Fetches boot.img artifacts from a given build ID."""
+ fetcher_lib.fetch_artifacts(
+ client=client,
+ build_id=build_id,
+ target=target,
+ pattern=r'(gsi_.*-img-.*\.zip|gsi_.*-target_files-.*\.zip|boot-debug-{version}.*\.img|boot-test-harness-{version}.*\.img|otatools.zip)'
+ .format(version=kernel_version),
+ out_dir=out_dir)
+
+
+def fetch_kernel(client, out_dir, build_id, kernel_target, kernel_debug_target):
+ """Fetches kernel artifacts from a given build ID."""
+ kernel_dir = os.path.join(out_dir, 'kernel')
+ kernel_debug_dir = os.path.join(out_dir, 'kernel_debug')
+ os.makedirs(kernel_dir)
+ os.makedirs(kernel_debug_dir)
+
+ fetcher_lib.fetch_artifacts(
+ client=client,
+ build_id=build_id,
+ target=kernel_target,
+ pattern=r'(Image|Image.lz4|System\.map|abi_symbollist|vmlinux)',
+ out_dir=kernel_dir)
+ fetcher_lib.fetch_artifacts(
+ client=client,
+ build_id=build_id,
+ target=kernel_debug_target,
+ pattern=r'(Image|Image.lz4|System\.map|abi-generated.xml|abi-full-generated.xml)',
+ out_dir=kernel_debug_dir)
+
+ print('Compressing kernels')
+
+ def compress_kernel(kernel_path):
+ zipped_kernel_path = os.path.join(os.path.dirname(kernel_path), 'Image.gz')
+ with open(zipped_kernel_path, 'wb') as zipped_kernel:
+ cmd = [
+ 'gzip',
+ '-nc',
+ kernel_path,
+ ]
+ print(' '.join(cmd))
+ subprocess.check_call(cmd, stdout=zipped_kernel)
+
+ compress_kernel(os.path.join(kernel_dir, 'Image'))
+ compress_kernel(os.path.join(kernel_debug_dir, 'Image'))
+
+ return kernel_dir, kernel_debug_dir
+
+
+def _replace_kernel(bootimg_path, kernel_path):
+ """Unpacks a boot.img, replaces the kernel, then repacks."""
+ with tempfile.TemporaryDirectory() as unpack_dir:
+ print('Unpacking bootimg %s' % bootimg_path)
+ cmd = [
+ 'out/host/linux-x86/bin/unpack_bootimg',
+ '--boot_img',
+ bootimg_path,
+ '--out',
+ unpack_dir,
+ '--format',
+ 'mkbootimg',
+ ]
+ print(' '.join(cmd))
+ mkbootimg_args = subprocess.check_output(cmd).decode('utf-8').split(' ')
+ print('Copying kernel %s' % kernel_path)
+ shutil.copy(kernel_path, os.path.join(unpack_dir, 'kernel'))
+ print('Repacking with mkbootimg')
+ cmd = [
+ 'out/host/linux-x86/bin/mkbootimg',
+ '--output',
+ bootimg_path,
+ ] + mkbootimg_args
+ print(' '.join(cmd))
+ subprocess.check_call(cmd)
+
+
+def repack_bootimgs(bootimg_dir, kernel_dir, kernel_debug_dir):
+ """Repacks all boot images in a given dir using the provided kernels."""
+ for bootimg_path in os.listdir(bootimg_dir):
+ bootimg_path = os.path.join(bootimg_dir, bootimg_path)
+ if not bootimg_path.endswith('.img'):
+ continue
+
+ kernel_name = 'Image'
+ if '-gz' in bootimg_path:
+ kernel_name = 'Image.gz'
+ elif '-lz4' in bootimg_path:
+ kernel_name = 'Image.lz4'
+
+ kernel_path = os.path.join(kernel_dir, kernel_name)
+ if bootimg_path.endswith('-allsyms.img'):
+ kernel_path = os.path.join(kernel_debug_dir, kernel_name)
+
+ _replace_kernel(bootimg_path, kernel_path)
+
+
+def repack_img_zip(img_zip_path, kernel_dir, kernel_debug_dir, kernel_version):
+ """Repacks boot images within an img.zip archive."""
+ with tempfile.TemporaryDirectory() as unzip_dir:
+ # TODO(b/209035444): 5.15 GSI boot.img is not yet available, so reuse 5.10 boot.img
+ # which should have an identical ramdisk.
+ if kernel_version == '5.15':
+ kernel_version = '5.10'
+ pattern = 'boot-{}*'.format(kernel_version)
+ print('Unzipping %s to repack bootimgs' % img_zip_path)
+ cmd = [
+ 'unzip',
+ '-d',
+ unzip_dir,
+ img_zip_path,
+ pattern,
+ ]
+ print(' '.join(cmd))
+ subprocess.check_call(cmd)
+ repack_bootimgs(unzip_dir, kernel_dir, kernel_debug_dir)
+ cmd = [
+ 'zip',
+ img_zip_path,
+ pattern,
+ ]
+ print(' '.join(cmd))
+ subprocess.check_call(cmd, cwd=unzip_dir)
+
+
+def replace_target_files_zip_kernels(target_files_zip_path, kernel_out_dir,
+ kernel_version):
+ """Replaces the BOOT/kernel-* kernels within a target_files.zip archive."""
+ with tempfile.TemporaryDirectory() as unzip_dir:
+ pattern = 'BOOT/kernel-{}*'.format(kernel_version)
+ print(
+ 'Unzipping %s to replace kernels in preparation for signing' %
+ target_files_zip_path,)
+ cmd = [
+ 'unzip',
+ '-d',
+ unzip_dir,
+ target_files_zip_path,
+ pattern,
+ ]
+ print(' '.join(cmd))
+ subprocess.check_call(cmd)
+ for kernel in os.listdir(kernel_out_dir):
+ if kernel.startswith('kernel-{}'.format(kernel_version)):
+ print('Copying %s' % kernel)
+ shutil.copy(
+ os.path.join(kernel_out_dir, kernel),
+ os.path.join(unzip_dir, 'BOOT'))
+ cmd = [
+ 'zip',
+ target_files_zip_path,
+ pattern,
+ ]
+ print(' '.join(cmd))
+ subprocess.check_call(cmd, cwd=unzip_dir)
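
For reference, a minimal sketch of how repack_bootimgs above chooses the replacement kernel from the boot image file name alone (pick_kernel is an illustrative helper, not part of the patch):

import os

def pick_kernel(bootimg_name, kernel_dir, kernel_debug_dir):
    # Compression suffix selects the kernel flavor; -allsyms images take the
    # kernel from the debug build.
    kernel_name = 'Image'
    if '-gz' in bootimg_name:
        kernel_name = 'Image.gz'
    elif '-lz4' in bootimg_name:
        kernel_name = 'Image.lz4'
    base = kernel_debug_dir if bootimg_name.endswith('-allsyms.img') else kernel_dir
    return os.path.join(base, kernel_name)

assert pick_kernel('boot-5.10-gz.img', 'k', 'kd') == 'k/Image.gz'
assert pick_kernel('boot-5.10-lz4-allsyms.img', 'k', 'kd') == 'kd/Image.lz4'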
diff --git a/split/Android.bp b/split/Android.bp
index 331354b..f35167f 100644
--- a/split/Android.bp
+++ b/split/Android.bp
@@ -87,9 +87,6 @@ python_test_host {
"xml_diff.py",
"xml_diff_test.py",
],
- libs: [
- "py-mock",
- ],
test_config: "test.xml",
test_suites: ["general-tests"],
}
diff --git a/split/manifest_split.py b/split/manifest_split.py
index d5f9b95..5114f24 100644
--- a/split/manifest_split.py
+++ b/split/manifest_split.py
@@ -121,10 +121,12 @@ class ManifestSplitConfig:
this project, for projects that should be added to the resulting manifest.
path_mappings: A list of PathMappingConfigs to modify a path in the build
sandbox to the path in the manifest.
+ ignore_paths: Set of paths to ignore when parsing module_info_file
"""
remove_projects: Dict[str, str]
add_projects: Dict[str, str]
path_mappings: List[PathMappingConfig]
+ ignore_paths: Set[str]
@classmethod
def from_config_files(cls, config_files: List[str]):
@@ -139,6 +141,8 @@ class ManifestSplitConfig:
remove_projects: Dict[str, str] = {}
add_projects: Dict[str, str] = {}
path_mappings = []
+ """ Always ignore paths in out/ directory. """
+ ignore_paths = set(["out/"])
for config_file in config_files:
root = ET.parse(config_file).getroot()
@@ -155,7 +159,10 @@ class ManifestSplitConfig:
for child in root.findall("path_mapping")
])
- return cls(remove_projects, add_projects, path_mappings)
+ ignore_paths.update(
+ {c.attrib["name"]: config_file for c in root.findall("ignore_path")})
+
+ return cls(remove_projects, add_projects, path_mappings, ignore_paths)
def get_repo_projects(repo_list_file, manifest, path_mappings):
@@ -195,7 +202,7 @@ def get_repo_projects(repo_list_file, manifest, path_mappings):
class ModuleInfo:
"""Contains various mappings to/from module/project"""
- def __init__(self, module_info_file, repo_projects):
+ def __init__(self, module_info_file, repo_projects, ignore_paths):
"""Initialize a module info instance.
Builds various maps related to platform build system modules and how they
@@ -204,6 +211,7 @@ class ModuleInfo:
Args:
module_info_file: The path to a module-info.json file from a build.
repo_projects: The output of the get_repo_projects function.
+ ignore_paths: Set of paths to ignore from module_info_file data
Raises:
ValueError: A module from module-info.json belongs to a path not
@@ -221,14 +229,18 @@ class ModuleInfo:
with open(module_info_file) as module_info_file:
module_info = json.load(module_info_file)
+ # Check that module contains a path and the path is not in set of
+ # ignore paths
def module_has_valid_path(module):
- return ("path" in module_info[module] and module_info[module]["path"] and
- not module_info[module]["path"][0].startswith("out/"))
+ paths = module.get("path")
+ if not paths:
+ return False
+ return all(not paths[0].startswith(p) for p in ignore_paths)
module_paths = {
module: module_info[module]["path"][0]
for module in module_info
- if module_has_valid_path(module)
+ if module_has_valid_path(module_info[module])
}
module_project_paths = {
module: scan_repo_projects(repo_projects, module_paths[module])
@@ -519,7 +531,8 @@ def create_split_manifest(targets, manifest_file, split_manifest_file,
# While we still have projects whose modules we haven't checked yet,
if module_info_file:
- module_info = ModuleInfo(module_info_file, repo_projects)
+ module_info = ModuleInfo(module_info_file, repo_projects,
+ config.ignore_paths)
checked_projects = set()
projects_to_check = input_projects.difference(checked_projects)
logger.info("Checking module-info dependencies for direct and adjacent modules...")
diff --git a/split/manifest_split_test.py b/split/manifest_split_test.py
index 546d3c1..d9c6f76 100644
--- a/split/manifest_split_test.py
+++ b/split/manifest_split_test.py
@@ -14,12 +14,12 @@
"""Test manifest split."""
import json
-import mock
import os
import re
import subprocess
import tempfile
import unittest
+import unittest.mock
import xml.etree.ElementTree as ET
import manifest_split
@@ -121,8 +121,9 @@ class ManifestSplitTest(unittest.TestCase):
'system/project4': 'platform/project4',
'vendor/google/project3': 'vendor/project3',
}
+ ignore_paths = set(['out/'])
module_info = manifest_split.ModuleInfo(module_info_file.name,
- repo_projects)
+ repo_projects, ignore_paths)
self.assertEqual(
module_info.project_modules, {
'platform/project1': set(['target1a', 'target1b']),
@@ -163,11 +164,13 @@ class ManifestSplitTest(unittest.TestCase):
}""")
module_info_file.flush()
repo_projects = {}
+ ignore_paths = set()
with self.assertRaisesRegex(ValueError,
'Unknown module path for module target1'):
- manifest_split.ModuleInfo(module_info_file.name, repo_projects)
+ manifest_split.ModuleInfo(module_info_file.name, repo_projects,
+ ignore_paths)
- @mock.patch.object(subprocess, 'check_output', autospec=True)
+ @unittest.mock.patch.object(subprocess, 'check_output', autospec=True)
def test_get_ninja_inputs(self, mock_check_output):
mock_check_output.return_value = b"""
path/to/input1
@@ -179,7 +182,7 @@ class ManifestSplitTest(unittest.TestCase):
inputs = manifest_split.get_ninja_inputs('unused', 'unused', ['droid'])
self.assertEqual(inputs, {'path/to/input1', 'path/to/input2'})
- @mock.patch.object(subprocess, 'check_output', autospec=True)
+ @unittest.mock.patch.object(subprocess, 'check_output', autospec=True)
def test_get_ninja_inputs_includes_test_mapping(self, mock_check_output):
mock_check_output.return_value = b"""
path/to/input1
@@ -192,7 +195,7 @@ class ManifestSplitTest(unittest.TestCase):
self.assertEqual(
inputs, {'path/to/input1', 'path/to/input2', 'path/to/TEST_MAPPING'})
- @mock.patch.object(subprocess, 'check_output', autospec=True)
+ @unittest.mock.patch.object(subprocess, 'check_output', autospec=True)
def test_get_kati_makefiles(self, mock_check_output):
with tempfile.TemporaryDirectory() as temp_dir:
os.chdir(temp_dir)
@@ -291,7 +294,7 @@ class ManifestSplitTest(unittest.TestCase):
ET.tostring(projects[0]).strip().decode(),
'<project name="platform/project1" path="system/project1" />')
- @mock.patch.object(subprocess, 'check_output', autospec=True)
+ @unittest.mock.patch.object(subprocess, 'check_output', autospec=True)
def test_create_split_manifest(self, mock_check_output):
with tempfile.NamedTemporaryFile('w+t') as repo_list_file, \
tempfile.NamedTemporaryFile('w+t') as manifest_file, \
@@ -444,9 +447,9 @@ class ManifestSplitTest(unittest.TestCase):
self.assertEqual(debug_data['vendor/project1']['kati_makefiles'][0],
product_makefile)
- @mock.patch.object(manifest_split, 'get_ninja_inputs', autospec=True)
- @mock.patch.object(manifest_split, 'get_kati_makefiles', autospec=True)
- @mock.patch.object(manifest_split.ModuleInfo, '__init__', autospec=True)
+ @unittest.mock.patch.object(manifest_split, 'get_ninja_inputs', autospec=True)
+ @unittest.mock.patch.object(manifest_split, 'get_kati_makefiles', autospec=True)
+ @unittest.mock.patch.object(manifest_split.ModuleInfo, '__init__', autospec=True)
def test_create_split_manifest_skip_kati_module_info(self, mock_init,
mock_get_kati_makefiles,
mock_get_ninja_inputs):
@@ -483,7 +486,7 @@ class ManifestSplitTest(unittest.TestCase):
mock_get_kati_makefiles.assert_not_called()
mock_init.assert_not_called()
- @mock.patch.object(subprocess, 'check_output', autospec=True)
+ @unittest.mock.patch.object(subprocess, 'check_output', autospec=True)
def test_create_split_manifest_installed_prebuilt(self, mock_check_output):
# The purpose of this test is to verify that create_split_manifests treats
diff --git a/vf/merge.sh b/vf/merge.sh
index 20ceb6b..8e76241 100755
--- a/vf/merge.sh
+++ b/vf/merge.sh
@@ -5,14 +5,14 @@
set -e
-while getopts ":t:d:v:b:m:" option ; do
+while getopts ":t:d:v:b:m:r:" option ; do
case "${option}" in
t) TARGET=${OPTARG} ;;
d) DIST_DIR=${OPTARG} ;;
v) VENDOR_DIR=${OPTARG} ;;
b) BUILD_ID=${OPTARG} ;;
- # TODO(b/170638547) Remove the need for merge configs.
m) MERGE_CONFIG_DIR=${OPTARG} ;;
+ r) HAS_RADIO_IMG=${OPTARG} ;;
*) echo "Unexpected argument: -${OPTARG}" >&2 ;;
esac
done
@@ -33,9 +33,8 @@ if [[ -z "${BUILD_ID}" ]]; then
echo "error: -b build id argument not set"
exit 1
fi
-if [[ -z "${MERGE_CONFIG_DIR}" ]]; then
- echo "error: -m merge config dir argument not set"
- exit 1
+if [[ -z "${HAS_RADIO_IMG}" ]]; then
+ HAS_RADIO_IMG="true"
fi
# Move the system-only build artifacts to a separate folder
@@ -48,20 +47,33 @@ mv -f ${DIST_DIR}/${TARGET}-*.zip ${SYSTEM_DIR}
source build/envsetup.sh
lunch ${TARGET}-userdebug
+EXTRA_FLAGS=""
+if [[ "${MERGE_CONFIG_DIR}" ]]; then
+ EXTRA_FLAGS+=" --framework-item-list ${MERGE_CONFIG_DIR}/framework_item_list.txt \
+ --framework-misc-info-keys ${MERGE_CONFIG_DIR}/framework_misc_info_keys.txt \
+ --vendor-item-list ${MERGE_CONFIG_DIR}/vendor_item_list.txt"
+fi
out/host/linux-x86/bin/merge_target_files \
--framework-target-files ${SYSTEM_DIR}/${TARGET}-target_files*.zip \
--vendor-target-files ${VENDOR_DIR}/*-target_files-*.zip \
- --framework-item-list ${MERGE_CONFIG_DIR}/framework_item_list.txt \
- --framework-misc-info-keys ${MERGE_CONFIG_DIR}/framework_misc_info_keys.txt \
- --vendor-item-list ${MERGE_CONFIG_DIR}/vendor_item_list.txt \
--allow-duplicate-apkapex-keys \
--output-target-files ${DIST_DIR}/${TARGET}-target_files-${BUILD_ID}.zip \
--output-img ${DIST_DIR}/${TARGET}-img-${BUILD_ID}.zip \
- --output-ota ${DIST_DIR}/${TARGET}-ota-${BUILD_ID}.zip
+ --output-ota ${DIST_DIR}/${TARGET}-ota-${BUILD_ID}.zip \
+ ${EXTRA_FLAGS}
# Copy bootloader.img, radio.img, and android-info.txt, needed for flashing.
cp ${VENDOR_DIR}/bootloader.img ${DIST_DIR}/bootloader.img
-cp ${VENDOR_DIR}/radio.img ${DIST_DIR}/radio.img
+# Copy radio.img unless arg is "false" (eg. Android TV targets)
+if [[ $HAS_RADIO_IMG = "true" ]]; then
+ cp ${VENDOR_DIR}/radio.img ${DIST_DIR}/radio.img
+fi
+
+# Copy vendor otatools.zip, needed by sign_target_files_apks
+if [[ -f "${VENDOR_DIR}/otatools.zip" ]]; then
+ cp ${VENDOR_DIR}/otatools.zip ${DIST_DIR}/otatools_vendor.zip
+fi
+
unzip -j -d ${DIST_DIR} \
${VENDOR_DIR}/*-target_files-*.zip \
- OTA/android-info.txt
+  OTA/android-info.txt
\ No newline at end of file
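
Finally, a minimal Python sketch of the now-optional merge-config flags in merge.sh (merge_args and the /cfg path are illustrative; the real script assembles EXTRA_FLAGS in shell and passes it to merge_target_files):

def merge_args(merge_config_dir=None):
    # The framework/vendor item-list flags are only added when a merge config
    # directory was supplied with -m; otherwise merge_target_files runs with
    # its defaults.
    args = ['--allow-duplicate-apkapex-keys']
    if merge_config_dir:
        args += [
            '--framework-item-list', merge_config_dir + '/framework_item_list.txt',
            '--framework-misc-info-keys', merge_config_dir + '/framework_misc_info_keys.txt',
            '--vendor-item-list', merge_config_dir + '/vendor_item_list.txt',
        ]
    return args

assert '--framework-item-list' not in merge_args()
assert '--framework-item-list' in merge_args('/cfg')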